Mirror of https://github.com/Xevion/banner.git (synced 2026-01-31 14:23:36 -06:00)

Compare commits: c7117f14a3 ... v0.5.0 (103 commits)
| SHA1 |
|---|
| 4207783cdd |
| c90bd740de |
| 61f8bd9de7 |
| b5eaedc9bc |
| 58475c8673 |
| 78159707e2 |
| 779144a4d5 |
| 0da2e810fe |
| ed72ac6bff |
| 57b5cafb27 |
| 841191c44d |
| 67d7c81ef4 |
| d108a41f91 |
| 5fab8c216a |
| 15256ff91c |
| 6df4303bd6 |
| e3b855b956 |
| 0ce0257fdc |
| fa2fc45aa9 |
| 7cc8267c2e |
| 1733ee5f86 |
| 992263205c |
| 37942378ae |
| c445190838 |
| 57a6a9871f |
| 966732a6d2 |
| 3292d35521 |
| 71ac0782d0 |
| 1c6d2d4b6e |
| 51f8256e61 |
| b1ed2434f8 |
| 47c23459f1 |
| 8af9b0a1a2 |
| 020a00254f |
| 45de5be60d |
| 8384f418c8 |
| 3dca896a35 |
| 1b7d2d2824 |
| e370008d75 |
| 176574343f |
| 91899bb109 |
| 08ae54c093 |
| 33b8681b19 |
| 398a1b9474 |
| a732ff9a15 |
| bfcd868337 |
| 99f0d0bc49 |
| 8b7729788d |
| 27b0cb877e |
| 8ec2f7d36f |
| 28a8a15b6b |
| 19b3a98f66 |
| b64aa41b14 |
| 64449e8976 |
| 2e0fefa5ee |
| 97488494fb |
| b3322636a9 |
| 878cc5f773 |
| 94fb6b4190 |
| e3b638a7d8 |
| 404a52e64c |
| a917315967 |
| 9d51fde893 |
| 79fc931077 |
| f3861a60c4 |
| 26b1a88860 |
| 27ac9a7302 |
| 1d345ed247 |
| 6f831f5fa6 |
| ac2638dd9a |
| cfb847f2e5 |
| e7d47f1f96 |
| 9a48587479 |
| 624247ee14 |
| 430e2a255b |
| bbc78131ec |
| 77ab71d4d5 |
| 9d720bb0a7 |
| dcc564dee6 |
| 4ca55a1fd4 |
| a6e7adcaef |
| 752c855dec |
| 14b02df8f4 |
| 00cb209052 |
| dfc05a2789 |
| fe798e1867 |
| 39688f800f |
| b2b4bb67f0 |
| e5d8cec2d6 |
| e9a0558535 |
| 353c36bcf2 |
| 2f853a7de9 |
| dd212c3239 |
| 8ff3a18c3e |
| 43647096e9 |
| 1bdbd1d6d6 |
| 23be6035ed |
| 139e4aa635 |
| 677bb05b87 |
| f2bd02c970 |
| 8cdf969a53 |
| 4764d48ac9 |
| e734e40347 |
`.cargo/config.toml`

@@ -0,0 +1,2 @@
[env]
TS_RS_EXPORT_DIR = { value = "web/src/lib/bindings/", relative = true }
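For context: ts-rs reads `TS_RS_EXPORT_DIR` when its generated `export_bindings` tests run (see the `bindings` recipe in the Justfile below), writing one `.ts` file per exported type. A minimal sketch of the export pattern; the `Course` fields here are illustrative, not taken from this diff:

```rust
use serde::Serialize;
use ts_rs::TS;

// `cargo test export_bindings` runs the tests that ts-rs derives for each
// exported type; they write Course.ts into the TS_RS_EXPORT_DIR directory.
#[derive(Serialize, TS)]
#[ts(export)]
pub struct Course {
    pub crn: String,
    pub title: String,
    pub enrollment: i32,
}
```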
`.dockerignore`

@@ -0,0 +1,51 @@
# Build artifacts
target/
**/target/

# Documentation
README.md
docs/
*.md

# Old Go codebase
go/

# Development configuration
bacon.toml
.env
.env.*
!.env.example

# CI/CD
.github/
.git/

# Development tools
Justfile
rust-toolchain.toml

# Frontend build artifacts and cache
web/node_modules/
web/dist/
web/.vite/
web/.tanstack/
web/.vscode/

# IDE and editor files
.vscode/
.idea/
*.swp
*.swo
*~

# OS files
.DS_Store
Thumbs.db

# Test coverage
coverage/
*.profdata
*.profraw

# SQLx offline mode (include this in builds)
!.sqlx/
`.github/workflows/ci.yml` (+65)

@@ -0,0 +1,65 @@
name: CI

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

env:
  CARGO_TERM_COLOR: always
  RUST_BACKTRACE: 1

jobs:
  check:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          components: rustfmt, clippy

      - name: Setup Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: latest

      - name: Cache Rust dependencies
        uses: Swatinem/rust-cache@v2
        with:
          cache-on-failure: true

      - name: Install frontend dependencies
        working-directory: web
        run: bun install --frozen-lockfile

      - name: Check Rust formatting
        run: cargo fmt --all -- --check

      - name: Check TypeScript formatting
        working-directory: web
        run: bun run format:check

      - name: TypeScript type check
        working-directory: web
        run: bun run typecheck

      - name: ESLint
        working-directory: web
        run: bun run lint

      - name: Clippy
        run: cargo clippy --all-features -- --deny warnings

      - name: Run tests
        run: cargo test --all-features

      - name: Build frontend
        working-directory: web
        run: bun run build

      - name: Build backend
        run: cargo build --release --bin banner
`.gitignore` (+4, -1)

@@ -1,3 +1,6 @@
.env
/target
/go/

# ts-rs bindings
web/src/lib/bindings/*.ts
!web/src/lib/bindings/index.ts
`.vscode/settings.json` (+3)

@@ -0,0 +1,3 @@
{
  "rust-analyzer.check.command": "clippy"
}
`Cargo.lock` (generated, +1004, -265): diff suppressed because it is too large.
`Cargo.toml` (+60, -25)

@@ -1,32 +1,67 @@
[package]
name = "banner"
version = "0.1.0"
version = "0.5.0"
edition = "2024"
default-run = "banner"

[features]
default = ["embed-assets"]
embed-assets = ["dep:rust-embed", "dep:mime_guess"]

[dependencies]
tokio = { version = "1.47.1", features = ["full"] }
axum = "0.8.4"
serenity = { version = "0.12.4", features = ["rustls_backend"] }
reqwest = { version = "0.12.23", features = ["json", "cookies"] }
diesel = { version = "2.2.12", features = ["chrono", "postgres", "uuid"] }
redis = { version = "0.32.5", features = ["tokio-comp"] }
figment = { version = "0.10.19", features = ["toml", "env"] }
serde_json = "1.0.143"
serde = { version = "1.0.219", features = ["derive"] }
governor = "0.10.1"
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
dotenvy = "0.15.7"
poise = "0.6.1"
async-trait = "0.1"
fundu = "2.0.1"
anyhow = "1.0.99"
thiserror = "2.0.16"
chrono = { version = "0.4", features = ["serde"] }
chrono-tz = "0.8"
rand = "0.8"
regex = "1.10"
url = "2.5"
async-trait = "0.1"
axum = "0.8.4"
bitflags = { version = "2.9.4", features = ["serde"] }
chrono = { version = "0.4.42", features = ["serde"] }
compile-time = "0.2.0"
time = "0.3.41"
bitflags = { version = "2.9.3", features = ["serde"] }
cookie = "0.18.1"
dashmap = "6.1.0"
dotenvy = "0.15.7"
figment = { version = "0.10.19", features = ["toml", "env"] }
fundu = "2.0.1"
futures = "0.3"
http = "1.3.1"
poise = "0.6.1"
rand = "0.9.2"
regex = "1.10"
reqwest = { version = "0.12.23", features = ["json", "cookies"] }
reqwest-middleware = { version = "0.4.2", features = ["json"] }
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.143"
serenity = { version = "0.12.4", features = ["rustls_backend"] }
sqlx = { version = "0.8.6", features = [
    "runtime-tokio-rustls",
    "postgres",
    "chrono",
    "json",
    "macros",
    "migrate",
] }
thiserror = "2.0.16"
time = "0.3.43"
tokio = { version = "1.47.1", features = ["full"] }
tokio-util = "0.7"
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.20", features = ["env-filter", "json"] }
url = "2.5"
governor = "0.10.1"
serde_path_to_error = "0.1.17"
num-format = "0.4.4"
tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout"] }
rust-embed = { version = "8.0", features = ["include-exclude"], optional = true }
mime_guess = { version = "2.0", optional = true }
clap = { version = "4.5", features = ["derive"] }
rapidhash = "4.1.0"
yansi = "1.0.1"
extension-traits = "2"
ts-rs = { version = "11.1.0", features = ["serde-compat", "serde-json-impl"] }
html-escape = "0.2.13"
axum-extra = { version = "0.12.5", features = ["query"] }

[dev-dependencies]

# A 'release mode' profile that compiles quickly, but still 'appears' like a release build, useful for debugging
[profile.dev-release]
inherits = "dev"
debug-assertions = false
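The `embed-assets` feature gates the optional `rust-embed` and `mime_guess` dependencies. A sketch of how such gating is commonly wired; the module name and folder path are assumptions (the Dockerfile below copies the Vite build to `web/dist`):

```rust
// Only compiled when the `embed-assets` feature is enabled (the default);
// `--no-default-features` builds serve no embedded files and proxy to Vite.
#[cfg(feature = "embed-assets")]
mod embedded {
    use rust_embed::RustEmbed;

    #[derive(RustEmbed)]
    #[folder = "web/dist/"] // assumed path for the built frontend
    pub struct Assets;

    /// Look up an embedded file and guess its MIME type for response headers.
    pub fn get(path: &str) -> Option<(Vec<u8>, String)> {
        let file = Assets::get(path)?;
        let mime = mime_guess::from_path(path).first_or_octet_stream();
        Some((file.data.into_owned(), mime.to_string()))
    }
}
```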
`Dockerfile` (+116)

@@ -0,0 +1,116 @@
# Build arguments
ARG RUST_VERSION=1.89.0
ARG RAILWAY_GIT_COMMIT_SHA

# --- Frontend Build Stage ---
FROM oven/bun:1 AS frontend-builder

WORKDIR /app

# Copy backend Cargo.toml for build-time version retrieval
COPY ./Cargo.toml ./

# Copy frontend package files
COPY ./web/package.json ./web/bun.lock* ./

# Install dependencies
RUN bun install --frozen-lockfile

# Copy frontend source code
COPY ./web ./

# Build frontend
RUN bun run build

# --- Chef Base Stage ---
FROM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION} AS chef
WORKDIR /app

# --- Planner Stage ---
FROM chef AS planner
COPY Cargo.toml Cargo.lock ./
COPY build.rs ./
COPY src ./src
# Migrations & .sqlx specifically left out to avoid invalidating cache
RUN cargo chef prepare --recipe-path recipe.json --bin banner

# --- Rust Build Stage ---
FROM chef AS builder

# Set build-time environment variable for Railway Git commit SHA
ARG RAILWAY_GIT_COMMIT_SHA
ENV RAILWAY_GIT_COMMIT_SHA=${RAILWAY_GIT_COMMIT_SHA}

# Copy recipe from planner and build dependencies only
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --release --recipe-path recipe.json --bin banner

# Install build dependencies for final compilation
RUN apt-get update && apt-get install -y \
    pkg-config \
    libssl-dev \
    git \
    && rm -rf /var/lib/apt/lists/*

# Copy source code and built frontend assets
COPY Cargo.toml Cargo.lock ./
COPY build.rs ./
COPY .git* ./
COPY src ./src
COPY migrations ./migrations
COPY --from=frontend-builder /app/dist ./web/dist

# Build web app with embedded assets
RUN cargo build --release --bin banner

# Strip the binary to reduce size
RUN strip target/release/banner

# --- Runtime Stage ---
FROM debian:12-slim

ARG APP=/usr/src/app
ARG APP_USER=appuser
ARG UID=1000
ARG GID=1000

# Install runtime dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    tzdata \
    wget \
    && rm -rf /var/lib/apt/lists/*

ARG TZ=Etc/UTC
ENV TZ=${TZ}

# Create user with specific UID/GID
RUN addgroup --gid $GID $APP_USER \
    && adduser --uid $UID --disabled-password --gecos "" --ingroup $APP_USER $APP_USER \
    && mkdir -p ${APP}

# Copy application binary
COPY --from=builder --chown=$APP_USER:$APP_USER /app/target/release/banner ${APP}/banner

# Set proper permissions
RUN chmod +x ${APP}/banner

USER $APP_USER
WORKDIR ${APP}

# Build-time arg for PORT, default to 8000
ARG PORT=8000
# Runtime environment var for PORT, default to build-time arg
ENV PORT=${PORT}
EXPOSE ${PORT}

# Add health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
    CMD wget --no-verbose --tries=1 --spider http://localhost:${PORT}/health || exit 1

# Can be explicitly overridden with different hosts & ports
ENV HOSTS=0.0.0.0,[::]

# Implicitly uses PORT environment variable
# temporary: running without 'scraper' service
CMD ["sh", "-c", "exec ./banner --services web,bot"]
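The runtime image drives its listeners from `HOSTS` (comma-separated) and `PORT`. A sketch of how a binary might expand those into bind addresses; the parsing details are assumptions, not taken from this repository:

```rust
use std::net::SocketAddr;

/// Expand HOSTS=0.0.0.0,[::] and PORT=8000 into socket addresses to bind.
fn bind_addrs() -> Vec<SocketAddr> {
    let port: u16 = std::env::var("PORT")
        .ok()
        .and_then(|p| p.parse().ok())
        .unwrap_or(8000);
    let hosts = std::env::var("HOSTS").unwrap_or_else(|_| "0.0.0.0".into());
    hosts
        .split(',')
        // "[::]:8000" and "0.0.0.0:8000" both parse as SocketAddr.
        .filter_map(|h| format!("{h}:{port}").parse().ok())
        .collect()
}
```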
`Justfile`

@@ -0,0 +1,203 @@
set dotenv-load

default_services := "bot,web,scraper"

default:
    just --list

# Run all checks (format, clippy, tests, lint)
check:
    cargo fmt --all -- --check
    cargo clippy --all-features -- --deny warnings
    cargo nextest run -E 'not test(export_bindings)'
    bun run --cwd web check
    bun run --cwd web test

# Generate TypeScript bindings from Rust types (ts-rs)
bindings:
    cargo test export_bindings

# Run all tests (Rust + frontend)
test: test-rust test-web

# Run only Rust tests (excludes ts-rs bindings generation)
test-rust *ARGS:
    cargo nextest run -E 'not test(export_bindings)' {{ARGS}}

# Run only frontend tests
test-web:
    bun run --cwd web test

# Quick check: clippy + tests + typecheck (skips formatting)
check-quick:
    cargo clippy --all-features -- --deny warnings
    cargo nextest run -E 'not test(export_bindings)'
    bun run --cwd web check

# Run the Banner API search demo (hits live UTSA API, ~20s)
search *ARGS:
    cargo run -q --bin search -- {{ARGS}}

# Format all Rust and TypeScript code
format:
    cargo fmt --all
    bun run --cwd web format

# Check formatting without modifying (CI-friendly)
format-check:
    cargo fmt --all -- --check
    bun run --cwd web format:check

# Start PostgreSQL in Docker and update .env with connection string
# Commands: start (default), reset, rm
[script("bun")]
db cmd="start":
    const fs = await import("fs/promises");
    const { spawnSync } = await import("child_process");

    const NAME = "banner-postgres";
    const USER = "banner";
    const PASS = "banner";
    const DB = "banner";
    const PORT = "59489";
    const ENV_FILE = ".env";
    const CMD = "{{cmd}}";

    const run = (args) => spawnSync("docker", args, { encoding: "utf8" });
    const getContainer = () => {
        const res = run(["ps", "-a", "--filter", `name=^${NAME}$`, "--format", "json"]);
        return res.stdout.trim() ? JSON.parse(res.stdout) : null;
    };

    const updateEnv = async () => {
        const url = `postgresql://${USER}:${PASS}@localhost:${PORT}/${DB}`;
        try {
            let content = await fs.readFile(ENV_FILE, "utf8");
            content = content.includes("DATABASE_URL=")
                ? content.replace(/DATABASE_URL=.*$/m, `DATABASE_URL=${url}`)
                : content.trim() + `\nDATABASE_URL=${url}\n`;
            await fs.writeFile(ENV_FILE, content);
        } catch {
            await fs.writeFile(ENV_FILE, `DATABASE_URL=${url}\n`);
        }
    };

    const create = () => {
        run(["run", "-d", "--name", NAME, "-e", `POSTGRES_USER=${USER}`,
            "-e", `POSTGRES_PASSWORD=${PASS}`, "-e", `POSTGRES_DB=${DB}`,
            "-p", `${PORT}:5432`, "postgres:17-alpine"]);
        console.log("created");
    };

    const container = getContainer();

    if (CMD === "rm") {
        if (!container) process.exit(0);
        run(["stop", NAME]);
        run(["rm", NAME]);
        console.log("removed");
    } else if (CMD === "reset") {
        if (!container) create();
        else {
            run(["exec", NAME, "psql", "-U", USER, "-d", "postgres", "-c", `DROP DATABASE IF EXISTS ${DB}`]);
            run(["exec", NAME, "psql", "-U", USER, "-d", "postgres", "-c", `CREATE DATABASE ${DB}`]);
            console.log("reset");
        }
        await updateEnv();
    } else {
        if (!container) {
            create();
        } else if (container.State !== "running") {
            run(["start", NAME]);
            console.log("started");
        } else {
            console.log("running");
        }
        await updateEnv();
    }

# Auto-reloading frontend server
frontend:
    bun run --cwd web dev

# Production build of frontend
build-frontend:
    bun run --cwd web build

# Auto-reloading backend server (with embedded assets)
backend *ARGS:
    bacon --headless run -- -- {{ARGS}}

# Auto-reloading backend server (no embedded assets, for dev proxy mode)
backend-dev *ARGS:
    bacon --headless run -- --no-default-features -- {{ARGS}}

# Production build
build:
    bun run --cwd web build
    cargo build --release --bin banner

# Run auto-reloading development build with release characteristics
dev-build *ARGS='--services web --tracing pretty': build-frontend
    bacon --headless run -- --profile dev-release -- {{ARGS}}

# Auto-reloading development build: Vite frontend + backend (no embedded assets, proxies to Vite)
[parallel]
dev *ARGS='--services web,bot': frontend (backend-dev ARGS)

# Smoke test: start web server, hit API endpoints, verify responses
[script("bash")]
test-smoke port="18080":
    set -euo pipefail
    PORT={{port}}

    cleanup() { kill "$SERVER_PID" 2>/dev/null; wait "$SERVER_PID" 2>/dev/null; }

    # Start server in background
    PORT=$PORT cargo run -q --no-default-features -- --services web --tracing json &
    SERVER_PID=$!
    trap cleanup EXIT

    # Wait for server to be ready (up to 15s)
    for i in $(seq 1 30); do
        if curl -sf "http://localhost:$PORT/api/health" >/dev/null 2>&1; then break; fi
        if ! kill -0 "$SERVER_PID" 2>/dev/null; then echo "FAIL: server exited early"; exit 1; fi
        sleep 0.5
    done

    PASS=0; FAIL=0
    check() {
        local label="$1" url="$2" expected="$3"
        body=$(curl -sf "$url") || { echo "FAIL: $label - request failed"; FAIL=$((FAIL+1)); return; }
        if echo "$body" | grep -q "$expected"; then
            echo "PASS: $label"
            PASS=$((PASS+1))
        else
            echo "FAIL: $label - expected '$expected' in: $body"
            FAIL=$((FAIL+1))
        fi
    }

    check "GET /api/health" "http://localhost:$PORT/api/health" '"status":"healthy"'
    check "GET /api/status" "http://localhost:$PORT/api/status" '"version"'
    check "GET /api/metrics" "http://localhost:$PORT/api/metrics" '"banner_api"'

    # Test 404
    STATUS=$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:$PORT/api/nonexistent")
    if [ "$STATUS" = "404" ]; then
        echo "PASS: 404 on unknown route"
        PASS=$((PASS+1))
    else
        echo "FAIL: expected 404, got $STATUS"
        FAIL=$((FAIL+1))
    fi

    echo ""
    echo "Results: $PASS passed, $FAIL failed"
    [ "$FAIL" -eq 0 ]

alias b := bun
bun *ARGS:
    cd web && bun {{ ARGS }}

sql *ARGS:
    lazysql ${DATABASE_URL}
`README.md`

@@ -1,125 +1,51 @@
# banner

A Discord bot for executing queries & searches on the Ellucian Banner instance hosting all of UTSA's class data.

## Feature Wishlist

- Commands
  - ICS Download (get an ICS download of your classes with location & timing perfectly set for every class you're in)
  - Classes Now (find classes happening)
  - Autocomplete
    - Class Title
    - Course Number
    - Term/Part of Term
    - Professor
    - Attribute
  - Component Pagination
- RateMyProfessor Integration (Linked/Embedded)
- Smart term selection (i.e. Summer 2024 will be selected automatically when opened)
- Rate Limiting (bursting with global/user limits)
- DMs Integration (allow usage of the bot in DMs)
- Class Change Notifications (get notified when details about a class change)
- Multi-term Querying (currently the backend for searching is kinda weird)
- Full Autocomplete for Every Search Option
- Metrics, Log Query, Privileged Error Feedback
- Search for Classes
  - Major, Professor, Location, Name, Time of Day
- Subscribe to Classes
  - Availability (seat, pre-seat)
  - Waitlist Movement
  - Detail Changes (meta, time, location, seats, professor)
    - `time` Start, End, Days of Week
    - `seats` Any change in seat/waitlist data
    - `meta`
- Lookup via Course Reference Number (CRN)
- Smart Time of Day Handling (see the sketch after this list)
  - "2 PM" -> Start within 2:00 PM to 2:59 PM
  - "2-3 PM" -> Start within 2:00 PM to 3:59 PM
  - "ends by 2 PM" -> Ends within 12:00 AM to 2:00 PM
  - "after 2 PM" -> Start within 2:01 PM to 11:59 PM
  - "before 2 PM" -> Ends within 12:00 AM to 1:59 PM
- Get By Section Command
  - CS 4393 001 =>
    - Will require SQL to be able to search for a class by its section number
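Those time-of-day rules reduce to window arithmetic over minutes since midnight; a small illustrative sketch (the helper is hypothetical, not code from this repository):

```rust
/// Map a clock-phrase hour to a start-time window in minutes since midnight,
/// per the rules above: "2 PM" covers 2:00 PM through 2:59 PM. Ranges and
/// "ends by"/"after"/"before" phrasing would layer on top of this helper.
fn hour_window(hour_12: u32, pm: bool) -> (u32, u32) {
    let h24 = match (hour_12, pm) {
        (12, false) => 0, // 12 AM is midnight
        (12, true) => 12, // 12 PM is noon
        (h, true) => h + 12,
        (h, false) => h,
    };
    let start = h24 * 60; // e.g. "2 PM" -> 840
    (start, start + 59) // ...through 899 (2:59 PM)
}
```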
## Analysis Required

Some of the features and architecture of Ellucian's Banner system are not clear.
The following features, JSON, and more require validation & analysis:

- Struct Nullability
  - Many of the responses provided by Ellucian contain nulls, and for most of them it is uncertain when and why they're null.
  - Analysis must be conducted to be sure of when to use a plain string and when it should be nullable (a pointer).
- Multiple Professors / Primary Indicator
- Multiple Meeting Times
- Meeting Schedule Types
  - AFF vs AIN vs AHB etc.
- Do CRNs repeat between years?
- Check whether partOfTerm is always filled in, and its meaning for various class results.
- Check which API calls are affected by a change in term/sessionID term selection.
- SessionIDs
  - How long does a session ID work?
  - Do I really require a separate one per term?
  - How many can I activate, are there any restrictions?
  - How should session IDs be checked as 'invalid'?
  - What action(s) keep a session ID 'active', if any?
- Are there any courses with multiple meeting times?
- Google Calendar link generation, as an alternative to ICS file generation

## Change Identification

- Important attributes of a class will be parsed from both the old and new data.
- These attributes will be compared and given identifiers that can be subscribed to.
- When a user subscribes to one of these identifiers, any changes identified will be sent to the user.

## Real-time Suggestions

Various command arguments can surface suggestions as the user types.

- They must be fast. As ephemeral suggestions that are only relevant for seconds or less, they need to be delivered in under a second.
- They need to be easy to acquire. With as many commands & arguments to search as I have, it is paramount that the API be easy to understand & use.
- It cannot be complicated. I only have so much time to develop this.
- It does not need to be persistent. Since the data is scraped and rolled periodically from the Banner system, the data used will be deleted and re-requested occasionally.

For these reasons, I believe SQLite to be the ideal place for this data to be stored.
It is exceptionally fast, works well in-memory, and is less complicated than most other solutions.

- Only required data about the class will be stored, along with the JSON-encoded string.
  - For now, this would only be the CRN (and possibly the Term).
  - Potentially, a binary encoding could be used for performance, but it is unlikely to be better.
- Database dumping into R2 would be good to ensure that over-scraping of the Banner system does not occur.
  - Upon a safe close being requested
    - Must be done quickly (<8 seconds)
  - Every 30 minutes, if any scraping occurred.
    - May cause locking of commands.

## Scraping

In order to keep the in-memory database of the bot up-to-date with the Banner system, the API must be scraped.
Scraping will be separated by major to allow priority majors (namely, Computer Science) to be scraped more often than others.
This will lower the overall load on the Banner system while ensuring that data presented by the app is still relevant.

For now, all majors will be scraped fully every 4 hours, with at least 5 minutes between each one.

- On startup, priority majors will be scraped first (if required).
- Other majors will be scraped in arbitrary order (if required).
- Scrape timing will be stored in Redis.
- CRNs will be the Primary Key within SQLite.
  - If CRNs are duplicated between terms, then the primary key will be (CRN, Term).

Considerations:

- A change in metadata should decrease the interval.
- The number of courses scraped should change the interval (2 hours per 500 courses involved).

## Rate Limiting, Costs & Bursting

Ideally, this application would implement dynamic rate limiting to ensure the server is not overloaded.
Better yet, it would also ensure that priority requests (commands) are dispatched faster than background processes (scraping), while weighting different requests differently.
For example, a recent scrape of 350 classes should be weighted 5x more than a user's search for 8 classes.
Still, even if the cap does not normally allow such a request to be processed immediately, the small user search should proceed under a small bursting cap.

The requirements for this hypothetical system would be:

- Conditional Bursting: background processes or other requests deemed "low priority" are not allowed to use bursting.
- Arbitrary Costs: rate limiting is expressed in terms of request size/speed, so that small, simple requests can be made more frequently than large ones.

The replacement README:

# banner

A complex multi-service system providing a Discord bot and browser-based interface to UTSA's course data.

## Services

The application consists of three modular services that can be run independently or together:

- Discord Bot ([`bot`][src-bot])
  - Primary interface for course monitoring and data queries
  - Built with [Serenity][serenity] and [Poise][poise] frameworks for robust command handling
  - Uses slash commands with comprehensive error handling and logging
- Web Server ([`web`][src-web])
  - [Axum][axum]-based server with Vite/React-based frontend
  - [Embeds static assets][rust-embed] at compile time with E-Tags & Cache-Control headers
- Scraper ([`scraper`][src-scraper])
  - Intelligent data collection system with priority-based queuing inside PostgreSQL via [`sqlx`][sqlx]
  - Rate-limited scraping with burst handling to respect UTSA's systems
  - Handles course data updates, availability changes, and metadata synchronization

## Quick Start

```bash
bun install --cwd web  # Install frontend dependencies
cargo build            # Build the backend

just dev                     # Runs auto-reloading dev build
just dev --services bot,web  # Runs auto-reloading dev build, running only the bot and web services
just dev-build               # Development build with release characteristics (frontend is embedded, non-auto-reloading)

just build                   # Production build that embeds assets
```

## Documentation

Comprehensive documentation is available in the [`docs/`][documentation] folder.

[documentation]: docs/README.md
[src-bot]: src/bot
[src-web]: src/web
[src-scraper]: src/scraper
[serenity]: https://github.com/serenity-rs/serenity
[poise]: https://github.com/serenity-rs/poise
[axum]: https://github.com/tokio-rs/axum
[rust-embed]: https://lib.rs/crates/rust-embed
[sqlx]: https://github.com/launchbadge/sqlx
`bacon.toml` (+52)

@@ -0,0 +1,52 @@
# This is a configuration file for the bacon tool
#
# Complete help on configuration: https://dystroy.org/bacon/config/
#
# You may check the current default at
# https://github.com/Canop/bacon/blob/main/defaults/default-bacon.toml

default_job = "check"
env.CARGO_TERM_COLOR = "always"

[jobs.check]
command = ["cargo", "check", "--all-targets"]
need_stdout = false

[jobs.clippy]
command = ["cargo", "clippy", "--all-targets"]
need_stdout = false

[jobs.test]
command = [
    "cargo", "nextest", "run",
]
need_stdout = true
analyzer = "nextest"

[jobs.run]
command = [
    "cargo", "run",
]
need_stdout = true
allow_warnings = true
background = false
on_change_strategy = "kill_then_restart"
# kill = ["pkill", "-TERM", "-P"]

[jobs.dev]
command = [
    "just", "dev"
]
need_stdout = true
allow_warnings = true
background = false
on_change_strategy = "kill_then_restart"

# You may define here keybindings that would be specific to
# a project, for example a shortcut to launch a specific job.
# Shortcuts to internal functions (scrolling, toggling, etc.)
# should go in your personal global prefs.toml file instead.
[keybindings]
c = "job:clippy" # comment this to have 'c' run clippy on only the default target
shift-c = "job:check"
d = "job:dev"
`build.rs`

@@ -0,0 +1,36 @@
use std::process::Command;

fn main() {
    // Try to get Git commit hash from Railway environment variable first
    let git_hash = std::env::var("RAILWAY_GIT_COMMIT_SHA").unwrap_or_else(|_| {
        // Fallback to git command if not on Railway
        let output = Command::new("git").args(["rev-parse", "HEAD"]).output();
        match output {
            Ok(output) => {
                if output.status.success() {
                    String::from_utf8_lossy(&output.stdout).trim().to_string()
                } else {
                    "unknown".to_string()
                }
            }
            Err(_) => "unknown".to_string(),
        }
    });

    // Get the short hash (first 7 characters)
    let short_hash = if git_hash != "unknown" && git_hash.len() >= 7 {
        git_hash[..7].to_string()
    } else {
        git_hash.clone()
    };

    // Set the environment variables that will be available at compile time
    println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_hash);
    println!("cargo:rustc-env=GIT_COMMIT_SHORT={}", short_hash);

    // Rebuild if the Git commit changes (only works when .git directory is available)
    if std::path::Path::new(".git/HEAD").exists() {
        println!("cargo:rerun-if-changed=.git/HEAD");
        println!("cargo:rerun-if-changed=.git/refs/heads");
    }
}
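These `cargo:rustc-env` lines make the hash available to `env!` at compile time; for example (a sketch, not this repository's actual code):

```rust
/// The short hash baked in by build.rs, resolved at compile time.
const COMMIT: &str = env!("GIT_COMMIT_SHORT");

fn version_string() -> String {
    // CARGO_PKG_VERSION comes from Cargo.toml (0.5.0 in this release).
    format!("{} ({})", env!("CARGO_PKG_VERSION"), COMMIT)
}
```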
`docs/ARCHITECTURE.md`

@@ -0,0 +1,117 @@
# Architecture

## System Overview

The Banner project is built as a multi-service application with the following components:

- **Discord Bot Service**: Handles Discord interactions and commands (Serenity/Poise)
- **Web Service**: Axum HTTP server serving the SvelteKit frontend and REST API endpoints
- **Scraper Service**: Background data collection and synchronization with job queue
- **Database Layer**: PostgreSQL 17 for persistent storage (SQLx with compile-time verification)
- **RateMyProfessors Client**: GraphQL-based bulk sync of professor ratings

### Frontend Stack

- **SvelteKit** with Svelte 5 runes (`$state`, `$derived`, `$effect`)
- **Tailwind CSS v4** via `@tailwindcss/vite`
- **bits-ui** for headless UI primitives (comboboxes, tooltips, dropdowns)
- **TanStack Table** for interactive data tables with sorting and column control
- **OverlayScrollbars** for styled, theme-aware scrollable areas
- **ts-rs** generates TypeScript type bindings from Rust structs

### API Endpoints

| Endpoint | Description |
|---|---|
| `GET /api/health` | Health check |
| `GET /api/status` | Service status, version, and commit hash |
| `GET /api/metrics` | Basic metrics |
| `GET /api/courses/search` | Paginated course search with filters (term, subject, query, open-only, sort) |
| `GET /api/courses/:term/:crn` | Single course detail with instructors and RMP ratings |
| `GET /api/terms` | Available terms from reference cache |
| `GET /api/subjects?term=` | Subjects for a term, ordered by enrollment |
| `GET /api/reference/:category` | Reference data lookups (campuses, instructional methods, etc.) |
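As a sketch of how the search endpoint above might be shaped with axum extractors; the parameter names and state type are assumptions based on the filters listed in the table:

```rust
use axum::{
    extract::{Query, State},
    Json,
};
use serde::Deserialize;

#[derive(Deserialize)]
struct SearchParams {
    term: String,
    subject: Option<String>,
    query: Option<String>,
    #[serde(default)]
    open_only: bool,
    page: Option<u32>,
}

// Hypothetical handler: the real one returns a typed, paginated response
// whose shape is exported to TypeScript via ts-rs.
async fn search_courses(
    State(pool): State<sqlx::PgPool>,
    Query(params): Query<SearchParams>,
) -> Json<serde_json::Value> {
    let _ = (&pool, &params); // query construction elided in this sketch
    Json(serde_json::json!({ "results": [], "total": 0 }))
}
```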
## Technical Analysis

### Banner System Integration

Some of the features and architecture of Ellucian's Banner system are not clear.
The following features, JSON, and more require validation & analysis:

- Struct Nullability
  - Many of the responses provided by Ellucian contain nulls, and for most of them it is uncertain when and why they're null.
  - Analysis must be conducted to be sure of when to use a plain string and when it should be nullable.
- Multiple Professors / Primary Indicator
- Multiple Meeting Times
- Meeting Schedule Types
  - AFF vs AIN vs AHB etc.
- Do CRNs repeat between years?
- Check whether partOfTerm is always filled in, and its meaning for various class results.
- Check which API calls are affected by a change in term/sessionID term selection.
- SessionIDs
  - How long does a session ID work?
  - Do I really require a separate one per term?
  - How many can I activate, are there any restrictions?
  - How should session IDs be checked as 'invalid'?
  - What action(s) keep a session ID 'active', if any?
- Are there any courses with multiple meeting times?
- Google Calendar link generation, as an alternative to ICS file generation
## Change Identification

- Important attributes of a class will be parsed from both the old and new data.
- These attributes will be compared and given identifiers that can be subscribed to.
- When a user subscribes to one of these identifiers, any changes identified will be sent to the user (sketched below).
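A sketch of that comparison step under these assumptions; the type and identifier names are illustrative:

```rust
/// Identifiers a user can subscribe to; emitted when old and new
/// course snapshots differ on the corresponding fields.
#[derive(Debug, PartialEq)]
enum Change {
    Seats,
    Time,
    Meta,
}

struct Snapshot {
    enrollment: i32,
    wait_count: i32,
    meeting_times: String, // JSON-encoded, compared as a whole here
    title: String,
}

/// Compare old and new snapshots and list the change identifiers to notify on.
fn diff(old: &Snapshot, new: &Snapshot) -> Vec<Change> {
    let mut changes = Vec::new();
    if old.enrollment != new.enrollment || old.wait_count != new.wait_count {
        changes.push(Change::Seats);
    }
    if old.meeting_times != new.meeting_times {
        changes.push(Change::Time);
    }
    if old.title != new.title {
        changes.push(Change::Meta);
    }
    changes
}
```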
## Real-time Suggestions

Various command arguments can surface suggestions as the user types.

- They must be fast. As ephemeral suggestions that are only relevant for seconds or less, they need to be delivered in under a second.
- They need to be easy to acquire. With as many commands & arguments to search as I have, it is paramount that the API be easy to understand & use.
- It cannot be complicated. I only have so much time to develop this.
- It does not need to be persistent. Since the data is scraped and rolled periodically from the Banner system, the data used will be deleted and re-requested occasionally.

For these reasons, I believe PostgreSQL to be the ideal place for this data to be stored.
It is exceptionally fast, works well in-memory, and is less complicated than most other solutions.

- Only required data about the class will be stored, along with the JSON-encoded string.
  - For now, this would only be the CRN (and possibly the Term).
  - Potentially, a binary encoding could be used for performance, but it is unlikely to be better.
- Database dumping into R2 would be good to ensure that over-scraping of the Banner system does not occur.
  - Upon a safe close being requested
    - Must be done quickly (<8 seconds)
  - Every 30 minutes, if any scraping occurred.
    - May cause locking of commands.
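Given the trigram index added in a later migration, a suggestion query can meet the sub-second budget in a single round trip; a hedged sqlx sketch:

```rust
use sqlx::PgPool;

/// Fast typeahead over course titles. Relies on the pg_trgm GIN index
/// (idx_courses_title_trgm) so the ILIKE scan stays index-assisted.
async fn suggest_titles(pool: &PgPool, fragment: &str) -> sqlx::Result<Vec<String>> {
    sqlx::query_scalar(
        "SELECT DISTINCT title FROM courses WHERE title ILIKE $1 LIMIT 10",
    )
    .bind(format!("%{fragment}%"))
    .fetch_all(pool)
    .await
}
```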
## Scraping System

To keep the bot's database up-to-date with the Banner system, the API must be scraped.
Scraping will be separated by major to allow priority majors (namely, Computer Science) to be scraped more often than others.
This will lower the overall load on the Banner system while ensuring that data presented by the app is still relevant.

For now, all majors will be scraped fully every 4 hours, with at least 5 minutes between each one.

- On startup, priority majors will be scraped first (if required).
- Other majors will be scraped in arbitrary order (if required).
- Scrape timing will be stored in the database.
- CRNs will be the primary key within the database.
  - If CRNs are duplicated between terms, then the primary key will be (CRN, Term).

Considerations:

- A change in metadata should decrease the interval.
- The number of courses scraped should change the interval (2 hours per 500 courses involved; see the sketch below).
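The "2 hours per 500 courses" heuristic as a worked sketch; the clamping bounds are assumptions:

```rust
use std::time::Duration;

/// Scale the rescrape interval with subject size: 2 hours per 500 courses,
/// clamped so tiny subjects are not hammered and huge ones are not starved.
/// For example, 350 courses maps to roughly 1.4 hours.
fn scrape_interval(course_count: u32) -> Duration {
    let hours = (course_count as f64 / 500.0) * 2.0;
    Duration::from_secs_f64(hours.clamp(0.5, 24.0) * 3600.0)
}
```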
## Rate Limiting, Costs & Bursting

Ideally, this application would implement dynamic rate limiting to ensure the server is not overloaded.
Better yet, it would also ensure that priority requests (commands) are dispatched faster than background processes (scraping), while weighting different requests differently.
For example, a recent scrape of 350 classes should be weighted 5x more than a user's search for 8 classes.
Still, even if the cap does not normally allow such a request to be processed immediately, the small user search should proceed under a small bursting cap.

The requirements for this hypothetical system would be:

- Conditional Bursting: background processes or other requests deemed "low priority" are not allowed to use bursting.
- Arbitrary Costs: rate limiting is expressed in terms of request size/speed, so that small, simple requests can be made more frequently than large ones.
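A sketch of weighted costs on top of the `governor` crate, which is already a dependency; the unit budget and cost values are assumptions:

```rust
use governor::{Quota, RateLimiter};
use std::num::NonZeroU32;

fn example() {
    // Global budget: 50 request-units/second with a burst bucket of 100.
    let quota = Quota::per_second(NonZeroU32::new(50).unwrap())
        .allow_burst(NonZeroU32::new(100).unwrap());
    let limiter = RateLimiter::direct(quota);

    // A large scrape costs many units; a small user search costs one.
    let scrape_cost = NonZeroU32::new(25).unwrap();
    let search_cost = NonZeroU32::new(1).unwrap();

    // check_n reports whether the bucket can cover the cost right now
    // (and errors outright if the cost exceeds the bucket's capacity).
    // Low-priority callers would back off instead of dipping into burst room.
    let _scrape_ok = limiter.check_n(scrape_cost);
    let _search_ok = limiter.check_n(search_cost);
}
```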
`docs/BANNER.md`

@@ -1,11 +1,17 @@
# Sessions
# Banner

All notes on the internal workings of the Banner system by Ellucian.

## Sessions

All notes on the internal workings of Sessions in the Banner system.

- Sessions are generated on demand with a random string of characters.
  - The format: `{5 random characters}{milliseconds since epoch}`
  - Example: ``
- Sessions are invalidated after 30 minutes, but this may change.
  - This delay can be found in the original HTML returned: find `meta[name="maxInactiveInterval"]` and read the `content` attribute.
  - This is read at runtime by the javascript on initialization.
  - This is read at runtime (in the browser, by javascript) on initialization.
  - Multiple timers exist; one is the Inactivity Timer.
    - A dialog will appear asking the user to continue their session.
    - If they click the button, the session will be extended via the keepAliveURL (see `meta[name="keepAliveURL"]`).
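The ID format above, as a generation sketch; the alphanumeric alphabet is an assumption (the example value was left blank in these notes):

```rust
use rand::Rng;
use std::time::{SystemTime, UNIX_EPOCH};

/// Build a Banner-style session ID: 5 random alphanumerics followed by
/// the current milliseconds since the Unix epoch.
fn session_id() -> String {
    let mut rng = rand::rng();
    let prefix: String = (0..5)
        .map(|_| rng.sample(rand::distr::Alphanumeric) as char)
        .collect();
    let millis = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("clock before epoch")
        .as_millis();
    format!("{prefix}{millis}")
}
```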
`docs/CHANGELOG.md`

@@ -0,0 +1,61 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).

## [Unreleased]

## [0.5.0] - 2026-01-29

### Added

- Multi-select subject filtering with searchable comboboxes.
- Smart instructor name abbreviation for compact table display.
- Delivery mode indicators and tooltips in the location column.
- Page selector dropdown with animated pagination controls.
- FLIP animations for smooth table row transitions during pagination.
- Time tooltip with detailed meeting schedule and day abbreviations.
- Reusable SimpleTooltip component for consistent UI hints.

### Changed

- Consolidated query logic and eliminated N+1 instructor loads via batch fetching (see the sketch below).
- Consolidated menu snippets and strengthened component type safety.
- Enhanced table scrolling with OverlayScrollbars and theme-aware styling.
- Eliminated initial theme flash on page load.
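The batch fetch behind that N+1 fix might look like this sketch, using the `course_instructors` join table from the migrations; the exact query is an assumption:

```rust
use sqlx::PgPool;

/// One round trip for all instructors on a page of course ids, instead of
/// one query per course (the N+1 pattern this release removed).
async fn instructors_for_courses(
    pool: &PgPool,
    course_ids: &[i32],
) -> sqlx::Result<Vec<(i32, String)>> {
    sqlx::query_as(
        "SELECT ci.course_id, i.display_name
         FROM course_instructors ci
         JOIN instructors i ON i.banner_id = ci.instructor_id
         WHERE ci.course_id = ANY($1)",
    )
    .bind(course_ids)
    .fetch_all(pool)
    .await
}
```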
## [0.4.0] - 2026-01-28

### Added

- Web-based course search UI with interactive data table, multi-column sorting, and column visibility controls.
- TypeScript type bindings generated from Rust types via ts-rs.
- RateMyProfessors integration: bulk professor sync via GraphQL and inline rating display in search results.
- Course detail expansion panel with enrollment, meeting times, and instructor info.
- OverlayScrollbars integration for styled, theme-aware scrollable areas.
- Pagination component for navigating large search result sets.
- Footer component with version display.
- API endpoints: `/api/courses/search`, `/api/courses/:term/:crn`, `/api/terms`, `/api/subjects`, `/api/reference/:category`.
- Frontend API client with typed request/response handling and test coverage.
- Course formatting utilities with comprehensive unit tests.

## [0.3.4] - 2026-01

### Added

- Live service status tracking on web dashboard with auto-refresh and health indicators.
- DB operation extraction for improved testability.
- Unit test suite foundation covering core functionality.
- Docker support for PostgreSQL development environment.
- ICS calendar export with comprehensive holiday exclusion coverage.
- Google Calendar link generation with recurrence rules and meeting details.
- Job queue with priority-based scheduling for background scraping.
- Rate limiting with burst allowance for Banner API requests.
- Session management and caching for Banner API interactions.
- Discord bot commands: search, terms, ics, gcal.
- Intelligent scraping system with priority queues and retry tracking.

### Changed

- Type consolidation and dead code removal across the codebase.
`docs/README.md`

@@ -0,0 +1,43 @@
# Documentation

This folder contains detailed documentation for the Banner project. This file acts as the index.

## Files

- [`CHANGELOG.md`](CHANGELOG.md) - Notable changes by version
- [`ROADMAP.md`](ROADMAP.md) - Planned features and priorities
- [`BANNER.md`](BANNER.md) - General API documentation on the Banner system
- [`ARCHITECTURE.md`](ARCHITECTURE.md) - Technical implementation details, system design, and analysis

## Samples

The `samples/` folder contains real Banner API response examples:

- `search/` - Course search API responses with various filters
  - [`searchResults.json`](samples/search/searchResults.json)
  - [`searchResults_500.json`](samples/search/searchResults_500.json)
  - [`searchResults_CS500.json`](samples/search/searchResults_CS500.json)
  - [`searchResults_malware.json`](samples/search/searchResults_malware.json)
- `meta/` - Metadata API responses (terms, subjects, instructors, etc.)
  - [`get_attribute.json`](samples/meta/get_attribute.json)
  - [`get_campus.json`](samples/meta/get_campus.json)
  - [`get_instructionalMethod.json`](samples/meta/get_instructionalMethod.json)
  - [`get_instructor.json`](samples/meta/get_instructor.json)
  - [`get_partOfTerm.json`](samples/meta/get_partOfTerm.json)
  - [`get_subject.json`](samples/meta/get_subject.json)
  - [`getTerms.json`](samples/meta/getTerms.json)
- `course/` - Course detail API responses (HTML and JSON)
  - [`getFacultyMeetingTimes.json`](samples/course/getFacultyMeetingTimes.json)
  - [`getClassDetails.html`](samples/course/getClassDetails.html)
  - [`getCorequisites.html`](samples/course/getCorequisites.html)
  - [`getCourseDescription.html`](samples/course/getCourseDescription.html)
  - [`getEnrollmentInfo.html`](samples/course/getEnrollmentInfo.html)
  - [`getFees.html`](samples/course/getFees.html)
  - [`getLinkedSections.html`](samples/course/getLinkedSections.html)
  - [`getRestrictions.html`](samples/course/getRestrictions.html)
  - [`getSectionAttributes.html`](samples/course/getSectionAttributes.html)
  - [`getSectionBookstoreDetails.html`](samples/course/getSectionBookstoreDetails.html)
  - [`getSectionPrerequisites.html`](samples/course/getSectionPrerequisites.html)
  - [`getXlistSections.html`](samples/course/getXlistSections.html)

These samples are used for development, testing, and understanding the Banner API structure.
`docs/ROADMAP.md`

@@ -0,0 +1,35 @@
# Roadmap

## Now

- **Notification and subscription system** - Subscribe to courses and get alerts on seat availability, waitlist movement, and detail changes (time, location, professor, seats). DB schema exists.
- **Professor name search filter** - Filter search results by instructor. Backend code exists but is commented out.
- **Autocomplete for search fields** - Typeahead for course titles, course numbers, professors, and terms.
- **Test coverage expansion** - Broaden coverage with session/rate-limiter tests and more DB integration tests.

## Soon

- **Smart time-of-day search parsing** - Support natural queries like "2 PM", "2-3 PM", "ends by 2 PM", "after 2 PM", "before 2 PM" mapped to time ranges.
- **Section-based lookup** - Search by full section identifier, e.g. "CS 4393 001".
- **Search result pagination** - Paginated embeds for large result sets in Discord.
- **Multi-term querying** - Query across multiple terms in a single search instead of one at a time.
- **Historical analytics** - Track seat availability over time and visualize fill-rate trends per course or professor.
- **Schedule builder** - Visual weekly schedule tool for assembling a conflict-free course lineup.
- **Professor stats** - Aggregate data views: average class size, typical waitlist length, schedule patterns across semesters.

## Eventually

- **Degree audit helper** - Map available courses to degree requirements and suggest what to take next.
- **Dynamic scraper scheduling** - Adjust scrape intervals based on change frequency and course count (e.g. 2 hours per 500 courses, shorter intervals when changes are detected).
- **DM support** - Allow the Discord bot to respond in direct messages, not just guild channels.
- **"Classes Now" command** - Find classes currently in session based on the current day and time.
- **CRN direct lookup** - Look up a course by its CRN without going through search.
- **Metrics dashboard** - Surface scraper and service metrics visually on the web dashboard.
- **Privileged error feedback** - Detailed error information surfaced to bot admins when commands fail.

## Done

- **Web course search UI** - Browser-based course search with interactive data table, sorting, pagination, and column controls. *(0.4.0)*
- **RateMyProfessor integration** - Bulk professor sync via GraphQL with inline ratings in search results. *(0.4.0)*
- **Subject/major search filter** - Multi-select subject filtering with searchable comboboxes. *(0.5.0)*
- **Test coverage expansion** - Unit tests for course formatting, API client, query builder, CLI args, and config parsing. *(0.3.4–0.4.0)*
SQL migration (initial schema)

@@ -0,0 +1,56 @@
-- Drop all old tables
DROP TABLE IF EXISTS scrape_jobs;
DROP TABLE IF EXISTS course_metrics;
DROP TABLE IF EXISTS course_audits;
DROP TABLE IF EXISTS courses;

-- Enums for scrape_jobs
CREATE TYPE scrape_priority AS ENUM ('Low', 'Medium', 'High', 'Critical');
CREATE TYPE target_type AS ENUM ('Subject', 'CourseRange', 'CrnList', 'SingleCrn');

-- Main course data table
CREATE TABLE courses (
    id SERIAL PRIMARY KEY,
    crn VARCHAR NOT NULL,
    subject VARCHAR NOT NULL,
    course_number VARCHAR NOT NULL,
    title VARCHAR NOT NULL,
    term_code VARCHAR NOT NULL,
    enrollment INTEGER NOT NULL,
    max_enrollment INTEGER NOT NULL,
    wait_count INTEGER NOT NULL,
    wait_capacity INTEGER NOT NULL,
    last_scraped_at TIMESTAMPTZ NOT NULL,
    UNIQUE(crn, term_code)
);

-- Time-series data for course enrollment
CREATE TABLE course_metrics (
    id SERIAL PRIMARY KEY,
    course_id INTEGER NOT NULL REFERENCES courses(id) ON DELETE CASCADE,
    timestamp TIMESTAMPTZ NOT NULL,
    enrollment INTEGER NOT NULL,
    wait_count INTEGER NOT NULL,
    seats_available INTEGER NOT NULL
);

-- Audit trail for changes to course data
CREATE TABLE course_audits (
    id SERIAL PRIMARY KEY,
    course_id INTEGER NOT NULL REFERENCES courses(id) ON DELETE CASCADE,
    timestamp TIMESTAMPTZ NOT NULL,
    field_changed VARCHAR NOT NULL,
    old_value TEXT NOT NULL,
    new_value TEXT NOT NULL
);

-- Job queue for the scraper
CREATE TABLE scrape_jobs (
    id SERIAL PRIMARY KEY,
    target_type target_type NOT NULL,
    target_payload JSONB NOT NULL,
    priority scrape_priority NOT NULL,
    execute_at TIMESTAMPTZ NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    locked_at TIMESTAMPTZ
);
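The `locked_at` column supports claim-based dispatch. A sketch of how a worker might claim the next due job without double-processing; the exact query is an assumption, though `FOR UPDATE SKIP LOCKED` is the standard Postgres pattern for this shape of queue:

```rust
use sqlx::PgPool;

/// Claim the next due job. Workers compete without blocking each other via
/// FOR UPDATE SKIP LOCKED, and the row is stamped so stuck jobs are visible.
/// Enum ordering follows definition order, so priority DESC yields Critical first.
async fn claim_job(pool: &PgPool) -> sqlx::Result<Option<i32>> {
    sqlx::query_scalar(
        "UPDATE scrape_jobs SET locked_at = NOW()
         WHERE id = (
             SELECT id FROM scrape_jobs
             WHERE locked_at IS NULL AND execute_at <= NOW()
             ORDER BY priority DESC, execute_at ASC
             FOR UPDATE SKIP LOCKED
             LIMIT 1
         )
         RETURNING id",
    )
    .fetch_optional(pool)
    .await
}
```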
SQL migration (retry tracking)

@@ -0,0 +1,3 @@
-- Add retry tracking columns to scrape_jobs table
ALTER TABLE scrape_jobs ADD COLUMN retry_count INTEGER NOT NULL DEFAULT 0 CHECK (retry_count >= 0);
ALTER TABLE scrape_jobs ADD COLUMN max_retries INTEGER NOT NULL DEFAULT 5 CHECK (max_retries >= 0);
SQL migration (performance indexes)

@@ -0,0 +1,45 @@
-- Performance optimization indexes

-- Index for term-based queries (most common access pattern)
CREATE INDEX IF NOT EXISTS idx_courses_term_code ON courses(term_code);

-- Index for subject-based filtering
CREATE INDEX IF NOT EXISTS idx_courses_subject ON courses(subject);

-- Composite index for subject + term queries
CREATE INDEX IF NOT EXISTS idx_courses_subject_term ON courses(subject, term_code);

-- Index for course number lookups
CREATE INDEX IF NOT EXISTS idx_courses_course_number ON courses(course_number);

-- Index for last scraped timestamp (useful for finding stale data)
CREATE INDEX IF NOT EXISTS idx_courses_last_scraped ON courses(last_scraped_at);

-- Index for course metrics time-series queries
-- BRIN index is optimal for time-series data
CREATE INDEX IF NOT EXISTS idx_course_metrics_timestamp ON course_metrics USING BRIN(timestamp);

-- B-tree index for specific course metric lookups
CREATE INDEX IF NOT EXISTS idx_course_metrics_course_timestamp
    ON course_metrics(course_id, timestamp DESC);

-- Partial index for pending scrape jobs (only unlocked jobs)
CREATE INDEX IF NOT EXISTS idx_scrape_jobs_pending
    ON scrape_jobs(execute_at ASC)
    WHERE locked_at IS NULL;

-- Index for high-priority job processing
CREATE INDEX IF NOT EXISTS idx_scrape_jobs_priority_pending
    ON scrape_jobs(priority DESC, execute_at ASC)
    WHERE locked_at IS NULL;

-- Index for retry tracking
CREATE INDEX IF NOT EXISTS idx_scrape_jobs_retry_count
    ON scrape_jobs(retry_count)
    WHERE retry_count > 0 AND locked_at IS NULL;

-- Analyze tables to update statistics
ANALYZE courses;
ANALYZE course_metrics;
ANALYZE course_audits;
ANALYZE scrape_jobs;
SQL migration (index optimization follow-up)

@@ -0,0 +1,53 @@
-- Index Optimization Follow-up Migration

-- Reason: redundant with composite index idx_courses_subject_term
DROP INDEX IF EXISTS idx_courses_subject;

-- Remove: idx_scrape_jobs_retry_count
DROP INDEX IF EXISTS idx_scrape_jobs_retry_count;

-- Purpose: Optimize the scheduler's frequent query (runs every 60 seconds)
CREATE INDEX IF NOT EXISTS idx_scrape_jobs_scheduler_lookup
    ON scrape_jobs(target_type, target_payload)
    WHERE locked_at IS NULL;

-- Note: We use (target_type, target_payload) instead of including locked_at
-- in the index columns because:
-- 1. The WHERE clause filters locked_at IS NULL (partial index optimization)
-- 2. target_payload is JSONB and already large; keeping it as an indexed column
--    allows PostgreSQL to use index-only scans for the SELECT target_payload query
-- 3. This design minimizes index size while maximizing query performance

-- Purpose: Enable efficient audit trail queries by course
CREATE INDEX IF NOT EXISTS idx_course_audits_course_timestamp
    ON course_audits(course_id, timestamp DESC);

-- Purpose: Enable queries like "Show all changes in the last 24 hours"
CREATE INDEX IF NOT EXISTS idx_course_audits_timestamp
    ON course_audits(timestamp DESC);

-- The BRIN index on course_metrics(timestamp) assumes data is inserted in
-- chronological order. BRIN indexes are only effective when data is physically
-- ordered on disk. If you perform:
--   - Backfills of historical data
--   - Out-of-order inserts
--   - Frequent UPDATEs that move rows
--
-- then the BRIN index effectiveness will degrade. Monitor with:
--   SELECT * FROM brin_page_items(get_raw_page('idx_course_metrics_timestamp', 1));
--
-- If you see poor selectivity, consider:
--   1. REINDEX to rebuild after bulk loads
--   2. Switching to B-tree if inserts are not time-ordered
--   3. Using CLUSTER to physically reorder the table (requires downtime)

COMMENT ON INDEX idx_course_metrics_timestamp IS
    'BRIN index - requires chronologically ordered inserts for efficiency. Monitor selectivity.';

-- Update statistics for query planner
ANALYZE courses;
ANALYZE course_metrics;
ANALYZE course_audits;
ANALYZE scrape_jobs;
@@ -0,0 +1,83 @@
-- ============================================================
-- Expand courses table with rich Banner API fields
-- ============================================================

-- Section identifiers
ALTER TABLE courses ADD COLUMN sequence_number VARCHAR;
ALTER TABLE courses ADD COLUMN part_of_term VARCHAR;

-- Schedule & delivery (store codes, descriptions come from reference_data)
ALTER TABLE courses ADD COLUMN instructional_method VARCHAR;
ALTER TABLE courses ADD COLUMN campus VARCHAR;

-- Credit hours
ALTER TABLE courses ADD COLUMN credit_hours INTEGER;
ALTER TABLE courses ADD COLUMN credit_hour_low INTEGER;
ALTER TABLE courses ADD COLUMN credit_hour_high INTEGER;

-- Cross-listing
ALTER TABLE courses ADD COLUMN cross_list VARCHAR;
ALTER TABLE courses ADD COLUMN cross_list_capacity INTEGER;
ALTER TABLE courses ADD COLUMN cross_list_count INTEGER;

-- Section linking
ALTER TABLE courses ADD COLUMN link_identifier VARCHAR;
ALTER TABLE courses ADD COLUMN is_section_linked BOOLEAN;

-- JSONB columns for 1-to-many data
ALTER TABLE courses ADD COLUMN meeting_times JSONB NOT NULL DEFAULT '[]'::jsonb;
ALTER TABLE courses ADD COLUMN attributes JSONB NOT NULL DEFAULT '[]'::jsonb;

-- ============================================================
-- Full-text search support
-- ============================================================

-- Generated tsvector for word-based search on title
ALTER TABLE courses ADD COLUMN title_search tsvector
    GENERATED ALWAYS AS (to_tsvector('simple', coalesce(title, ''))) STORED;

CREATE INDEX idx_courses_title_search ON courses USING GIN (title_search);

-- Trigram index for substring/ILIKE search on title
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE INDEX idx_courses_title_trgm ON courses USING GIN (title gin_trgm_ops);
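The two title indexes above serve different query shapes; a minimal sketch of the intended usage, referencing only columns defined in this migration:

-- Word-based search goes through the generated tsvector column:
SELECT id, title FROM courses
WHERE title_search @@ plainto_tsquery('simple', 'organic chemistry');

-- Substring search goes through the trigram index:
SELECT id, title FROM courses
WHERE title ILIKE '%chem%';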

-- ============================================================
-- New filter indexes
-- ============================================================

CREATE INDEX idx_courses_instructional_method ON courses(instructional_method);
CREATE INDEX idx_courses_campus ON courses(campus);

-- Composite for "open CS courses in Fall 2024" pattern
CREATE INDEX idx_courses_term_subject_avail ON courses(term_code, subject, max_enrollment, enrollment);
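For reference, the shape of the query this composite index is built for (the term and subject literals are illustrative):

SELECT id, title FROM courses
WHERE term_code = '202420'
  AND subject = 'CS'
  AND enrollment < max_enrollment;  -- "open" = seats remaining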

-- ============================================================
-- Instructors table (normalized, deduplicated)
-- ============================================================

CREATE TABLE instructors (
    banner_id VARCHAR PRIMARY KEY,
    display_name VARCHAR NOT NULL,
    email VARCHAR
);

CREATE TABLE course_instructors (
    course_id INTEGER NOT NULL REFERENCES courses(id) ON DELETE CASCADE,
    instructor_id VARCHAR NOT NULL REFERENCES instructors(banner_id) ON DELETE CASCADE,
    is_primary BOOLEAN NOT NULL DEFAULT false,
    PRIMARY KEY (course_id, instructor_id)
);

CREATE INDEX idx_course_instructors_instructor ON course_instructors(instructor_id);

-- ============================================================
-- Reference data table (all code→description lookups)
-- ============================================================

CREATE TABLE reference_data (
    category VARCHAR NOT NULL,
    code VARCHAR NOT NULL,
    description VARCHAR NOT NULL,
    PRIMARY KEY (category, code)
);
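A sketch of the lookup pattern reference_data enables; the 'campus' category label here is an assumption (the scraper decides the actual category names):

SELECT c.id, c.title, r.description AS campus_name
FROM courses c
JOIN reference_data r
  ON r.category = 'campus'
 AND r.code = c.campus;
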
@@ -0,0 +1,17 @@
-- RMP professor data (bulk synced from RateMyProfessors)
CREATE TABLE rmp_professors (
    legacy_id INTEGER PRIMARY KEY,
    graphql_id VARCHAR NOT NULL,
    first_name VARCHAR NOT NULL,
    last_name VARCHAR NOT NULL,
    department VARCHAR,
    avg_rating REAL,
    avg_difficulty REAL,
    num_ratings INTEGER NOT NULL DEFAULT 0,
    would_take_again_pct REAL,
    last_synced_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Link Banner instructors to RMP professors
ALTER TABLE instructors ADD COLUMN rmp_legacy_id INTEGER REFERENCES rmp_professors(legacy_id);
ALTER TABLE instructors ADD COLUMN rmp_match_status VARCHAR NOT NULL DEFAULT 'pending';
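Once an instructor has been matched, ratings join in directly; a sketch (the 'matched' status value is an assumption — this migration only defines the 'pending' default):

SELECT i.display_name, p.avg_rating, p.avg_difficulty, p.num_ratings
FROM instructors i
JOIN rmp_professors p ON p.legacy_id = i.rmp_legacy_id
WHERE i.rmp_match_status = 'matched';
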
+170
@@ -0,0 +1,170 @@
use crate::banner::BannerApi;
use crate::cli::ServiceName;
use crate::config::Config;
use crate::scraper::ScraperService;
use crate::services::bot::BotService;
use crate::services::manager::ServiceManager;
use crate::services::web::WebService;
use crate::state::AppState;
use anyhow::Context;
use figment::value::UncasedStr;
use figment::{Figment, providers::Env};
use sqlx::postgres::PgPoolOptions;
use std::process::ExitCode;
use std::sync::Arc;
use std::time::Duration;
use tracing::{error, info};

/// Main application struct containing all necessary components
pub struct App {
    config: Config,
    db_pool: sqlx::PgPool,
    banner_api: Arc<BannerApi>,
    app_state: AppState,
    service_manager: ServiceManager,
}

impl App {
    /// Create a new App instance with all necessary components initialized
    pub async fn new() -> Result<Self, anyhow::Error> {
        // Load configuration
        let config: Config = Figment::new()
            .merge(Env::raw().map(|k| {
                if k == UncasedStr::new("RAILWAY_DEPLOYMENT_DRAINING_SECONDS") {
                    "SHUTDOWN_TIMEOUT".into()
                } else {
                    k.into()
                }
            }))
            .extract()
            .context("Failed to load config")?;

        // Check whether the database URL goes over private networking
        let is_private = config.database_url.contains("railway.internal");
        let slow_threshold = Duration::from_millis(if is_private { 200 } else { 500 });

        // Create database connection pool
        let db_pool = PgPoolOptions::new()
            .min_connections(0)
            .max_connections(4)
            .acquire_slow_threshold(slow_threshold)
            .acquire_timeout(Duration::from_secs(4))
            .idle_timeout(Duration::from_secs(60 * 2))
            .max_lifetime(Duration::from_secs(60 * 30))
            .connect(&config.database_url)
            .await
            .context("Failed to create database pool")?;

        info!(
            is_private = is_private,
            slow_threshold = format!("{:.2?}", slow_threshold),
            "database pool established"
        );

        // Run database migrations
        info!("Running database migrations...");
        sqlx::migrate!("./migrations")
            .run(&db_pool)
            .await
            .context("Failed to run database migrations")?;
        info!("Database migrations completed successfully");

        // Create BannerApi and AppState
        let banner_api = BannerApi::new_with_config(
            config.banner_base_url.clone(),
            config.rate_limiting.clone(),
        )
        .context("Failed to create BannerApi")?;

        let banner_api_arc = Arc::new(banner_api);
        let app_state = AppState::new(banner_api_arc.clone(), db_pool.clone());

        // Load reference data cache from DB (may be empty on first run)
        if let Err(e) = app_state.load_reference_cache().await {
            info!(error = ?e, "Could not load reference cache on startup (may be empty)");
        }

        Ok(App {
            config,
            db_pool,
            banner_api: banner_api_arc,
            app_state,
            service_manager: ServiceManager::new(),
        })
    }

    /// Set up and register services based on the enabled service list
    pub fn setup_services(&mut self, services: &[ServiceName]) -> Result<(), anyhow::Error> {
        // Register enabled services with the manager
        if services.contains(&ServiceName::Web) {
            let web_service = Box::new(WebService::new(self.config.port, self.app_state.clone()));
            self.service_manager
                .register_service(ServiceName::Web.as_str(), web_service);
        }

        if services.contains(&ServiceName::Scraper) {
            let scraper_service = Box::new(ScraperService::new(
                self.db_pool.clone(),
                self.banner_api.clone(),
                self.app_state.reference_cache.clone(),
                self.app_state.service_statuses.clone(),
            ));
            self.service_manager
                .register_service(ServiceName::Scraper.as_str(), scraper_service);
        }

        // Check if any services are enabled
        if !self.service_manager.has_services() && !services.contains(&ServiceName::Bot) {
            error!("No services enabled. Cannot start application.");
            return Err(anyhow::anyhow!("No services enabled"));
        }

        Ok(())
    }

    /// Set up the bot service if enabled
    pub async fn setup_bot_service(&mut self) -> Result<(), anyhow::Error> {
        use std::sync::Arc;
        use tokio::sync::{Mutex, broadcast};

        // Create shutdown channel for the status update task
        let (status_shutdown_tx, status_shutdown_rx) = broadcast::channel(1);
        let status_task_handle = Arc::new(Mutex::new(None));

        let client = BotService::create_client(
            &self.config,
            self.app_state.clone(),
            status_task_handle.clone(),
            status_shutdown_rx,
        )
        .await
        .context("Failed to create Discord client")?;

        let bot_service = Box::new(BotService::new(
            client,
            status_task_handle,
            status_shutdown_tx,
            self.app_state.service_statuses.clone(),
        ));

        self.service_manager
            .register_service(ServiceName::Bot.as_str(), bot_service);
        Ok(())
    }

    /// Start all registered services
    pub fn start_services(&mut self) {
        self.service_manager.spawn_all();
    }

    /// Run the application and handle shutdown signals
    pub async fn run(self) -> ExitCode {
        use crate::signals::handle_shutdown_signals;
        handle_shutdown_signals(self.service_manager, self.config.shutdown_timeout).await
    }

    /// Get a reference to the configuration
    pub fn config(&self) -> &Config {
        &self.config
    }
}
@@ -1,48 +0,0 @@
//! Application state shared across components (bot, web, scheduler).

use crate::banner::BannerApi;
use crate::banner::Course;
use anyhow::Result;
use redis::AsyncCommands;
use redis::Client;
use std::sync::Arc;

#[derive(Clone, Debug)]
pub struct AppState {
    pub banner_api: Arc<BannerApi>,
    pub redis: Arc<Client>,
}

impl AppState {
    pub fn new(
        banner_api: Arc<BannerApi>,
        redis_url: &str,
    ) -> Result<Self, Box<dyn std::error::Error + Send + Sync>> {
        let redis_client = Client::open(redis_url)?;

        Ok(Self {
            banner_api,
            redis: Arc::new(redis_client),
        })
    }

    /// Get a course by CRN with Redis cache fallback to Banner API
    pub async fn get_course_or_fetch(&self, term: &str, crn: &str) -> Result<Course> {
        let mut conn = self.redis.get_multiplexed_async_connection().await?;

        let key = format!("class:{crn}");
        if let Some(serialized) = conn.get::<_, Option<String>>(&key).await? {
            let course: Course = serde_json::from_str(&serialized)?;
            return Ok(course);
        }

        // Fallback: fetch from Banner API
        if let Some(course) = self.banner_api.get_course_by_crn(term, crn).await? {
            let serialized = serde_json::to_string(&course)?;
            let _: () = conn.set(&key, serialized).await?;
            return Ok(course);
        }

        Err(anyhow::anyhow!("Course not found for CRN {crn}"))
    }
}
+215
-285
@@ -1,87 +1,219 @@
//! Main Banner API client implementation.

use crate::banner::{models::*, query::SearchQuery, session::SessionManager, util::user_agent};
use anyhow::{Context, Result};
use axum::http::HeaderValue;
use reqwest::Client;
use serde_json;
use std::collections::HashMap;

use tracing::{error, info};
use crate::banner::{
    SessionPool, create_shared_rate_limiter, errors::BannerApiError, json::parse_json_with_context,
    middleware::TransparentMiddleware, models::*, nonce, query::SearchQuery,
    rate_limit_middleware::RateLimitMiddleware, util::user_agent,
};
use crate::config::RateLimitingConfig;
use anyhow::{Context, Result, anyhow};
use http::HeaderValue;
use reqwest::Client;
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
use tracing::debug;

/// Main Banner API client.
#[derive(Debug)]
pub struct BannerApi {
    session_manager: SessionManager,
    client: Client,
    pub sessions: SessionPool,
    http: ClientWithMiddleware,
    base_url: String,
}

impl BannerApi {
    /// Creates a new Banner API client.
    #[allow(dead_code)]
    pub fn new(base_url: String) -> Result<Self> {
        let client = Client::builder()
            .cookie_store(true)
            .user_agent(user_agent())
            .tcp_keepalive(Some(std::time::Duration::from_secs(60 * 5)))
            .read_timeout(std::time::Duration::from_secs(10))
            .connect_timeout(std::time::Duration::from_secs(10))
            .timeout(std::time::Duration::from_secs(30))
            .build()
            .context("Failed to create HTTP client")?;
        Self::new_with_config(base_url, RateLimitingConfig::default())
    }

        let session_manager = SessionManager::new(base_url.clone(), client.clone());
    /// Creates a new Banner API client with custom rate limiting configuration.
    pub fn new_with_config(
        base_url: String,
        rate_limit_config: RateLimitingConfig,
    ) -> Result<Self> {
        let rate_limiter = create_shared_rate_limiter(Some(rate_limit_config));

        let http = ClientBuilder::new(
            Client::builder()
                .cookie_store(false)
                .user_agent(user_agent())
                .tcp_keepalive(Some(std::time::Duration::from_secs(60 * 5)))
                .read_timeout(std::time::Duration::from_secs(10))
                .connect_timeout(std::time::Duration::from_secs(10))
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .context("Failed to create HTTP client")?,
        )
        .with(TransparentMiddleware)
        .with(RateLimitMiddleware::new(rate_limiter.clone()))
        .build();

        Ok(Self {
            session_manager,
            client,
            sessions: SessionPool::new(http.clone(), base_url.clone()),
            http,
            base_url,
        })
    }

    /// Sets up the API client by initializing session cookies.
    pub async fn setup(&self) -> Result<()> {
        info!(base_url = self.base_url, "setting up banner api client");
        let result = self.session_manager.setup().await;
        match &result {
            Ok(()) => info!("banner api client setup completed successfully"),
            Err(e) => error!(error = ?e, "banner api client setup failed"),
    /// Validates offset parameter for search methods.
    fn validate_offset(offset: i32) -> Result<()> {
        if offset <= 0 {
            Err(anyhow::anyhow!("Offset must be greater than 0"))
        } else {
            Ok(())
        }
        result
    }

    /// Retrieves a list of terms from the Banner API.
    pub async fn get_terms(
    /// Builds common search parameters for list endpoints.
    fn build_list_params(
        &self,
        search: &str,
        page: i32,
        term: &str,
        offset: i32,
        max_results: i32,
    ) -> Result<Vec<BannerTerm>> {
        if page <= 0 {
            return Err(anyhow::anyhow!("Page must be greater than 0"));
        }
        session_id: &str,
    ) -> Vec<(&str, String)> {
        vec![
            ("searchTerm", search.to_string()),
            ("term", term.to_string()),
            ("offset", offset.to_string()),
            ("max", max_results.to_string()),
            ("uniqueSessionId", session_id.to_string()),
            ("_", nonce()),
        ]
    }

        let url = format!("{}/classSearch/getTerms", self.base_url);
        let params = [
            ("searchTerm", search),
            ("offset", &page.to_string()),
            ("max", &max_results.to_string()),
            ("_", &SessionManager::nonce()),
        ];
    /// Makes a GET request to a list endpoint and parses JSON response.
    async fn get_list_endpoint<T>(
        &self,
        endpoint: &str,
        search: &str,
        term: &str,
        offset: i32,
        max_results: i32,
    ) -> Result<Vec<T>>
    where
        T: for<'de> serde::Deserialize<'de>,
    {
        Self::validate_offset(offset)?;

        let session = self.sessions.acquire(term.parse()?).await?;
        let url = format!("{}/classSearch/{}", self.base_url, endpoint);
        let params = self.build_list_params(search, term, offset, max_results, session.id());

        let response = self
            .client
            .http
            .get(&url)
            .query(&params)
            .send()
            .await
            .context("Failed to get terms")?;
            .with_context(|| format!("Failed to get {}", endpoint))?;

        let terms: Vec<BannerTerm> = response
        let data: Vec<T> = response
            .json()
            .await
            .context("Failed to parse terms response")?;
            .with_context(|| format!("Failed to parse {} response", endpoint))?;

        Ok(terms)
        Ok(data)
    }

    /// Builds search parameters for course search methods.
    fn build_search_params(
        &self,
        query: &SearchQuery,
        term: &str,
        session_id: &str,
        sort: &str,
        sort_descending: bool,
    ) -> HashMap<String, String> {
        let mut params = query.to_params();
        params.insert("txt_term".to_string(), term.to_string());
        params.insert("uniqueSessionId".to_string(), session_id.to_string());
        params.insert("sortColumn".to_string(), sort.to_string());
        params.insert(
            "sortDirection".to_string(),
            if sort_descending { "desc" } else { "asc" }.to_string(),
        );
        params.insert("startDatepicker".to_string(), String::new());
        params.insert("endDatepicker".to_string(), String::new());
        params
    }

    /// Performs a course search and handles common response processing.
    #[tracing::instrument(
        skip(self, query, sort, sort_descending),
        fields(term = %term)
    )]
    async fn perform_search(
        &self,
        term: &str,
        query: &SearchQuery,
        sort: &str,
        sort_descending: bool,
    ) -> Result<SearchResult, BannerApiError> {
        let mut session = self.sessions.acquire(term.parse()?).await?;

        if session.been_used() {
            self.http
                .post(format!("{}/classSearch/resetDataForm", self.base_url))
                .header("Cookie", session.cookie())
                .send()
                .await
                .map_err(|e| BannerApiError::RequestFailed(e.into()))?;
        }

        session.touch();

        let params = self.build_search_params(query, term, session.id(), sort, sort_descending);

        debug!(
            term = term,
            subject = query.get_subject().map(|s| s.as_str()).unwrap_or("all"),
            max_results = query.get_max_results(),
            "Searching for courses"
        );

        let response = self
            .http
            .get(format!("{}/searchResults/searchResults", self.base_url))
            .header("Cookie", session.cookie())
            .query(&params)
            .send()
            .await
            .context("Failed to search courses")?;

        let status = response.status();
        let url = response.url().clone();
        let body = response
            .text()
            .await
            .with_context(|| format!("Failed to read body (status={status})"))?;

        let search_result: SearchResult = parse_json_with_context(&body).map_err(|e| {
            BannerApiError::RequestFailed(anyhow!(
                "Failed to parse search response (status={status}, url={url}): {e}"
            ))
        })?;

        // Check for signs of an invalid session
        if search_result.path_mode.is_none() {
            return Err(BannerApiError::InvalidSession(
                "Search result path mode is none".to_string(),
            ));
        } else if search_result.data.is_none() {
            return Err(BannerApiError::InvalidSession(
                "Search result data is none".to_string(),
            ));
        }

        if !search_result.success {
            return Err(BannerApiError::RequestFailed(anyhow!(
                "Search marked as unsuccessful by Banner API"
            )));
        }

        Ok(search_result)
    }

    /// Retrieves a list of subjects from the Banner API.
@@ -92,113 +224,31 @@ impl BannerApi {
        offset: i32,
        max_results: i32,
    ) -> Result<Vec<Pair>> {
        if offset <= 0 {
            return Err(anyhow::anyhow!("Offset must be greater than 0"));
        }

        let session_id = self.session_manager.ensure_session()?;
        let url = format!("{}/classSearch/get_subject", self.base_url);
        let params = [
            ("searchTerm", search),
            ("term", term),
            ("offset", &offset.to_string()),
            ("max", &max_results.to_string()),
            ("uniqueSessionId", &session_id),
            ("_", &SessionManager::nonce()),
        ];

        let response = self
            .client
            .get(&url)
            .query(&params)
            .send()
        self.get_list_endpoint("get_subject", search, term, offset, max_results)
            .await
            .context("Failed to get subjects")?;

        let subjects: Vec<Pair> = response
            .json()
            .await
            .context("Failed to parse subjects response")?;

        Ok(subjects)
    }

    /// Retrieves a list of instructors from the Banner API.
    pub async fn get_instructors(
        &self,
        search: &str,
        term: &str,
        offset: i32,
        max_results: i32,
    ) -> Result<Vec<Instructor>> {
        if offset <= 0 {
            return Err(anyhow::anyhow!("Offset must be greater than 0"));
        }

        let session_id = self.session_manager.ensure_session()?;
        let url = format!("{}/classSearch/get_instructor", self.base_url);
        let params = [
            ("searchTerm", search),
            ("term", term),
            ("offset", &offset.to_string()),
            ("max", &max_results.to_string()),
            ("uniqueSessionId", &session_id),
            ("_", &SessionManager::nonce()),
        ];

        let response = self
            .client
            .get(&url)
            .query(&params)
            .send()
            .await
            .context("Failed to get instructors")?;

        let instructors: Vec<Instructor> = response
            .json()
            .await
            .context("Failed to parse instructors response")?;

        Ok(instructors)
    /// Retrieves campus codes and descriptions.
    pub async fn get_campuses(&self, term: &str) -> Result<Vec<Pair>> {
        self.get_list_endpoint("get_campus", "", term, 1, 500).await
    }

    /// Retrieves a list of campuses from the Banner API.
    pub async fn get_campuses(
        &self,
        search: &str,
        term: &str,
        offset: i32,
        max_results: i32,
    ) -> Result<Vec<Pair>> {
        if offset <= 0 {
            return Err(anyhow::anyhow!("Offset must be greater than 0"));
        }

        let session_id = self.session_manager.ensure_session()?;
        let url = format!("{}/classSearch/get_campus", self.base_url);
        let params = [
            ("searchTerm", search),
            ("term", term),
            ("offset", &offset.to_string()),
            ("max", &max_results.to_string()),
            ("uniqueSessionId", &session_id),
            ("_", &SessionManager::nonce()),
        ];

        let response = self
            .client
            .get(&url)
            .query(&params)
            .send()
    /// Retrieves instructional method codes and descriptions.
    pub async fn get_instructional_methods(&self, term: &str) -> Result<Vec<Pair>> {
        self.get_list_endpoint("get_instructionalMethod", "", term, 1, 500)
            .await
            .context("Failed to get campuses")?;
    }

        let campuses: Vec<Pair> = response
            .json()
    /// Retrieves part-of-term codes and descriptions.
    pub async fn get_parts_of_term(&self, term: &str) -> Result<Vec<Pair>> {
        self.get_list_endpoint("get_partOfTerm", "", term, 1, 500)
            .await
            .context("Failed to parse campuses response")?;
    }

        Ok(campuses)
    /// Retrieves section attribute codes and descriptions.
    pub async fn get_attributes(&self, term: &str) -> Result<Vec<Pair>> {
        self.get_list_endpoint("get_attribute", "", term, 1, 500)
            .await
    }

    /// Retrieves meeting time information for a course.
@@ -211,7 +261,7 @@ impl BannerApi {
        let params = [("term", term), ("courseReferenceNumber", crn)];

        let response = self
            .client
            .http
            .get(&url)
            .query(&params)
            .send()
@@ -259,95 +309,33 @@ impl BannerApi {
        query: &SearchQuery,
        sort: &str,
        sort_descending: bool,
    ) -> Result<SearchResult> {
        self.session_manager.reset_data_form().await?;

        let session_id = self.session_manager.ensure_session()?;
        let mut params = query.to_params();

        // Add additional parameters
        params.insert("txt_term".to_string(), term.to_string());
        params.insert("uniqueSessionId".to_string(), session_id);
        params.insert("sortColumn".to_string(), sort.to_string());
        params.insert(
            "sortDirection".to_string(),
            if sort_descending { "desc" } else { "asc" }.to_string(),
        );
        params.insert("startDatepicker".to_string(), String::new());
        params.insert("endDatepicker".to_string(), String::new());

        let url = format!("{}/searchResults/searchResults", self.base_url);
        let response = self
            .client
            .get(&url)
            .query(&params)
            .send()
    ) -> Result<SearchResult, BannerApiError> {
        self.perform_search(term, query, sort, sort_descending)
            .await
            .context("Failed to search courses")?;

        let search_result: SearchResult = response
            .json()
            .await
            .context("Failed to parse search response")?;

        if !search_result.success {
            return Err(anyhow::anyhow!(
                "Search marked as unsuccessful by Banner API"
            ));
        }

        Ok(search_result)
    }

    /// Selects a term for the current session.
    pub async fn select_term(&self, term: &str) -> Result<()> {
        self.session_manager.select_term(term).await
    }

    /// Retrieves a single course by CRN by issuing a minimal search
    pub async fn get_course_by_crn(&self, term: &str, crn: &str) -> Result<Option<Course>> {
        self.session_manager.reset_data_form().await?;
        // Ensure session is configured for this term
        self.select_term(term).await?;

        let session_id = self.session_manager.ensure_session()?;
    pub async fn get_course_by_crn(
        &self,
        term: &str,
        crn: &str,
    ) -> Result<Option<Course>, BannerApiError> {
        debug!(term = term, crn = crn, "Looking up course by CRN");

        let query = SearchQuery::new()
            .course_reference_number(crn)
            .max_results(1);

        let mut params = query.to_params();
        params.insert("txt_term".to_string(), term.to_string());
        params.insert("uniqueSessionId".to_string(), session_id);
        params.insert("sortColumn".to_string(), "subjectDescription".to_string());
        params.insert("sortDirection".to_string(), "asc".to_string());
        params.insert("startDatepicker".to_string(), String::new());
        params.insert("endDatepicker".to_string(), String::new());
        let search_result = self
            .perform_search(term, &query, "subjectDescription", false)
            .await?;

        let url = format!("{}/searchResults/searchResults", self.base_url);
        let response = self
            .client
            .get(&url)
            .query(&params)
            .send()
            .await
            .context("Failed to search course by CRN")?;

        let status = response.status();
        let body = response
            .text()
            .await
            .with_context(|| format!("Failed to read body (status={status})"))?;

        let search_result: SearchResult = parse_json_with_context(&body).map_err(|e| {
            anyhow::anyhow!(
                "Failed to parse search response for CRN (status={status}, url={url}): {e}",
            )
        })?;

        if !search_result.success {
            return Err(anyhow::anyhow!(
                "Search marked as unsuccessful by Banner API"
        // Additional validation for CRN search
        if search_result.path_mode == Some("registration".to_string())
            && search_result.data.is_none()
        {
            return Err(BannerApiError::InvalidSession(
                "Search result path mode is registration and data is none".to_string(),
            ));
        }

@@ -355,62 +343,4 @@ impl BannerApi {
            .data
            .and_then(|courses| courses.into_iter().next()))
    }

    /// Gets course details (placeholder - needs implementation).
    pub async fn get_course_details(&self, term: &str, crn: &str) -> Result<ClassDetails> {
        let body = serde_json::json!({
            "term": term,
            "courseReferenceNumber": crn,
            "first": "first"
        });

        let url = format!("{}/searchResults/getClassDetails", self.base_url);
        let response = self
            .client
            .post(&url)
            .json(&body)
            .send()
            .await
            .context("Failed to get course details")?;

        let details: ClassDetails = response
            .json()
            .await
            .context("Failed to parse course details response")?;

        Ok(details)
    }
}

/// Attempt to parse JSON and, on failure, include a contextual snippet around the error location
fn parse_json_with_context<T: serde::de::DeserializeOwned>(body: &str) -> Result<T> {
    match serde_json::from_str::<T>(body) {
        Ok(value) => Ok(value),
        Err(err) => {
            let (line, column) = (err.line(), err.column());
            let snippet = build_error_snippet(body, line, column, 120);
            Err(anyhow::anyhow!(
                "{err} at line {line}, column {column}\nSnippet:\n{snippet}",
            ))
        }
    }
}

fn build_error_snippet(body: &str, line: usize, column: usize, max_len: usize) -> String {
    let target_line = body.lines().nth(line.saturating_sub(1)).unwrap_or("");
    if target_line.is_empty() {
        return String::new();
    }

    let start = column.saturating_sub(max_len.min(column));
    let end = (column + max_len).min(target_line.len());
    let slice = &target_line[start..end];

    let mut indicator = String::new();
    if column > start {
        indicator.push_str(&" ".repeat(column - start - 1));
        indicator.push('^');
    }

    format!("{slice}\n{indicator}")
}

@@ -0,0 +1,9 @@
//! Error types for the Banner API client.

#[derive(Debug, thiserror::Error)]
pub enum BannerApiError {
    #[error("Banner session is invalid or expired: {0}")]
    InvalidSession(String),
    #[error(transparent)]
    RequestFailed(#[from] anyhow::Error),
}
@@ -0,0 +1,415 @@
//! JSON parsing utilities for the Banner API client.

use anyhow::Result;
use serde_json::{self, Value};

/// Attempt to parse JSON and, on failure, include a contextual snippet of the
/// line where the error occurred.
///
/// In debug builds, this provides detailed context including the full JSON object
/// containing the error and type mismatch information. In release builds, it shows
/// a minimal snippet to prevent dumping huge JSON bodies to production logs.
pub fn parse_json_with_context<T: serde::de::DeserializeOwned>(body: &str) -> Result<T> {
    let jd = &mut serde_json::Deserializer::from_str(body);
    match serde_path_to_error::deserialize(jd) {
        Ok(value) => Ok(value),
        Err(err) => {
            let inner_err = err.inner();
            let (line, column) = (inner_err.line(), inner_err.column());
            let path = err.path().to_string();

            let msg = inner_err.to_string();
            let loc = format!(" at line {line} column {column}");
            let msg_without_loc = msg.strip_suffix(&loc).unwrap_or(&msg).to_string();

            // Build error message differently for debug vs release builds
            let final_err = if cfg!(debug_assertions) {
                // Debug mode: provide detailed context
                let type_info = parse_type_mismatch(&msg_without_loc);
                let context = extract_json_object_at_path(body, err.path(), line, column);

                let mut err_msg = String::new();
                if !path.is_empty() && path != "." {
                    err_msg.push_str(&format!("for path '{}'\n", path));
                }
                err_msg.push_str(&format!(
                    "({}) at line {} column {}\n\n",
                    type_info, line, column
                ));
                err_msg.push_str(&context);

                err_msg
            } else {
                // Release mode: minimal snippet to keep logs concise
                let snippet = build_error_snippet(body, line, column, 20);

                let mut err_msg = String::new();
                if !path.is_empty() && path != "." {
                    err_msg.push_str(&format!("for path '{}' ", path));
                }
                err_msg.push_str(&format!(
                    "({}) at line {} column {}",
                    msg_without_loc, line, column
                ));
                err_msg.push_str(&format!("\n{}", snippet));

                err_msg
            };

            Err(anyhow::anyhow!(final_err))
        }
    }
}

/// Extract type mismatch information from a serde error message.
///
/// Parses error messages like "invalid type: null, expected a string" to extract
/// the expected and actual types for clearer error reporting.
///
/// Returns a formatted string like "(expected a string, got null)" or the original
/// message if parsing fails.
fn parse_type_mismatch(error_msg: &str) -> String {
    // Try to parse "invalid type: X, expected Y" format
    if let Some(invalid_start) = error_msg.find("invalid type: ") {
        let after_prefix = &error_msg[invalid_start + "invalid type: ".len()..];

        if let Some(comma_pos) = after_prefix.find(", expected ") {
            let actual_type = &after_prefix[..comma_pos];
            let expected_part = &after_prefix[comma_pos + ", expected ".len()..];

            // Clean up expected part (remove " at line X column Y" if present)
            let expected_type = expected_part
                .split(" at line ")
                .next()
                .unwrap_or(expected_part)
                .trim();

            return format!("expected {}, got {}", expected_type, actual_type);
        }
    }

    // Try to parse "expected X at line Y" format
    if error_msg.starts_with("expected ")
        && let Some(expected_part) = error_msg.split(" at line ").next()
    {
        return expected_part.to_string();
    }

    // Fallback: return original message without location info
    error_msg.to_string()
}

/// Extract and pretty-print the JSON object/array containing the parse error.
///
/// This function navigates to the error location using the serde path and extracts
/// the parent object or array to provide better context for debugging.
///
/// # Arguments
/// * `body` - The raw JSON string
/// * `path` - The serde path to the error (e.g., "data[0].faculty[0].displayName")
/// * `line` - Line number of the error (for fallback)
/// * `column` - Column number of the error (for fallback)
///
/// # Returns
/// A formatted string containing the JSON object with the error, or a fallback snippet
fn extract_json_object_at_path(
    body: &str,
    path: &serde_path_to_error::Path,
    line: usize,
    column: usize,
) -> String {
    // Try to parse the entire JSON structure
    let root_value: Value = match serde_json::from_str(body) {
        Ok(v) => v,
        Err(_) => {
            // If we can't parse the JSON at all, fall back to line snippet
            return build_error_snippet(body, line, column, 20);
        }
    };

    // Navigate to the error location using the path
    let path_str = path.to_string();
    let segments = parse_path_segments(&path_str);

    let (context_value, context_name) = navigate_to_context(&root_value, &segments);

    // Pretty-print the context value with limited depth to avoid huge output
    match serde_json::to_string_pretty(&context_value) {
        Ok(pretty) => {
            // Limit output to ~50 lines to prevent log spam
            let lines: Vec<&str> = pretty.lines().collect();
            let truncated = if lines.len() > 50 {
                let mut result = lines[..47].join("\n");
                result.push_str("\n ... (truncated, ");
                result.push_str(&(lines.len() - 47).to_string());
                result.push_str(" more lines)");
                result
            } else {
                pretty
            };

            format!("{} at '{}':\n{}", context_name, path_str, truncated)
        }
        Err(_) => {
            // Fallback to simple snippet if pretty-print fails
            build_error_snippet(body, line, column, 20)
        }
    }
}

/// Parse a JSON path string into segments for navigation.
///
/// Converts paths like "data[0].faculty[1].displayName" into a sequence of
/// object keys and array indices.
fn parse_path_segments(path: &str) -> Vec<PathSegment> {
    let mut segments = Vec::new();
    let mut current = String::new();
    let mut in_bracket = false;

    for ch in path.chars() {
        match ch {
            '.' if !in_bracket => {
                if !current.is_empty() {
                    segments.push(PathSegment::Key(current.clone()));
                    current.clear();
                }
            }
            '[' => {
                if !current.is_empty() {
                    segments.push(PathSegment::Key(current.clone()));
                    current.clear();
                }
                in_bracket = true;
            }
            ']' => {
                if in_bracket && !current.is_empty() {
                    if let Ok(index) = current.parse::<usize>() {
                        segments.push(PathSegment::Index(index));
                    }
                    current.clear();
                }
                in_bracket = false;
            }
            _ => current.push(ch),
        }
    }

    if !current.is_empty() {
        segments.push(PathSegment::Key(current));
    }

    segments
}

/// Represents a segment in a JSON path (either an object key or array index).
#[derive(Debug)]
enum PathSegment {
    Key(String),
    Index(usize),
}

/// Navigate through a JSON value using path segments and return the appropriate context.
///
/// This function walks the JSON structure and returns the parent object/array that
/// contains the error, providing meaningful context for debugging.
///
/// # Returns
/// A tuple of (context_value, description) where context_value is the JSON to display
/// and description is a human-readable name for what we're showing.
fn navigate_to_context<'a>(
    mut current: &'a Value,
    segments: &[PathSegment],
) -> (&'a Value, &'static str) {
    // If path is empty or just root, return the whole value
    if segments.is_empty() {
        return (current, "Root object");
    }

    // Try to navigate to the parent of the error location
    // We want to show the containing object/array, not just the failing field
    let parent_depth = segments.len().saturating_sub(1);

    for (i, segment) in segments.iter().enumerate() {
        // Stop one level before the end to show the parent context
        if i >= parent_depth {
            break;
        }

        match segment {
            PathSegment::Key(key) => {
                if let Some(next) = current.get(key) {
                    current = next;
                } else {
                    // Can't navigate further, return what we have
                    return (current, "Partial context (navigation stopped)");
                }
            }
            PathSegment::Index(idx) => {
                if let Some(next) = current.get(idx) {
                    current = next;
                } else {
                    return (current, "Partial context (index out of bounds)");
                }
            }
        }
    }

    (current, "Object containing error")
}

fn build_error_snippet(body: &str, line: usize, column: usize, context_len: usize) -> String {
    let target_line = body.lines().nth(line.saturating_sub(1)).unwrap_or("");
    if target_line.is_empty() {
        return "(empty line)".to_string();
    }

    // column is 1-based, convert to 0-based for slicing
    let error_idx = column.saturating_sub(1);

    let half_len = context_len / 2;
    let start = error_idx.saturating_sub(half_len);
    let end = (error_idx + half_len).min(target_line.len());

    let slice = &target_line[start..end];
    let indicator_pos = error_idx - start;

    let indicator = " ".repeat(indicator_pos) + "^";

    format!("...{slice}...\n {indicator}")
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde::Deserialize;

    #[test]
    fn test_parse_type_mismatch_invalid_type() {
        let msg = "invalid type: null, expected a string at line 45 column 29";
        let result = parse_type_mismatch(msg);
        assert_eq!(result, "expected a string, got null");
    }

    #[test]
    fn test_parse_type_mismatch_expected() {
        let msg = "expected value at line 1 column 1";
        let result = parse_type_mismatch(msg);
        assert_eq!(result, "expected value");
    }

    #[test]
    fn test_parse_path_segments_simple() {
        let segments = parse_path_segments("data.name");
        assert_eq!(segments.len(), 2);
        match &segments[0] {
            PathSegment::Key(k) => assert_eq!(k, "data"),
            _ => panic!("Expected Key segment"),
        }
    }

    #[test]
    fn test_parse_path_segments_with_array() {
        let segments = parse_path_segments("data[0].faculty[1].displayName");
        assert_eq!(segments.len(), 5);
        match &segments[0] {
            PathSegment::Key(k) => assert_eq!(k, "data"),
            _ => panic!("Expected Key segment"),
        }
        match &segments[1] {
            PathSegment::Index(i) => assert_eq!(*i, 0),
            _ => panic!("Expected Index segment"),
        }
    }

    #[test]
    fn test_parse_json_with_context_null_value() {
        #[derive(Debug, Deserialize)]
        struct TestStruct {
            name: String,
        }

        let json = r#"{"name": null}"#;
        let result: Result<TestStruct> = parse_json_with_context(json);

        assert!(result.is_err());
        let err_msg = result.unwrap_err().to_string();

        // Should contain path info
        assert!(err_msg.contains("name"));

        // In debug mode, should contain detailed context
        if cfg!(debug_assertions) {
            assert!(err_msg.contains("expected"));
        }
    }

    #[test]
    fn test_navigate_to_context() {
        let json = r#"{"data": [{"faculty": [{"name": "John"}]}]}"#;
        let value: Value = serde_json::from_str(json).unwrap();

        let segments = parse_path_segments("data[0].faculty[0].name");
        let (context, _) = navigate_to_context(&value, &segments);

        // Should return the faculty[0] object (parent of 'name')
        assert!(context.is_object());
        assert!(context.get("name").is_some());
    }

    #[test]
    fn test_realistic_banner_error() {
        #[derive(Debug, Deserialize)]
        struct Course {
            #[allow(dead_code)]
            #[serde(rename = "courseTitle")]
            course_title: String,
            faculty: Vec<Faculty>,
        }

        #[derive(Debug, Deserialize)]
        struct Faculty {
            #[serde(rename = "displayName")]
            display_name: String,
            #[allow(dead_code)]
            email: String,
        }

        #[derive(Debug, Deserialize)]
        struct SearchResult {
            data: Vec<Course>,
        }

        // Simulate Banner API response with null faculty displayName
        // This mimics the actual error from SPN subject scrape
        let json = r#"{
            "data": [
                {
                    "courseTitle": "Spanish Conversation",
                    "faculty": [
                        {
                            "displayName": null,
                            "email": "instructor@utsa.edu"
                        }
                    ]
                }
            ]
        }"#;

        let result: Result<SearchResult> = parse_json_with_context(json);
        assert!(result.is_err());

        let err_msg = result.unwrap_err().to_string();
        println!("\n=== Error output in debug mode ===\n{}\n", err_msg);

        // Verify error contains key information
        assert!(err_msg.contains("data[0].faculty[0].displayName"));

        // In debug mode, should show detailed context
        if cfg!(debug_assertions) {
            // Should show type mismatch info
            assert!(err_msg.contains("expected") && err_msg.contains("got"));
            // Should show surrounding JSON context with the faculty object
            assert!(err_msg.contains("email"));
        }
    }
}
@@ -0,0 +1,72 @@
//! HTTP middleware for the Banner API client.

use http::Extensions;
use reqwest::{Request, Response};
use reqwest_middleware::{Middleware, Next};
use tracing::{debug, trace, warn};

pub struct TransparentMiddleware;

/// Threshold for logging slow requests at DEBUG level (in milliseconds)
const SLOW_REQUEST_THRESHOLD_MS: u128 = 1000;

#[async_trait::async_trait]
impl Middleware for TransparentMiddleware {
    async fn handle(
        &self,
        req: Request,
        extensions: &mut Extensions,
        next: Next<'_>,
    ) -> std::result::Result<Response, reqwest_middleware::Error> {
        let method = req.method().to_string();
        let path = req.url().path().to_string();

        let start = std::time::Instant::now();
        let response_result = next.run(req, extensions).await;
        let duration = start.elapsed();

        match response_result {
            Ok(response) => {
                if response.status().is_success() {
                    let duration_ms = duration.as_millis();
                    if duration_ms >= SLOW_REQUEST_THRESHOLD_MS {
                        debug!(
                            method = method,
                            path = path,
                            status = response.status().as_u16(),
                            duration_ms = duration_ms,
                            "Request completed (slow)"
                        );
                    } else {
                        trace!(
                            method = method,
                            path = path,
                            status = response.status().as_u16(),
                            duration_ms = duration_ms,
                            "Request completed"
                        );
                    }
                    Ok(response)
                } else {
                    warn!(
                        method = method,
                        path = path,
                        status = response.status().as_u16(),
                        duration_ms = duration.as_millis(),
                        "Request failed"
                    );
                    Ok(response)
                }
            }
            Err(error) => {
                warn!(
                    method = method,
                    path = path,
                    duration_ms = duration.as_millis(),
                    "Request failed"
                );
                Err(error)
            }
        }
    }
}
+7
-4
@@ -1,21 +1,24 @@
#![allow(unused_imports)]

//! Banner API module for interacting with Ellucian Banner systems.
//!
//! This module provides functionality to:
//! - Search for courses and retrieve course information
//! - Manage Banner API sessions and authentication
//! - Scrape course data and cache it in Redis
//! - Generate ICS files and calendar links

pub mod api;
pub mod errors;
pub mod json;
pub mod middleware;
pub mod models;
pub mod query;
pub mod scraper;
pub mod rate_limit_middleware;
pub mod rate_limiter;
pub mod session;
pub mod util;

pub use api::*;
pub use errors::*;
pub use models::*;
pub use query::*;
pub use rate_limiter::*;
pub use session::*;

@@ -11,6 +11,7 @@ pub struct Pair {
pub type BannerTerm = Pair;

/// Represents an instructor in the Banner system
#[allow(dead_code)]
pub type Instructor = Pair;

impl BannerTerm {

@@ -76,9 +76,3 @@ impl Course {
            .unwrap_or("Unknown")
    }
}

/// Class details (to be implemented)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClassDetails {
    // TODO: Implement based on Banner API response
}

+154
-125
@@ -1,10 +1,40 @@
use bitflags::{Flags, bitflags};
use chrono::{DateTime, NaiveDate, NaiveTime, Timelike, Utc};
use bitflags::{bitflags, Flags};
use chrono::{DateTime, NaiveDate, NaiveTime, Timelike, Utc, Weekday};
use extension_traits::extension;
use serde::{Deserialize, Deserializer, Serialize};
use std::{cmp::Ordering, collections::HashSet, fmt::Display, str::FromStr};
use std::{cmp::Ordering, str::FromStr};

use super::terms::Term;

#[extension(pub trait WeekdayExt)]
impl Weekday {
    /// Short two-letter representation (used for ICS generation)
    fn to_short_string(self) -> &'static str {
        match self {
            Weekday::Mon => "Mo",
            Weekday::Tue => "Tu",
            Weekday::Wed => "We",
            Weekday::Thu => "Th",
            Weekday::Fri => "Fr",
            Weekday::Sat => "Sa",
            Weekday::Sun => "Su",
        }
    }

    /// Full day name
    fn to_full_string(self) -> &'static str {
        match self {
            Weekday::Mon => "Monday",
            Weekday::Tue => "Tuesday",
            Weekday::Wed => "Wednesday",
            Weekday::Thu => "Thursday",
            Weekday::Fri => "Friday",
            Weekday::Sat => "Saturday",
            Weekday::Sun => "Sunday",
        }
    }
}

/// Deserialize a string field into a u32
fn deserialize_string_to_u32<'de, D>(deserializer: D) -> Result<u32, D::Error>
where
@@ -33,7 +63,7 @@ pub struct FacultyItem {
    #[serde(deserialize_with = "deserialize_string_to_u32")]
    pub course_reference_number: u32, // CRN, e.g. 27294
    pub display_name: String, // "LastName, FirstName"
    pub email_address: String, // e.g. FirstName.LastName@utsa.edu
    pub email_address: Option<String>, // e.g. FirstName.LastName@utsa.edu
    pub primary_indicator: bool,
    pub term: String, // e.g. "202420"
}
@@ -42,11 +72,11 @@ pub struct FacultyItem {
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MeetingTime {
    pub start_date: String, // MM/DD/YYYY, e.g. 08/26/2025
    pub end_date: String, // MM/DD/YYYY, e.g. 08/26/2025
    pub begin_time: String, // HHMM, e.g. 1000
    pub end_time: String, // HHMM, e.g. 1100
    pub category: String, // unknown meaning, e.g. 01, 02, etc.
    pub start_date: String, // MM/DD/YYYY, e.g. 08/26/2025
    pub end_date: String, // MM/DD/YYYY, e.g. 08/26/2025
    pub begin_time: Option<String>, // HHMM, e.g. 1000
    pub end_time: Option<String>, // HHMM, e.g. 1100
    pub category: String, // unknown meaning, e.g. 01, 02, etc.
    pub class: String, // internal class name, e.g. net.hedtech.banner.general.overallMeetingTimeDecorator
    pub monday: bool, // true if the meeting time occurs on Monday
    pub tuesday: bool, // true if the meeting time occurs on Tuesday
@@ -55,15 +85,15 @@ pub struct MeetingTime {
    pub friday: bool, // true if the meeting time occurs on Friday
    pub saturday: bool, // true if the meeting time occurs on Saturday
    pub sunday: bool, // true if the meeting time occurs on Sunday
    pub room: String, // e.g. 1238
    pub room: Option<String>, // e.g. 1.238
    #[serde(deserialize_with = "deserialize_string_to_term")]
    pub term: Term, // e.g. 202510
    pub building: String, // e.g. NPB
    pub building_description: String, // e.g. North Paseo Building
    pub campus: String, // campus code, e.g. 11
    pub campus_description: String, // name of campus, e.g. Main Campus
    pub building: Option<String>, // e.g. NPB
    pub building_description: Option<String>, // e.g. North Paseo Building
    pub campus: Option<String>, // campus code, e.g. 11
    pub campus_description: Option<String>, // name of campus, e.g. Main Campus
    pub course_reference_number: String, // CRN, e.g. 27294
    pub credit_hour_session: f64, // e.g. 30
    pub credit_hour_session: Option<f64>, // e.g. 30
    pub hours_week: f64, // e.g. 30
    pub meeting_schedule_type: String, // e.g. AFF
    pub meeting_type: String, // e.g. HB, H2, H1, OS, OA, OH, ID, FF
@@ -114,67 +144,33 @@ impl MeetingDays {
    }
}

impl Ord for MeetingDays {
    fn cmp(&self, other: &Self) -> Ordering {
        self.bits().cmp(&other.bits())
    }
}

impl PartialOrd for MeetingDays {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.bits().cmp(&other.bits()))
        Some(self.cmp(other))
    }
}

impl From<DayOfWeek> for MeetingDays {
    fn from(day: DayOfWeek) -> Self {
impl From<Weekday> for MeetingDays {
    fn from(day: Weekday) -> Self {
        match day {
            DayOfWeek::Monday => MeetingDays::Monday,
            DayOfWeek::Tuesday => MeetingDays::Tuesday,
            DayOfWeek::Wednesday => MeetingDays::Wednesday,
            DayOfWeek::Thursday => MeetingDays::Thursday,
            DayOfWeek::Friday => MeetingDays::Friday,
            DayOfWeek::Saturday => MeetingDays::Saturday,
            DayOfWeek::Sunday => MeetingDays::Sunday,
            Weekday::Mon => MeetingDays::Monday,
            Weekday::Tue => MeetingDays::Tuesday,
            Weekday::Wed => MeetingDays::Wednesday,
            Weekday::Thu => MeetingDays::Thursday,
            Weekday::Fri => MeetingDays::Friday,
            Weekday::Sat => MeetingDays::Saturday,
            Weekday::Sun => MeetingDays::Sunday,
        }
    }
}

/// Days of the week for meeting schedules
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum DayOfWeek {
    Monday,
    Tuesday,
    Wednesday,
    Thursday,
    Friday,
    Saturday,
    Sunday,
}

impl DayOfWeek {
    /// Convert to short string representation
    pub fn to_short_string(self) -> &'static str {
        match self {
            DayOfWeek::Monday => "Mo",
            DayOfWeek::Tuesday => "Tu",
            DayOfWeek::Wednesday => "We",
            DayOfWeek::Thursday => "Th",
            DayOfWeek::Friday => "Fr",
            DayOfWeek::Saturday => "Sa",
            DayOfWeek::Sunday => "Su",
        }
    }

    /// Convert to full string representation
    pub fn to_full_string(self) -> &'static str {
        match self {
            DayOfWeek::Monday => "Monday",
            DayOfWeek::Tuesday => "Tuesday",
            DayOfWeek::Wednesday => "Wednesday",
            DayOfWeek::Thursday => "Thursday",
            DayOfWeek::Friday => "Friday",
            DayOfWeek::Saturday => "Saturday",
            DayOfWeek::Sunday => "Sunday",
        }
    }
}

impl TryFrom<MeetingDays> for DayOfWeek {
impl TryFrom<MeetingDays> for Weekday {
    type Error = anyhow::Error;

    fn try_from(days: MeetingDays) -> Result<Self, Self::Error> {
@@ -185,13 +181,13 @@ impl TryFrom<MeetingDays> for DayOfWeek {
        let count = days.into_iter().count();
        if count == 1 {
            return Ok(match days {
                MeetingDays::Monday => DayOfWeek::Monday,
                MeetingDays::Tuesday => DayOfWeek::Tuesday,
                MeetingDays::Wednesday => DayOfWeek::Wednesday,
                MeetingDays::Thursday => DayOfWeek::Thursday,
                MeetingDays::Friday => DayOfWeek::Friday,
                MeetingDays::Saturday => DayOfWeek::Saturday,
                MeetingDays::Sunday => DayOfWeek::Sunday,
                MeetingDays::Monday => Weekday::Mon,
                MeetingDays::Tuesday => Weekday::Tue,
                MeetingDays::Wednesday => Weekday::Wed,
                MeetingDays::Thursday => Weekday::Thu,
                MeetingDays::Friday => Weekday::Fri,
                MeetingDays::Saturday => Weekday::Sat,
                MeetingDays::Sunday => Weekday::Sun,
                _ => unreachable!(),
            });
        }
@@ -252,10 +248,16 @@ impl TimeRange {
        let minute = time.minute();

        let meridiem = if hour < 12 { "AM" } else { "PM" };
        format!("{hour}:{minute:02}{meridiem}")
        let display_hour = match hour {
            0 => 12,
            13..=23 => hour - 12,
            _ => hour,
        };
        format!("{display_hour}:{minute:02}{meridiem}")
    }

    /// Get duration in minutes
    #[allow(dead_code)]
    pub fn duration_minutes(&self) -> i64 {
        let start_minutes = self.start.hour() as i64 * 60 + self.start.minute() as i64;
        let end_minutes = self.end.hour() as i64 * 60 + self.end.minute() as i64;
@@ -296,10 +298,11 @@ impl DateRange {
    /// Get the number of weeks between start and end dates
    pub fn weeks_duration(&self) -> u32 {
        let duration = self.end.signed_duration_since(self.start);
        duration.num_weeks() as u32
        duration.num_weeks().max(0) as u32
    }

    /// Check if a specific date falls within this range
    #[allow(dead_code)]
    pub fn contains_date(&self, date: NaiveDate) -> bool {
        date >= self.start && date <= self.end
    }
@@ -317,10 +320,11 @@ pub enum MeetingType {
|
||||
Unknown(String),
|
||||
}
|
||||
|
||||
impl MeetingType {
|
||||
/// Parse from the meeting type string
|
||||
pub fn from_string(s: &str) -> Self {
|
||||
match s {
|
||||
impl std::str::FromStr for MeetingType {
|
||||
type Err = std::convert::Infallible;
|
||||
|
||||
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
|
||||
Ok(match s {
|
||||
"HB" | "H2" | "H1" => MeetingType::HybridBlended,
|
||||
"OS" => MeetingType::OnlineSynchronous,
|
||||
"OA" => MeetingType::OnlineAsynchronous,
|
||||
@@ -328,9 +332,11 @@ impl MeetingType {
|
||||
"ID" => MeetingType::IndependentStudy,
|
||||
"FF" => MeetingType::FaceToFace,
|
||||
other => MeetingType::Unknown(other.to_string()),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
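
With `from_string` replaced by a `FromStr` impl, callers now go through `str::parse`. A minimal illustrative sketch (not part of the diff; since the error type is `Infallible`, the unwrap can never panic):

    let meeting_type: MeetingType = "OS".parse().unwrap();
    assert!(matches!(meeting_type, MeetingType::OnlineSynchronous));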

impl MeetingType {
    /// Get description for the meeting type
    pub fn description(&self) -> &'static str {
        match self {
@@ -347,42 +353,46 @@ impl MeetingType {

/// Meeting location information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MeetingLocation {
    pub campus: String,
    pub building: String,
    pub building_description: String,
    pub room: String,
    pub is_online: bool,
pub enum MeetingLocation {
    Online,
    InPerson {
        campus: String,
        campus_description: String,
        building: String,
        building_description: String,
        room: String,
    },
}

impl MeetingLocation {
    /// Create from raw MeetingTime data
    pub fn from_meeting_time(meeting_time: &MeetingTime) -> Self {
        let is_online = meeting_time.room.is_empty();
        if let (
            Some(campus),
            Some(campus_description),
            Some(building),
            Some(building_description),
            Some(room),
        ) = (
            &meeting_time.campus,
            &meeting_time.campus_description,
            &meeting_time.building,
            &meeting_time.building_description,
            &meeting_time.room,
        ) {
            if campus_description == "Internet" {
                return MeetingLocation::Online;
            }

        MeetingLocation {
            campus: meeting_time.campus_description.clone(),
            building: meeting_time.building.clone(),
            building_description: meeting_time.building_description.clone(),
            room: meeting_time.room.clone(),
            is_online,
        }
    }
}

impl Display for MeetingLocation {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if self.is_online {
            write!(f, "Online")
            MeetingLocation::InPerson {
                campus: campus.clone(),
                campus_description: campus_description.clone(),
                building: building.clone(),
                building_description: building_description.clone(),
                room: room.clone(),
            }
        } else {
            write!(
                f,
                "{campus} | {building_name} | {building_code} {room}",
                campus = self.campus,
                building_name = self.building_description,
                building_code = self.building,
                room = self.room
            )
            MeetingLocation::Online
        }
    }
}
@@ -402,7 +412,11 @@ impl MeetingScheduleInfo {
    /// Create from raw MeetingTime data
    pub fn from_meeting_time(meeting_time: &MeetingTime) -> Self {
        let days = MeetingDays::from_meeting_time(meeting_time);
        let time_range = TimeRange::from_hhmm(&meeting_time.begin_time, &meeting_time.end_time);
        let time_range = match (&meeting_time.begin_time, &meeting_time.end_time) {
            (Some(begin), Some(end)) => TimeRange::from_hhmm(begin, end),
            _ => None,
        };

        let date_range =
            DateRange::from_mm_dd_yyyy(&meeting_time.start_date, &meeting_time.end_date)
                .unwrap_or_else(|| {
@@ -413,7 +427,7 @@ impl MeetingScheduleInfo {
                    end: now,
                }
            });
        let meeting_type = MeetingType::from_string(&meeting_time.meeting_type);
        let meeting_type: MeetingType = meeting_time.meeting_type.parse().unwrap();
        let location = MeetingLocation::from_meeting_time(meeting_time);
        let duration_weeks = date_range.weeks_duration();

@@ -427,11 +441,11 @@ impl MeetingScheduleInfo {
        }
    }

    /// Convert the meeting days bitset to a enum vector
    pub fn days_of_week(&self) -> Vec<DayOfWeek> {
    /// Convert the meeting days bitset to a weekday vector
    pub fn days_of_week(&self) -> Vec<Weekday> {
        self.days
            .iter()
            .map(|day| <MeetingDays as TryInto<DayOfWeek>>::try_into(day).unwrap())
            .map(|day| <MeetingDays as TryInto<Weekday>>::try_into(day).unwrap())
            .collect()
    }

@@ -459,9 +473,9 @@ impl MeetingScheduleInfo {
        );

        if ambiguous {
            |day: &DayOfWeek| day.to_short_string().to_string()
            |day: &Weekday| day.to_short_string().to_string()
        } else {
            |day: &DayOfWeek| day.to_short_string().chars().next().unwrap().to_string()
            |day: &Weekday| day.to_short_string().chars().next().unwrap().to_string()
        }
    };

@@ -470,19 +484,34 @@ impl MeetingScheduleInfo {

    /// Returns a formatted string representing the location of the meeting
    pub fn place_string(&self) -> String {
        if self.location.room.is_empty() {
            "Online".to_string()
        } else {
            format!(
        match &self.location {
            MeetingLocation::Online => "Online".to_string(),
            MeetingLocation::InPerson {
                campus,
                building,
                building_description,
                room,
                ..
            } => format!(
                "{} | {} | {} {}",
                self.location.campus,
                self.location.building_description,
                self.location.building,
                self.location.room
            )
                campus, building_description, building, room
            ),
        }
    }

    /// Sort a slice of meeting schedule infos by start time, with stable fallback to day bits.
    ///
    /// Meetings with a time range sort before those without one.
    /// Among meetings without a time range, ties break by day-of-week bits.
    pub fn sort_by_start_time(meetings: &mut [MeetingScheduleInfo]) {
        meetings.sort_unstable_by(|a, b| match (&a.time_range, &b.time_range) {
            (Some(a_time), Some(b_time)) => a_time.start.cmp(&b_time.start),
            (Some(_), None) => std::cmp::Ordering::Less,
            (None, Some(_)) => std::cmp::Ordering::Greater,
            (None, None) => a.days.bits().cmp(&b.days.bits()),
        });
    }
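
A minimal usage sketch of the new sort helper (illustrative only; assumes a `meetings: Vec<MeetingScheduleInfo>` already in scope). Timed meetings end up first, ordered by start time; untimed meetings follow, ordered by their day bits:

    MeetingScheduleInfo::sort_by_start_time(&mut meetings);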

    /// Get the start and end date times for the meeting
    ///
    /// Uses the start and end times of the meeting if available, otherwise defaults to midnight (00:00:00.000).

@@ -10,8 +10,8 @@ pub struct SearchResult {
    pub total_count: i32,
    pub page_offset: i32,
    pub page_max_size: i32,
    pub path_mode: String,
    pub search_results_config: Vec<SearchResultConfig>,
    pub path_mode: Option<String>,
    pub search_results_config: Option<Vec<SearchResultConfig>>,
    pub data: Option<Vec<Course>>,
}
+209 -8
@@ -13,7 +13,7 @@ const CURRENT_YEAR: u32 = compile_time::date!().year() as u32;
const VALID_YEARS: RangeInclusive<u32> = 2007..=(CURRENT_YEAR + 10);

/// Represents a term in the Banner system
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub struct Term {
    pub year: u32, // 2024, 2025, etc
    pub season: Season,
@@ -29,7 +29,7 @@ pub enum TermPoint {
}

/// Represents a season within a term
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum Season {
    Fall,
    Spring,
@@ -147,11 +147,6 @@ impl Term {
        },
    }
}

    /// Returns a long string representation of the term (e.g., "Fall 2025")
    pub fn to_long_string(&self) -> String {
        format!("{} {}", self.season, self.year)
    }
}

impl TermPoint {
@@ -193,7 +188,7 @@ impl std::fmt::Display for Term {

impl Season {
    /// Returns the season code as a string
    fn to_str(&self) -> &'static str {
    fn to_str(self) -> &'static str {
        match self {
            Season::Fall => "10",
            Season::Spring => "20",
@@ -245,3 +240,209 @@ impl FromStr for Term {
        Ok(Term { year, season })
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // --- Season::from_str ---

    #[test]
    fn test_season_from_str_fall() {
        assert_eq!(Season::from_str("10").unwrap(), Season::Fall);
    }

    #[test]
    fn test_season_from_str_spring() {
        assert_eq!(Season::from_str("20").unwrap(), Season::Spring);
    }

    #[test]
    fn test_season_from_str_summer() {
        assert_eq!(Season::from_str("30").unwrap(), Season::Summer);
    }

    #[test]
    fn test_season_from_str_invalid() {
        for input in ["00", "40", "1", ""] {
            assert!(
                Season::from_str(input).is_err(),
                "expected Err for {input:?}"
            );
        }
    }

    // --- Season Display ---

    #[test]
    fn test_season_display() {
        assert_eq!(Season::Fall.to_string(), "Fall");
        assert_eq!(Season::Spring.to_string(), "Spring");
        assert_eq!(Season::Summer.to_string(), "Summer");
    }

    #[test]
    fn test_season_to_str_roundtrip() {
        for season in [Season::Fall, Season::Spring, Season::Summer] {
            assert_eq!(Season::from_str(season.to_str()).unwrap(), season);
        }
    }

    // --- Term::from_str ---

    #[test]
    fn test_term_from_str_valid_fall() {
        let term = Term::from_str("202510").unwrap();
        assert_eq!(term.year, 2025);
        assert_eq!(term.season, Season::Fall);
    }

    #[test]
    fn test_term_from_str_valid_spring() {
        let term = Term::from_str("202520").unwrap();
        assert_eq!(term.year, 2025);
        assert_eq!(term.season, Season::Spring);
    }

    #[test]
    fn test_term_from_str_valid_summer() {
        let term = Term::from_str("202530").unwrap();
        assert_eq!(term.year, 2025);
        assert_eq!(term.season, Season::Summer);
    }

    #[test]
    fn test_term_from_str_too_short() {
        assert!(Term::from_str("20251").is_err());
    }

    #[test]
    fn test_term_from_str_too_long() {
        assert!(Term::from_str("2025100").is_err());
    }

    #[test]
    fn test_term_from_str_empty() {
        assert!(Term::from_str("").is_err());
    }

    #[test]
    fn test_term_from_str_invalid_year_chars() {
        assert!(Term::from_str("abcd10").is_err());
    }

    #[test]
    fn test_term_from_str_invalid_season() {
        assert!(Term::from_str("202540").is_err());
    }

    #[test]
    fn test_term_from_str_year_below_range() {
        assert!(Term::from_str("200010").is_err());
    }

    #[test]
    fn test_term_display_roundtrip() {
        for code in ["202510", "202520", "202530"] {
            let term = Term::from_str(code).unwrap();
            assert_eq!(term.to_string(), code);
        }
    }

    // --- Term::get_status_for_date ---

    #[test]
    fn test_status_mid_spring() {
        let date = NaiveDate::from_ymd_opt(2025, 2, 15).unwrap();
        let status = Term::get_status_for_date(date);
        assert!(
            matches!(status, TermPoint::InTerm { current } if current.season == Season::Spring)
        );
    }

    #[test]
    fn test_status_mid_summer() {
        let date = NaiveDate::from_ymd_opt(2025, 7, 1).unwrap();
        let status = Term::get_status_for_date(date);
        assert!(
            matches!(status, TermPoint::InTerm { current } if current.season == Season::Summer)
        );
    }

    #[test]
    fn test_status_mid_fall() {
        let date = NaiveDate::from_ymd_opt(2025, 10, 15).unwrap();
        let status = Term::get_status_for_date(date);
        assert!(matches!(status, TermPoint::InTerm { current } if current.season == Season::Fall));
    }

    #[test]
    fn test_status_between_fall_and_spring() {
        let date = NaiveDate::from_ymd_opt(2025, 1, 1).unwrap();
        let status = Term::get_status_for_date(date);
        assert!(
            matches!(status, TermPoint::BetweenTerms { next } if next.season == Season::Spring)
        );
    }

    #[test]
    fn test_status_between_spring_and_summer() {
        let date = NaiveDate::from_ymd_opt(2025, 5, 15).unwrap();
        let status = Term::get_status_for_date(date);
        assert!(
            matches!(status, TermPoint::BetweenTerms { next } if next.season == Season::Summer)
        );
    }

    #[test]
    fn test_status_between_summer_and_fall() {
        let date = NaiveDate::from_ymd_opt(2025, 8, 16).unwrap();
        let status = Term::get_status_for_date(date);
        assert!(matches!(status, TermPoint::BetweenTerms { next } if next.season == Season::Fall));
    }

    #[test]
    fn test_status_after_fall_end() {
        let date = NaiveDate::from_ymd_opt(2025, 12, 15).unwrap();
        let status = Term::get_status_for_date(date);
        assert!(
            matches!(status, TermPoint::BetweenTerms { next } if next.season == Season::Spring)
        );
        // Year should roll over: fall 2025 ends → next spring is 2026
        let next_term = status.inner();
        assert_eq!(next_term.year, 2026);
    }

    // --- TermPoint::inner ---

    #[test]
    fn test_term_point_inner() {
        let in_term = TermPoint::InTerm {
            current: Term {
                year: 2025,
                season: Season::Fall,
            },
        };
        assert_eq!(
            in_term.inner(),
            &Term {
                year: 2025,
                season: Season::Fall
            }
        );

        let between = TermPoint::BetweenTerms {
            next: Term {
                year: 2026,
                season: Season::Spring,
            },
        };
        assert_eq!(
            between.inner(),
            &Term {
                year: 2026,
                season: Season::Spring
            }
        );
    }
}
+213 -18
@@ -10,8 +10,9 @@ pub struct Range {
    pub high: i32,
}

/// Builder for constructing Banner API search queries
/// Builder for constructing Banner API search queries.
#[derive(Debug, Clone, Default)]
#[allow(dead_code)]
pub struct SearchQuery {
    subject: Option<String>,
    title: Option<String>,
@@ -32,6 +33,7 @@ pub struct SearchQuery {
    course_number_range: Option<Range>,
}

#[allow(dead_code)]
impl SearchQuery {
    /// Creates a new SearchQuery with default values
    pub fn new() -> Self {
@@ -155,11 +157,22 @@ impl SearchQuery {
    }

    /// Sets the maximum number of results to return
    /// Clamped to a maximum of 500 to prevent excessive API load
    pub fn max_results(mut self, max_results: i32) -> Self {
        self.max_results = max_results;
        self.max_results = max_results.clamp(1, 500);
        self
    }

    /// Gets the subject field
    pub fn get_subject(&self) -> Option<&String> {
        self.subject.as_ref()
    }

    /// Gets the max_results field
    pub fn get_max_results(&self) -> i32 {
        self.max_results
    }

    /// Converts the query into URL parameters for the Banner API
    pub fn to_params(&self) -> HashMap<String, String> {
        let mut params = HashMap::new();
@@ -180,7 +193,7 @@ impl SearchQuery {
            params.insert("txt_keywordlike".to_string(), keywords.join(" "));
        }

        if self.open_only.is_some() {
        if self.open_only == Some(true) {
            params.insert("chk_open_only".to_string(), "true".to_string());
        }

@@ -242,26 +255,208 @@ impl SearchQuery {
    }
}

/// Formats a Duration into hour, minute, and meridiem strings for Banner API
/// Formats a Duration into hour, minute, and meridiem strings for Banner API.
///
/// Uses 12-hour format: midnight = 12:00 AM, noon = 12:00 PM.
fn format_time_parameter(duration: Duration) -> (String, String, String) {
    let total_minutes = duration.as_secs() / 60;
    let hours = total_minutes / 60;
    let minutes = total_minutes % 60;

    let minute_str = minutes.to_string();
    let meridiem = if hours >= 12 { "PM" } else { "AM" };
    let hour_12 = match hours % 12 {
        0 => 12,
        h => h,
    };

    if hours >= 12 {
        let meridiem = "PM".to_string();
        let hour_str = if hours >= 13 {
            (hours - 12).to_string()
        } else {
            hours.to_string()
        };
        (hour_str, minute_str, meridiem)
    } else {
        let meridiem = "AM".to_string();
        let hour_str = hours.to_string();
        (hour_str, minute_str, meridiem)
    (
        hour_12.to_string(),
        minutes.to_string(),
        meridiem.to_string(),
    )
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_defaults() {
        let q = SearchQuery::new();
        assert_eq!(q.get_max_results(), 8);
        assert!(q.get_subject().is_none());
        let params = q.to_params();
        assert_eq!(params.get("pageMaxSize").unwrap(), "8");
        assert_eq!(params.get("pageOffset").unwrap(), "0");
        assert_eq!(params.len(), 2);
    }

    #[test]
    fn test_subject_param() {
        let params = SearchQuery::new().subject("CS").to_params();
        assert_eq!(params.get("txt_subject").unwrap(), "CS");
    }

    #[test]
    fn test_title_trims_whitespace() {
        let params = SearchQuery::new().title(" Intro to CS ").to_params();
        assert_eq!(params.get("txt_courseTitle").unwrap(), "Intro to CS");
    }

    #[test]
    fn test_crn_param() {
        let params = SearchQuery::new()
            .course_reference_number("12345")
            .to_params();
        assert_eq!(params.get("txt_courseReferenceNumber").unwrap(), "12345");
    }

    #[test]
    fn test_keywords_joined_with_spaces() {
        let params = SearchQuery::new()
            .keyword("data")
            .keyword("science")
            .to_params();
        assert_eq!(params.get("txt_keywordlike").unwrap(), "data science");
    }

    #[test]
    fn test_keywords_vec() {
        let params = SearchQuery::new()
            .keywords(vec!["machine".into(), "learning".into()])
            .to_params();
        assert_eq!(params.get("txt_keywordlike").unwrap(), "machine learning");
    }

    #[test]
    fn test_open_only() {
        let params = SearchQuery::new().open_only(true).to_params();
        assert_eq!(params.get("chk_open_only").unwrap(), "true");

        // open_only(false) should NOT set the param
        let params2 = SearchQuery::new().open_only(false).to_params();
        assert!(params2.get("chk_open_only").is_none());
    }

    #[test]
    fn test_credits_range() {
        let params = SearchQuery::new().credits(3, 6).to_params();
        assert_eq!(params.get("txt_credithourlow").unwrap(), "3");
        assert_eq!(params.get("txt_credithourhigh").unwrap(), "6");
    }

    #[test]
    fn test_course_number_range() {
        let params = SearchQuery::new().course_numbers(3000, 3999).to_params();
        assert_eq!(params.get("txt_course_number_range").unwrap(), "3000");
        assert_eq!(params.get("txt_course_number_range_to").unwrap(), "3999");
    }

    #[test]
    fn test_pagination() {
        let params = SearchQuery::new().offset(20).max_results(10).to_params();
        assert_eq!(params.get("pageOffset").unwrap(), "20");
        assert_eq!(params.get("pageMaxSize").unwrap(), "10");
    }

    #[test]
    fn test_format_time_9am() {
        let (h, m, mer) = format_time_parameter(Duration::from_secs(9 * 3600));
        assert_eq!(h, "9");
        assert_eq!(m, "0");
        assert_eq!(mer, "AM");
    }

    #[test]
    fn test_format_time_noon() {
        let (h, m, mer) = format_time_parameter(Duration::from_secs(12 * 3600));
        assert_eq!(h, "12");
        assert_eq!(m, "0");
        assert_eq!(mer, "PM");
    }

    #[test]
    fn test_format_time_1pm() {
        let (h, m, mer) = format_time_parameter(Duration::from_secs(13 * 3600));
        assert_eq!(h, "1");
        assert_eq!(m, "0");
        assert_eq!(mer, "PM");
    }

    #[test]
    fn test_format_time_930am() {
        let (h, m, mer) = format_time_parameter(Duration::from_secs(9 * 3600 + 30 * 60));
        assert_eq!(h, "9");
        assert_eq!(m, "30");
        assert_eq!(mer, "AM");
    }

    #[test]
    fn test_format_time_midnight() {
        let (h, m, mer) = format_time_parameter(Duration::from_secs(0));
        assert_eq!(h, "12");
        assert_eq!(m, "0");
        assert_eq!(mer, "AM");
    }

    #[test]
    fn test_time_params_in_query() {
        let params = SearchQuery::new()
            .start_time(Duration::from_secs(9 * 3600))
            .end_time(Duration::from_secs(17 * 3600))
            .to_params();
        assert_eq!(params.get("select_start_hour").unwrap(), "9");
        assert_eq!(params.get("select_start_ampm").unwrap(), "AM");
        assert_eq!(params.get("select_end_hour").unwrap(), "5");
        assert_eq!(params.get("select_end_ampm").unwrap(), "PM");
    }

    #[test]
    fn test_multi_value_params() {
        let params = SearchQuery::new()
            .campus(vec!["MAIN".into(), "DT".into()])
            .attributes(vec!["HONORS".into()])
            .instructor(vec![1001, 1002])
            .to_params();
        assert_eq!(params.get("txt_campus").unwrap(), "MAIN,DT");
        assert_eq!(params.get("txt_attribute").unwrap(), "HONORS");
        assert_eq!(params.get("txt_instructor").unwrap(), "1001,1002");
    }

    #[test]
    fn test_display_minimal() {
        let display = SearchQuery::new().to_string();
        assert_eq!(display, "offset=0, maxResults=8");
    }

    #[test]
    fn test_display_with_fields() {
        let display = SearchQuery::new()
            .subject("CS")
            .open_only(true)
            .max_results(10)
            .to_string();
        assert!(display.contains("subject=CS"));
        assert!(display.contains("openOnly=true"));
        assert!(display.contains("maxResults=10"));
    }

    #[test]
    fn test_full_query_param_count() {
        let params = SearchQuery::new()
            .subject("CS")
            .title("Intro")
            .course_reference_number("12345")
            .keyword("programming")
            .open_only(true)
            .credits(3, 4)
            .course_numbers(1000, 1999)
            .offset(0)
            .max_results(25)
            .to_params();
        // subject, title, crn, keyword, open_only, min_credits, max_credits,
        // course_number_range, course_number_range_to, pageOffset, pageMaxSize = 11
        assert_eq!(params.len(), 11);
    }
}

@@ -278,7 +473,7 @@ impl std::fmt::Display for SearchQuery {
        if let Some(ref keywords) = self.keywords {
            parts.push(format!("keywords={}", keywords.join(" ")));
        }
        if self.open_only.is_some() {
        if self.open_only == Some(true) {
            parts.push("openOnly=true".to_string());
        }
        if let Some(ref term_part) = self.term_part {
@@ -0,0 +1,84 @@
//! HTTP middleware that enforces rate limiting for Banner API requests.

use crate::banner::rate_limiter::{RequestType, SharedRateLimiter};
use http::Extensions;
use reqwest::{Request, Response};
use reqwest_middleware::{Middleware, Next};
use tracing::debug;
use url::Url;

/// Middleware that enforces rate limiting based on request URL patterns
pub struct RateLimitMiddleware {
    rate_limiter: SharedRateLimiter,
}

impl RateLimitMiddleware {
    /// Creates a new rate limiting middleware
    pub fn new(rate_limiter: SharedRateLimiter) -> Self {
        Self { rate_limiter }
    }

    /// Returns a human-readable description of the rate limit for a request type
    fn get_rate_limit_description(request_type: RequestType) -> &'static str {
        match request_type {
            RequestType::Session => "6 rpm (~10s interval)",
            RequestType::Search => "30 rpm (~2s interval)",
            RequestType::Metadata => "20 rpm (~3s interval)",
            RequestType::Reset => "10 rpm (~6s interval)",
        }
    }

    /// Determines the request type based on the URL path
    fn get_request_type(url: &Url) -> RequestType {
        let path = url.path();

        if path.contains("/registration")
            || path.contains("/selfServiceMenu")
            || path.contains("/term/termSelection")
        {
            RequestType::Session
        } else if path.contains("/searchResults") || path.contains("/classSearch") {
            RequestType::Search
        } else if path.contains("/getTerms")
            || path.contains("/getSubjects")
            || path.contains("/getCampuses")
        {
            RequestType::Metadata
        } else if path.contains("/resetDataForm") {
            RequestType::Reset
        } else {
            // Default to search for unknown endpoints
            RequestType::Search
        }
    }
}
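
How the URL classifier behaves, as an illustrative sketch (the host is made up; `get_request_type` is private, so a check like this would live in a unit test inside the module):

    let search = Url::parse("https://banner.example.edu/ssb/searchResults").unwrap();
    assert_eq!(RateLimitMiddleware::get_request_type(&search), RequestType::Search);

    let meta = Url::parse("https://banner.example.edu/ssb/getTerms").unwrap();
    assert_eq!(RateLimitMiddleware::get_request_type(&meta), RequestType::Metadata);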

#[async_trait::async_trait]
impl Middleware for RateLimitMiddleware {
    async fn handle(
        &self,
        req: Request,
        extensions: &mut Extensions,
        next: Next<'_>,
    ) -> std::result::Result<Response, reqwest_middleware::Error> {
        let request_type = Self::get_request_type(req.url());

        let start = std::time::Instant::now();
        self.rate_limiter.wait_for_permission(request_type).await;
        let wait_duration = start.elapsed();

        // Only log if rate limiting caused significant delay (>= 500ms)
        if wait_duration.as_millis() >= 500 {
            let limit_desc = Self::get_rate_limit_description(request_type);
            debug!(
                request_type = ?request_type,
                wait_ms = wait_duration.as_millis(),
                rate_limit = limit_desc,
                "Rate limit caused delay"
            );
        }

        // Make the actual request
        next.run(req, extensions).await
    }
}
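
An illustrative wiring sketch (an assumption, not part of the diff): the middleware plugs into reqwest through reqwest_middleware's ClientBuilder, using the `create_shared_rate_limiter` helper from the rate_limiter module below. The function name here is hypothetical.

    use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};

    // Hypothetical helper showing the assumed wiring.
    fn build_rate_limited_client(rate_limiter: SharedRateLimiter) -> ClientWithMiddleware {
        ClientBuilder::new(reqwest::Client::new())
            .with(RateLimitMiddleware::new(rate_limiter))
            .build()
    }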
@@ -0,0 +1,210 @@
//! Rate limiting for Banner API requests to prevent overwhelming the server.

use crate::config::RateLimitingConfig;
use governor::{
    Quota, RateLimiter,
    clock::DefaultClock,
    state::{InMemoryState, NotKeyed},
};
use std::num::NonZeroU32;
use std::sync::Arc;
use std::time::Duration;

/// Different types of Banner API requests with different rate limits
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum RequestType {
    /// Session creation and management (very conservative)
    Session,
    /// Course search requests (moderate)
    Search,
    /// Term and metadata requests (moderate)
    Metadata,
    /// Data form resets (low priority)
    Reset,
}

/// A rate limiter that manages different request types with different limits
pub struct BannerRateLimiter {
    session_limiter: RateLimiter<NotKeyed, InMemoryState, DefaultClock>,
    search_limiter: RateLimiter<NotKeyed, InMemoryState, DefaultClock>,
    metadata_limiter: RateLimiter<NotKeyed, InMemoryState, DefaultClock>,
    reset_limiter: RateLimiter<NotKeyed, InMemoryState, DefaultClock>,
}

impl BannerRateLimiter {
    /// Creates a new rate limiter with the given configuration
    pub fn new(config: RateLimitingConfig) -> Self {
        let session_quota = Quota::with_period(Duration::from_secs(60) / config.session_rpm)
            .unwrap()
            .allow_burst(NonZeroU32::new(config.burst_allowance).unwrap());

        let search_quota = Quota::with_period(Duration::from_secs(60) / config.search_rpm)
            .unwrap()
            .allow_burst(NonZeroU32::new(config.burst_allowance).unwrap());

        let metadata_quota = Quota::with_period(Duration::from_secs(60) / config.metadata_rpm)
            .unwrap()
            .allow_burst(NonZeroU32::new(config.burst_allowance).unwrap());

        let reset_quota = Quota::with_period(Duration::from_secs(60) / config.reset_rpm)
            .unwrap()
            .allow_burst(NonZeroU32::new(config.burst_allowance).unwrap());

        Self {
            session_limiter: RateLimiter::direct(session_quota),
            search_limiter: RateLimiter::direct(search_quota),
            metadata_limiter: RateLimiter::direct(metadata_quota),
            reset_limiter: RateLimiter::direct(reset_quota),
        }
    }

    /// Waits for permission to make a request of the given type
    pub async fn wait_for_permission(&self, request_type: RequestType) {
        let limiter = match request_type {
            RequestType::Session => &self.session_limiter,
            RequestType::Search => &self.search_limiter,
            RequestType::Metadata => &self.metadata_limiter,
            RequestType::Reset => &self.reset_limiter,
        };

        // Wait until we can make the request (logging handled by middleware)
        limiter.until_ready().await;
    }
}

impl Default for BannerRateLimiter {
    fn default() -> Self {
        Self::new(RateLimitingConfig::default())
    }
}

/// A shared rate limiter instance
pub type SharedRateLimiter = Arc<BannerRateLimiter>;

/// Creates a new shared rate limiter with custom configuration
pub fn create_shared_rate_limiter(config: Option<RateLimitingConfig>) -> SharedRateLimiter {
    Arc::new(BannerRateLimiter::new(config.unwrap_or_default()))
}
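
A worked instance of the quota arithmetic above (illustrative): dividing the 60-second window by the configured rpm yields the refill period, so search_rpm = 30 gives the "~2s interval" quoted by the middleware descriptions.

    let rpm: u32 = 30;
    let period = std::time::Duration::from_secs(60) / rpm;
    assert_eq!(period, std::time::Duration::from_secs(2));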

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_with_default_config() {
        let _limiter = BannerRateLimiter::new(RateLimitingConfig::default());
    }

    #[test]
    fn test_new_with_custom_config() {
        let config = RateLimitingConfig {
            session_rpm: 10,
            search_rpm: 30,
            metadata_rpm: 20,
            reset_rpm: 15,
            burst_allowance: 5,
        };
        let _limiter = BannerRateLimiter::new(config);
    }

    #[test]
    fn test_new_with_minimum_valid_values() {
        let config = RateLimitingConfig {
            session_rpm: 1,
            search_rpm: 1,
            metadata_rpm: 1,
            reset_rpm: 1,
            burst_allowance: 1,
        };
        let _limiter = BannerRateLimiter::new(config);
    }

    #[test]
    fn test_new_with_high_rpm_values() {
        let config = RateLimitingConfig {
            session_rpm: 10000,
            search_rpm: 10000,
            metadata_rpm: 10000,
            reset_rpm: 10000,
            burst_allowance: 1,
        };
        let _limiter = BannerRateLimiter::new(config);
    }

    #[test]
    fn test_default_impl() {
        let _limiter = BannerRateLimiter::default();
    }

    #[test]
    #[should_panic]
    fn test_new_panics_on_zero_session_rpm() {
        let config = RateLimitingConfig {
            session_rpm: 0,
            ..RateLimitingConfig::default()
        };
        let _limiter = BannerRateLimiter::new(config);
    }

    #[test]
    #[should_panic]
    fn test_new_panics_on_zero_search_rpm() {
        let config = RateLimitingConfig {
            search_rpm: 0,
            ..RateLimitingConfig::default()
        };
        let _limiter = BannerRateLimiter::new(config);
    }

    #[test]
    #[should_panic]
    fn test_new_panics_on_zero_metadata_rpm() {
        let config = RateLimitingConfig {
            metadata_rpm: 0,
            ..RateLimitingConfig::default()
        };
        let _limiter = BannerRateLimiter::new(config);
    }

    #[test]
    #[should_panic]
    fn test_new_panics_on_zero_reset_rpm() {
        let config = RateLimitingConfig {
            reset_rpm: 0,
            ..RateLimitingConfig::default()
        };
        let _limiter = BannerRateLimiter::new(config);
    }

    #[test]
    #[should_panic]
    fn test_new_panics_on_zero_burst_allowance() {
        let config = RateLimitingConfig {
            burst_allowance: 0,
            ..RateLimitingConfig::default()
        };
        let _limiter = BannerRateLimiter::new(config);
    }

    #[tokio::test]
    async fn test_wait_for_permission_completes() {
        let limiter = BannerRateLimiter::default();
        let timeout_duration = std::time::Duration::from_secs(1);

        for request_type in [
            RequestType::Session,
            RequestType::Search,
            RequestType::Metadata,
            RequestType::Reset,
        ] {
            let result =
                tokio::time::timeout(timeout_duration, limiter.wait_for_permission(request_type))
                    .await;
            assert!(
                result.is_ok(),
                "wait_for_permission timed out for {:?}",
                request_type
            );
        }
    }
}
@@ -1,292 +0,0 @@
//! Course scraping functionality for the Banner API.

use crate::banner::{api::BannerApi, models::*, query::SearchQuery};
use anyhow::{Context, Result};
use redis::AsyncCommands;
use std::sync::Arc;
use std::time::Duration;
use tokio::time;
use tracing::{debug, error, info, warn};

/// Priority majors that should be scraped more frequently
const PRIORITY_MAJORS: &[&str] = &["CS", "CPE", "MAT", "EE", "IS"];

/// Maximum number of courses to fetch per page
const MAX_PAGE_SIZE: i32 = 500;

/// Course scraper for Banner API
pub struct CourseScraper {
    api: Arc<BannerApi>,
    redis_client: redis::Client,
}

impl CourseScraper {
    /// Creates a new course scraper
    pub fn new(api: Arc<BannerApi>, redis_url: &str) -> Result<Self> {
        let redis_client =
            redis::Client::open(redis_url).context("Failed to create Redis client")?;

        Ok(Self { api, redis_client })
    }

    /// Scrapes all courses and stores them in Redis
    pub async fn scrape_all(&self, term: &str) -> Result<()> {
        // Get all subjects
        let subjects = self
            .api
            .get_subjects("", term, 1, 100)
            .await
            .context("Failed to get subjects for scraping")?;

        if subjects.is_empty() {
            return Err(anyhow::anyhow!("no subjects found for term {term}"));
        }

        // Categorize subjects
        let (priority_subjects, other_subjects): (Vec<_>, Vec<_>) = subjects
            .into_iter()
            .partition(|subject| PRIORITY_MAJORS.contains(&subject.code.as_str()));

        // Get expired subjects that need scraping
        let mut expired_subjects = Vec::new();
        expired_subjects.extend(self.get_expired_subjects(&priority_subjects, term).await?);
        expired_subjects.extend(self.get_expired_subjects(&other_subjects, term).await?);

        if expired_subjects.is_empty() {
            info!("no expired subjects found, skipping scrape");
            return Ok(());
        }

        info!(
            "scraping {count} subjects for term {term}",
            count = expired_subjects.len()
        );

        // Scrape each expired subject
        for subject in expired_subjects {
            if let Err(e) = self.scrape_subject(&subject.code, term).await {
                error!(
                    "failed to scrape subject {subject}: {e}",
                    subject = subject.code
                );
            }

            // Rate limiting between subjects
            time::sleep(Duration::from_secs(2)).await;
        }

        Ok(())
    }

    /// Gets subjects that have expired and need to be scraped
    async fn get_expired_subjects(&self, subjects: &[Pair], term: &str) -> Result<Vec<Pair>> {
        let mut conn = self
            .redis_client
            .get_multiplexed_async_connection()
            .await
            .context("Failed to get Redis connection")?;

        let mut expired = Vec::new();

        for subject in subjects {
            let key = format!("scraped:{code}:{term}", code = subject.code);
            let scraped: Option<String> = conn
                .get(&key)
                .await
                .context("Failed to check scrape status in Redis")?;

            // If not scraped or marked as expired (empty/0), add to list
            if scraped.is_none() || scraped.as_deref() == Some("0") {
                expired.push(subject.clone());
            }
        }

        Ok(expired)
    }

    /// Scrapes all courses for a specific subject
    pub async fn scrape_subject(&self, subject: &str, term: &str) -> Result<()> {
        let mut offset = 0;
        let mut total_courses = 0;

        loop {
            let query = SearchQuery::new()
                .subject(subject)
                .offset(offset)
                .max_results(MAX_PAGE_SIZE * 2);

            // Ensure session term is selected before searching
            self.api.select_term(term).await?;

            let result = self
                .api
                .search(term, &query, "subjectDescription", false)
                .await
                .with_context(|| {
                    format!("failed to search for subject {subject} at offset {offset}")
                })?;

            if !result.success {
                return Err(anyhow::anyhow!(
                    "search marked unsuccessful for subject {subject}"
                ));
            }

            let course_count = result.data.as_ref().map(|v| v.len() as i32).unwrap_or(0);
            total_courses += course_count;

            debug!(
                "retrieved {count} courses for subject {subject} at offset {offset}",
                count = course_count
            );

            // Store each course in Redis
            for course in result.data.unwrap_or_default() {
                if let Err(e) = self.store_course(&course).await {
                    error!(
                        "failed to store course {crn}: {e}",
                        crn = course.course_reference_number
                    );
                }
            }

            // Check if we got a full page and should continue
            if course_count >= MAX_PAGE_SIZE {
                if course_count > MAX_PAGE_SIZE {
                    warn!(
                        "course count {count} exceeds max page size {max_page_size}",
                        count = course_count,
                        max_page_size = MAX_PAGE_SIZE
                    );
                }

                offset += MAX_PAGE_SIZE;
                debug!("continuing to next page for subject {subject} at offset {offset}");

                // Rate limiting between pages
                time::sleep(Duration::from_secs(3)).await;
                continue;
            }

            break;
        }

        info!(
            "scraped {count} total courses for subject {subject}",
            count = total_courses
        );

        // Mark subject as scraped with expiry
        self.mark_subject_scraped(subject, term, total_courses)
            .await?;

        Ok(())
    }

    /// Stores a course in Redis
    async fn store_course(&self, course: &Course) -> Result<()> {
        let mut conn = self
            .redis_client
            .get_multiplexed_async_connection()
            .await
            .context("Failed to get Redis connection")?;

        let key = format!("class:{crn}", crn = course.course_reference_number);
        let serialized = serde_json::to_string(course).context("Failed to serialize course")?;

        let _: () = conn
            .set(&key, serialized)
            .await
            .context("Failed to store course in Redis")?;

        Ok(())
    }

    /// Marks a subject as scraped with appropriate expiry time
    async fn mark_subject_scraped(
        &self,
        subject: &str,
        term: &str,
        course_count: i32,
    ) -> Result<()> {
        let mut conn = self
            .redis_client
            .get_multiplexed_async_connection()
            .await
            .context("Failed to get Redis connection")?;

        let key = format!("scraped:{subject}:{term}", subject = subject);
        let expiry = self.calculate_expiry(subject, course_count);

        let value = if course_count == 0 { -1 } else { course_count };

        let _: () = conn
            .set_ex(&key, value, expiry.as_secs())
            .await
            .context("Failed to mark subject as scraped")?;

        debug!(
            "marked subject {subject} as scraped with {count} courses, expiry: {expiry:?}",
            subject = subject,
            count = course_count,
            expiry = expiry
        );

        Ok(())
    }

    /// Calculates expiry time for a scraped subject based on various factors
    fn calculate_expiry(&self, subject: &str, course_count: i32) -> Duration {
        // Base calculation: 1 hour per 100 courses
        let mut base_expiry = Duration::from_secs(3600 * (course_count as u64 / 100).max(1));

        // Special handling for subjects with few courses
        if course_count < 50 {
            // Linear interpolation: 1 course = 12 hours, 49 courses = 1 hour
            let hours = 12.0 - ((course_count as f64 - 1.0) / 48.0) * 11.0;
            base_expiry = Duration::from_secs((hours * 3600.0) as u64);
        }

        // Priority subjects get shorter expiry (more frequent updates)
        if PRIORITY_MAJORS.contains(&subject) {
            base_expiry /= 3;
        }

        // Add random variance (±15%)
        let variance = (base_expiry.as_secs() as f64 * 0.15) as u64;
        let random_offset = (rand::random::<f64>() - 0.5) * 2.0 * variance as f64;

        let final_expiry = if random_offset > 0.0 {
            base_expiry + Duration::from_secs(random_offset as u64)
        } else {
            base_expiry.saturating_sub(Duration::from_secs((-random_offset) as u64))
        };

        // Ensure minimum of 1 hour
        final_expiry.max(Duration::from_secs(3600))
    }
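
For reference, a worked instance of the removed interpolation (illustrative only): a subject with 25 courses would have received 12.0 - ((25 - 1) / 48.0) * 11.0 = 6.5 hours, before the priority and variance adjustments.

    let count = 25.0_f64;
    let hours = 12.0 - ((count - 1.0) / 48.0) * 11.0;
    assert!((hours - 6.5).abs() < 1e-9);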

    /// Gets a course from Redis cache
    pub async fn get_course(&self, crn: &str) -> Result<Option<Course>> {
        let mut conn = self
            .redis_client
            .get_multiplexed_async_connection()
            .await
            .context("Failed to get Redis connection")?;

        let key = format!("class:{crn}");
        let serialized: Option<String> = conn
            .get(&key)
            .await
            .context("Failed to get course from Redis")?;

        match serialized {
            Some(data) => {
                let course: Course = serde_json::from_str(&data)
                    .context("Failed to deserialize course from Redis")?;
                Ok(Some(course))
            }
            None => Ok(None),
        }
    }
}
+496 -119
@@ -1,133 +1,522 @@
//! Session management for Banner API.

use crate::banner::util::user_agent;
use anyhow::Result;
use rand::distributions::{Alphanumeric, DistString};
use reqwest::Client;
use std::sync::Mutex;
use crate::banner::BannerTerm;
use crate::banner::models::Term;
use anyhow::{Context, Result};
use cookie::Cookie;
use dashmap::DashMap;
use governor::state::InMemoryState;
use governor::{Quota, RateLimiter};
use rand::distr::{Alphanumeric, SampleString};
use reqwest_middleware::ClientWithMiddleware;
use std::collections::{HashMap, VecDeque};

use std::ops::{Deref, DerefMut};
use std::sync::{Arc, LazyLock};
use std::time::{Duration, Instant};
use tracing::{debug, info};
use tokio::sync::{Mutex, Notify};
use tracing::{debug, info, trace};
use url::Url;

/// Session manager for Banner API interactions
#[derive(Debug)]
pub struct SessionManager {
    current_session: Mutex<Option<SessionData>>,
    base_url: String,
    client: Client,
}
const SESSION_EXPIRY: Duration = Duration::from_secs(25 * 60); // 25 minutes

// A global rate limiter to ensure we only try to create one new session every 10 seconds,
// preventing us from overwhelming the server with session creation requests.
static SESSION_CREATION_RATE_LIMITER: LazyLock<
    RateLimiter<governor::state::direct::NotKeyed, InMemoryState, governor::clock::DefaultClock>,
> = LazyLock::new(|| RateLimiter::direct(Quota::with_period(Duration::from_secs(10)).unwrap()));

/// Represents an active anonymous session within the Banner API.
/// Identified by multiple persistent cookies, as well as a client-generated "unique session ID".
#[derive(Debug, Clone)]
struct SessionData {
    session_id: String,
pub struct BannerSession {
    // Randomly generated
    pub unique_session_id: String,
    // Timestamp of creation
    created_at: Instant,
    // Timestamp of last activity
    last_activity: Option<Instant>,
    // Cookie values from initial registration page
    jsessionid: String,
    ssb_cookie: String,
}

impl SessionManager {
    const SESSION_EXPIRY: Duration = Duration::from_secs(25 * 60); // 25 minutes
/// Generates a new session ID mimicking Banner's format
fn generate_session_id() -> String {
    let random_part = Alphanumeric.sample_string(&mut rand::rng(), 5);
    let timestamp = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap()
        .as_millis();
    format!("{}{}", random_part, timestamp)
}
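
Illustrative only: a generated ID is five random alphanumeric characters followed by the current Unix time in milliseconds, e.g. something like "aB3xZ1700000000000".

    let id = generate_session_id();
    assert!(id.len() >= 5 + 13); // 5 random chars + a 13-digit millisecond timestamp
    assert!(id.chars().take(5).all(|c| c.is_ascii_alphanumeric()));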
|
||||
|
||||
/// Generates a timestamp-based nonce
|
||||
pub fn nonce() -> String {
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_millis()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
impl BannerSession {
|
||||
/// Creates a new session
|
||||
pub fn new(unique_session_id: &str, jsessionid: &str, ssb_cookie: &str) -> Self {
|
||||
let now = Instant::now();
|
||||
|
||||
/// Creates a new session manager
|
||||
pub fn new(base_url: String, client: Client) -> Self {
|
||||
Self {
|
||||
current_session: Mutex::new(None),
|
||||
base_url,
|
||||
client,
|
||||
created_at: now,
|
||||
last_activity: None,
|
||||
unique_session_id: unique_session_id.to_string(),
|
||||
jsessionid: jsessionid.to_string(),
|
||||
ssb_cookie: ssb_cookie.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensures a valid session is available, creating one if necessary
|
||||
pub fn ensure_session(&self) -> Result<String> {
|
||||
let start_time = std::time::Instant::now();
|
||||
let mut session_guard = self.current_session.lock().unwrap();
|
||||
/// Returns the unique session ID
|
||||
pub fn id(&self) -> &str {
|
||||
&self.unique_session_id
|
||||
}
|
||||
|
||||
if let Some(ref session) = *session_guard
|
||||
&& session.created_at.elapsed() < Self::SESSION_EXPIRY
|
||||
{
|
||||
let elapsed = start_time.elapsed();
|
||||
debug!(
|
||||
session_id = session.session_id,
|
||||
elapsed = format!("{:.2?}", elapsed),
|
||||
"reusing existing banner session"
|
||||
);
|
||||
return Ok(session.session_id.clone());
|
||||
/// Updates the last activity timestamp
|
||||
pub fn touch(&mut self) {
|
||||
self.last_activity = Some(Instant::now());
|
||||
}
|
||||
|
||||
/// Returns true if the session is expired
|
||||
pub fn is_expired(&self) -> bool {
|
||||
self.last_activity.unwrap_or(self.created_at).elapsed() > SESSION_EXPIRY
|
||||
}
|
||||
|
||||
/// Returns a string used to for the "Cookie" header
|
||||
pub fn cookie(&self) -> String {
|
||||
format!(
|
||||
"JSESSIONID={}; SSB_COOKIE={}",
|
||||
self.jsessionid, self.ssb_cookie
|
||||
)
|
||||
}
|
||||
|
||||
pub fn been_used(&self) -> bool {
|
||||
self.last_activity.is_some()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn new_with_created_at(
|
||||
unique_session_id: &str,
|
||||
jsessionid: &str,
|
||||
ssb_cookie: &str,
|
||||
created_at: Instant,
|
||||
) -> Self {
|
||||
Self {
|
||||
unique_session_id: unique_session_id.to_string(),
|
||||
created_at,
|
||||
last_activity: None,
|
||||
jsessionid: jsessionid.to_string(),
|
||||
ssb_cookie: ssb_cookie.to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Generate new session
|
||||
let session_id = self.generate_session_id();
|
||||
*session_guard = Some(SessionData {
|
||||
session_id: session_id.clone(),
|
||||
created_at: Instant::now(),
|
||||
});
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
let elapsed = start_time.elapsed();
|
||||
debug!(
|
||||
session_id = session_id,
|
||||
elapsed = format!("{:.2?}", elapsed),
|
||||
"generated new banner session"
|
||||
#[test]
|
||||
fn test_new_session_creates_session() {
|
||||
let session = BannerSession::new("sess-1", "JSID123", "SSB456");
|
||||
assert_eq!(session.id(), "sess-1");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fresh_session_not_expired() {
|
||||
let session = BannerSession::new("sess-1", "JSID123", "SSB456");
|
||||
assert!(!session.is_expired());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fresh_session_not_been_used() {
|
||||
let session = BannerSession::new("sess-1", "JSID123", "SSB456");
|
||||
assert!(!session.been_used());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_touch_marks_used() {
|
||||
let mut session = BannerSession::new("sess-1", "JSID123", "SSB456");
|
||||
session.touch();
|
||||
assert!(session.been_used());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_touched_session_not_expired() {
|
||||
let mut session = BannerSession::new("sess-1", "JSID123", "SSB456");
|
||||
session.touch();
|
||||
assert!(!session.is_expired());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cookie_format() {
|
||||
let session = BannerSession::new("sess-1", "JSID123", "SSB456");
|
||||
assert_eq!(session.cookie(), "JSESSIONID=JSID123; SSB_COOKIE=SSB456");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_id_returns_unique_session_id() {
|
||||
let session = BannerSession::new("my-unique-id", "JSID123", "SSB456");
|
||||
assert_eq!(session.id(), "my-unique-id");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_expired_session() {
|
||||
let session = BannerSession::new_with_created_at(
|
||||
"sess-old",
|
||||
"JSID123",
|
||||
"SSB456",
|
||||
Instant::now() - Duration::from_secs(26 * 60),
|
||||
);
|
||||
Ok(session_id)
|
||||
assert!(session.is_expired());
|
||||
}
|
||||
|
||||
/// Generates a new session ID mimicking Banner's format
|
||||
fn generate_session_id(&self) -> String {
|
||||
let random_part = Alphanumeric.sample_string(&mut rand::thread_rng(), 5);
|
||||
let timestamp = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_millis();
|
||||
format!("{}{}", random_part, timestamp)
|
||||
#[test]
|
||||
fn test_not_quite_expired_session() {
|
||||
let session = BannerSession::new_with_created_at(
|
||||
"sess-recent",
|
||||
"JSID123",
|
||||
"SSB456",
|
||||
Instant::now() - Duration::from_secs(24 * 60),
|
||||
);
|
||||
assert!(!session.is_expired());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_session_at_expiry_boundary() {
|
||||
let session = BannerSession::new_with_created_at(
|
||||
"sess-boundary",
|
||||
"JSID123",
|
||||
"SSB456",
|
||||
Instant::now() - Duration::from_secs(25 * 60 + 1),
|
||||
);
|
||||
assert!(session.is_expired());
|
||||
}
|
||||
}
|
||||
|
||||
/// A smart pointer that returns a BannerSession to the pool when dropped.
|
||||
pub struct PooledSession {
|
||||
session: Option<BannerSession>,
|
||||
// This Arc points directly to the term-specific pool.
|
||||
pool: Arc<TermPool>,
|
||||
}
|
||||
|
||||
impl PooledSession {
|
||||
pub fn been_used(&self) -> bool {
|
||||
self.session.as_ref().unwrap().been_used()
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for PooledSession {
|
||||
type Target = BannerSession;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
// The option is only ever None after drop is called, so this is safe.
|
||||
self.session.as_ref().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for PooledSession {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
self.session.as_mut().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
/// The magic happens here: when the guard goes out of scope, this is called.
|
||||
impl Drop for PooledSession {
|
||||
fn drop(&mut self) {
|
||||
if let Some(session) = self.session.take() {
|
||||
let pool = self.pool.clone();
|
||||
// Since drop() cannot be async, we spawn a task to return the session.
|
||||
tokio::spawn(async move {
|
||||
pool.release(session).await;
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TermPool {
|
||||
sessions: Mutex<VecDeque<BannerSession>>,
|
||||
notifier: Notify,
|
||||
is_creating: Mutex<bool>,
|
||||
}
|
||||
|
||||
impl TermPool {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
sessions: Mutex::new(VecDeque::new()),
|
||||
notifier: Notify::new(),
|
||||
is_creating: Mutex::new(false),
|
||||
}
|
||||
}
|
||||
|
||||
async fn release(&self, session: BannerSession) {
|
||||
let id = session.unique_session_id.clone();
|
||||
if session.is_expired() {
|
||||
debug!(id = id, "Session expired, dropping");
|
||||
// Wake up a waiter, as it might need to create a new session
|
||||
// if this was the last one.
|
||||
self.notifier.notify_one();
|
||||
return;
|
||||
}
|
||||
|
||||
let mut queue = self.sessions.lock().await;
|
||||
queue.push_back(session);
|
||||
drop(queue); // Release lock before notifying
|
||||
|
||||
self.notifier.notify_one();
|
    }
}

pub struct SessionPool {
    sessions: DashMap<Term, Arc<TermPool>>,
    http: ClientWithMiddleware,
    base_url: String,
}

impl SessionPool {
    pub fn new(http: ClientWithMiddleware, base_url: String) -> Self {
        Self {
            sessions: DashMap::new(),
            http,
            base_url,
        }
    }

    /// Acquires a session from the pool.
    /// If no sessions are available, a new one is created on demand,
    /// respecting the global rate limit.
    pub async fn acquire(&self, term: Term) -> Result<PooledSession> {
        let term_pool = self
            .sessions
            .entry(term)
            .or_insert_with(|| Arc::new(TermPool::new()))
            .clone();

        let start = Instant::now();
        let mut waited_for_creation = false;

        loop {
            // Fast path: Try to get an existing, non-expired session.
            {
                let mut queue = term_pool.sessions.lock().await;
                if let Some(session) = queue.pop_front() {
                    if !session.is_expired() {
                        return Ok(PooledSession {
                            session: Some(session),
                            pool: Arc::clone(&term_pool),
                        });
                    } else {
                        debug!(id = session.unique_session_id, "Discarded expired session");
                    }
                }
            } // MutexGuard is dropped, lock is released.

            // Slow path: No sessions available. We must either wait or become the creator.
            let mut is_creating_guard = term_pool.is_creating.lock().await;
            if *is_creating_guard {
                // Another task is already creating a session. Release the lock and wait.
                drop(is_creating_guard);
                if !waited_for_creation {
                    trace!("Waiting for another task to create session");
                    waited_for_creation = true;
                }
                term_pool.notifier.notified().await;
                // Loop back to the top to try the fast path again.
                continue;
            }

            // This task is now the designated creator.
            *is_creating_guard = true;
            drop(is_creating_guard);

            // Race: wait for a session to be returned OR for the rate limiter to allow a new one.
            trace!("Pool empty, creating new session");
            tokio::select! {
                _ = term_pool.notifier.notified() => {
                    // A session was returned while we were waiting!
                    // We are no longer the creator. Reset the flag and loop to race for the new session.
                    let mut guard = term_pool.is_creating.lock().await;
                    *guard = false;
                    drop(guard);
                    continue;
                }
                _ = SESSION_CREATION_RATE_LIMITER.until_ready() => {
                    // The rate limit has elapsed. It's our job to create the session.
                    let new_session_result = self.create_session(&term).await;

                    // After creation, we are no longer the creator. Reset the flag
                    // and notify all other waiting tasks.
                    let mut guard = term_pool.is_creating.lock().await;
                    *guard = false;
                    drop(guard);
                    term_pool.notifier.notify_waiters();

                    match new_session_result {
                        Ok(new_session) => {
                            let elapsed = start.elapsed();
                            debug!(
                                id = new_session.unique_session_id,
                                elapsed_ms = elapsed.as_millis(),
                                "Created new session"
                            );
                            return Ok(PooledSession {
                                session: Some(new_session),
                                pool: term_pool,
                            });
                        }
                        Err(e) => {
                            // Propagate the error if session creation failed.
                            return Err(e.context("Failed to create new session in pool"));
                        }
                    }
                }
            }
        }
    }
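The acquire loop combines three pieces: a per-term queue of idle sessions, an is_creating flag so only one task pays the creation cost, and a Notify so returned sessions wake waiters. Below is a minimal sketch of just that designated-creator handshake, with the pool, expiry, and rate limiting stripped out; the names here are illustrative, not the project's.

use std::sync::Arc;
use tokio::sync::{Mutex, Notify};

struct Slot {
    value: Mutex<Option<String>>, // stand-in for the session queue
    is_creating: Mutex<bool>,
    notifier: Notify,
}

async fn get_or_create(slot: Arc<Slot>) -> String {
    loop {
        // Fast path: take an existing value if one is available.
        if let Some(v) = slot.value.lock().await.take() {
            return v;
        }
        // Slow path: either wait for the current creator, or become it.
        let mut creating = slot.is_creating.lock().await;
        if *creating {
            drop(creating);
            slot.notifier.notified().await; // woken when a value is returned or created
            continue; // retry the fast path
        }
        *creating = true;
        drop(creating);

        let v = expensive_create().await; // the real code rate-limits this step
        *slot.is_creating.lock().await = false;
        slot.notifier.notify_waiters(); // let the other waiters re-race
        return v;
    }
}

async fn expensive_create() -> String {
    "new-session".to_string() // placeholder for the Banner handshake
}

The tokio::select! in the real acquire adds one refinement over this sketch: even the designated creator abandons creation if a session is returned to the pool first.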

-    /// Sets up initial session cookies by making required Banner API requests
-    pub async fn setup(&self) -> Result<()> {
-        info!("setting up banner session...");
    pub async fn create_session(&self, term: &Term) -> Result<BannerSession> {
        info!(term = %term, "setting up banner session");

-        let request_paths = ["/registration/registration", "/selfServiceMenu/data"];
        // The 'register' or 'search' registration page
        let initial_registration = self
            .http
            .get(format!("{}/registration", self.base_url))
            .send()
            .await?;
        // TODO: Validate success

-        for path in &request_paths {
-            let url = format!("{}{}", self.base_url, path);
-            let response = self
-                .client
-                .get(&url)
-                .query(&[("_", Self::nonce())])
-                .header("User-Agent", user_agent())
-                .send()
-                .await?;
        let cookies = initial_registration
            .headers()
            .get_all("Set-Cookie")
            .iter()
            .filter_map(|header_value| {
                if let Ok(cookie_str) = header_value.to_str() {
                    if let Ok(cookie) = Cookie::parse(cookie_str) {
                        Some((cookie.name().to_string(), cookie.value().to_string()))
                    } else {
                        None
                    }
                } else {
                    None
                }
            })
            .collect::<HashMap<String, String>>();

-            if !response.status().is_success() {
-                return Err(anyhow::anyhow!(
-                    "Failed to setup session, request to {} returned {}",
-                    path,
-                    response.status()
-                ));
-            }
        let jsessionid = cookies
            .get("JSESSIONID")
            .ok_or_else(|| anyhow::anyhow!("JSESSIONID cookie missing"))?;
        let ssb_cookie = cookies
            .get("SSB_COOKIE")
            .ok_or_else(|| anyhow::anyhow!("SSB_COOKIE cookie missing"))?;
        let cookie_header = format!("JSESSIONID={}; SSB_COOKIE={}", jsessionid, ssb_cookie);

        self.http
            .get(format!("{}/selfServiceMenu/data", self.base_url))
            .header("Cookie", &cookie_header)
            .send()
            .await?
            .error_for_status()
            .context("Failed to get data page")?;

        self.http
            .get(format!("{}/term/termSelection", self.base_url))
            .header("Cookie", &cookie_header)
            .query(&[("mode", "search")])
            .send()
            .await?
            .error_for_status()
            .context("Failed to get term selection page")?;
        // TODO: Validate success

-        let terms = self.get_terms("", 1, 10).await?;
-        if !terms.iter().any(|t| t.code == term.to_string()) {
-            return Err(anyhow::anyhow!("Failed to get term search response"));
-        }
-
-        // Note: Cookie validation would require additional setup in a real implementation
-        debug!("session setup complete");
-        Ok(())
        let specific_term_search_response = self.get_terms(&term.to_string(), 1, 10).await?;
        if !specific_term_search_response
            .iter()
            .any(|t| t.code == term.to_string())
        {
            return Err(anyhow::anyhow!("Failed to get term search response"));
        }

        let unique_session_id = generate_session_id();
        self.select_term(&term.to_string(), &unique_session_id, &cookie_header)
            .await?;

        Ok(BannerSession::new(
            &unique_session_id,
            jsessionid,
            ssb_cookie,
        ))
    }

    /// Retrieves a list of terms from the Banner API.
    pub async fn get_terms(
        &self,
        search: &str,
        page: i32,
        max_results: i32,
    ) -> Result<Vec<BannerTerm>> {
        if page <= 0 {
            return Err(anyhow::anyhow!("Page must be greater than 0"));
        }

        let url = format!("{}/classSearch/getTerms", self.base_url);
        let params = [
            ("searchTerm", search),
            ("offset", &page.to_string()),
            ("max", &max_results.to_string()),
            ("_", &nonce()),
        ];

        let response = self
            .http
            .get(&url)
            .query(&params)
            .send()
            .await
            .with_context(|| "Failed to get terms".to_string())?;

        let terms: Vec<BannerTerm> = response
            .json()
            .await
            .context("Failed to parse terms response")?;

        Ok(terms)
    }

    /// Selects a term for the current session
-    pub async fn select_term(&self, term: &str) -> Result<()> {
-        let session_id = self.ensure_session()?;
-
    pub async fn select_term(
        &self,
        term: &str,
        unique_session_id: &str,
        cookie_header: &str,
    ) -> Result<()> {
        let form_data = [
            ("term", term),
            ("studyPath", ""),
            ("studyPathText", ""),
            ("startDatepicker", ""),
            ("endDatepicker", ""),
-            ("uniqueSessionId", &session_id),
            ("uniqueSessionId", unique_session_id),
        ];

        let url = format!("{}/term/search", self.base_url);
        let response = self
-            .client
            .http
            .post(&url)
            .header("Cookie", cookie_header)
            .query(&[("mode", "search")])
            .form(&form_data)
            .header("User-Agent", user_agent())
            .header("Content-Type", "application/x-www-form-urlencoded")
            .send()
            .await?;

@@ -141,18 +530,36 @@ impl SessionManager {

        #[derive(serde::Deserialize)]
        struct RedirectResponse {
-            #[serde(rename = "fwdUrl")]
            #[serde(rename = "fwdURL")]
            fwd_url: String,
        }

        let redirect: RedirectResponse = response.json().await?;

        let base_url_path = self
            .base_url
            .parse::<Url>()
            .context("Failed to parse base URL")?
            .path()
            .to_string();
        let non_overlap_redirect =
            redirect
                .fwd_url
                .strip_prefix(&base_url_path)
                .ok_or_else(|| {
                    anyhow::anyhow!(
                        "Redirect URL '{}' does not start with expected prefix '{}'",
                        redirect.fwd_url,
                        base_url_path
                    )
                })?;

        // Follow the redirect
-        let redirect_url = format!("{}{}", self.base_url, redirect.fwd_url);
        let redirect_url = format!("{}{}", self.base_url, non_overlap_redirect);
        let redirect_response = self
-            .client
            .http
            .get(&redirect_url)
            .header("User-Agent", user_agent())
            .header("Cookie", cookie_header)
            .send()
            .await?;

@@ -163,36 +570,6 @@ impl SessionManager {
            ));
        }

        debug!("successfully selected term: {}", term);
        Ok(())
    }

-    /// Resets the data form (required before new searches)
-    pub async fn reset_data_form(&self) -> Result<()> {
-        let url = format!("{}/classSearch/resetDataForm", self.base_url);
-        let response = self
-            .client
-            .post(&url)
-            .header("User-Agent", user_agent())
-            .send()
-            .await?;
-
-        if !response.status().is_success() {
-            return Err(anyhow::anyhow!(
-                "Failed to reset data form: {}",
-                response.status()
-            ));
-        }
-
-        Ok(())
-    }
-
-    /// Generates a timestamp-based nonce
-    pub fn nonce() -> String {
-        std::time::SystemTime::now()
-            .duration_since(std::time::UNIX_EPOCH)
-            .unwrap()
-            .as_millis()
-            .to_string()
-    }
}
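SESSION_CREATION_RATE_LIMITER, consumed via until_ready() in acquire above, is not defined anywhere in this diff. With the governor crate (an assumption; the project may define it differently), a process-wide limiter matching the default session_rpm of 6 from the configuration later in this diff could look like:

use std::num::NonZeroU32;
use std::sync::LazyLock;

use governor::{DefaultDirectRateLimiter, Quota, RateLimiter};

// Hypothetical definition: allow 6 session creations per minute, process-wide.
static SESSION_CREATION_RATE_LIMITER: LazyLock<DefaultDirectRateLimiter> =
    LazyLock::new(|| RateLimiter::direct(Quota::per_minute(NonZeroU32::new(6).unwrap())));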

+1
-1
@@ -2,5 +2,5 @@

/// Returns a browser-like user agent string.
pub fn user_agent() -> &'static str {
-    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36"
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36"
}

@@ -0,0 +1,131 @@
use banner::banner::{BannerApi, SearchQuery, Term};
use banner::config::Config;
use banner::error::Result;
use figment::{Figment, providers::Env};
use futures::future;
use tracing::{error, info};
use tracing_subscriber::{EnvFilter, FmtSubscriber};

#[tokio::main]
async fn main() -> Result<()> {
    // Configure logging
    let filter = EnvFilter::try_from_default_env()
        .unwrap_or_else(|_| EnvFilter::new("info,banner=trace,reqwest=debug,hyper=info"));
    let subscriber = FmtSubscriber::builder()
        .with_env_filter(filter)
        .with_target(true)
        .finish();
    tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed");

    info!("Starting Banner search test");

    dotenvy::dotenv().ok();

    // Load configuration
    let config: Config = Figment::new()
        .merge(Env::raw())
        .extract()
        .expect("Failed to load config");

    info!(
        banner_base_url = config.banner_base_url,
        "Configuration loaded"
    );

    // Create Banner API client
    let banner_api = BannerApi::new_with_config(config.banner_base_url, config.rate_limiting)
        .expect("Failed to create BannerApi");

    // Get current term
    let term = Term::get_current().inner().to_string();
    info!(term = term, "Using current term");

    // Define multiple search queries
    let queries = vec![
        (
            "CS Courses",
            SearchQuery::new().subject("CS").max_results(10),
        ),
        (
            "Math Courses",
            SearchQuery::new().subject("MAT").max_results(10),
        ),
        (
            "3000-level CS",
            SearchQuery::new()
                .subject("CS")
                .course_numbers(3000, 3999)
                .max_results(8),
        ),
        (
            "High Credit Courses",
            SearchQuery::new().credits(4, 6).max_results(8),
        ),
        (
            "Programming Courses",
            SearchQuery::new().keyword("programming").max_results(6),
        ),
    ];

    info!(query_count = queries.len(), "Executing concurrent searches");

    // Execute all searches concurrently
    let search_futures = queries.into_iter().map(|(label, query)| {
        info!(label = %label, "Starting search");
        let banner_api = &banner_api;
        let term = &term;
        async move {
            let result = banner_api
                .search(term, &query, "subjectDescription", false)
                .await;
            (label, result)
        }
    });

    // Wait for all searches to complete
    let search_results = future::join_all(search_futures)
        .await
        .into_iter()
        .filter_map(|(label, result)| match result {
            Ok(search_result) => {
                info!(
                    label = label,
                    success = search_result.success,
                    total_count = search_result.total_count,
                    "Search completed successfully"
                );
                Some((label, search_result))
            }
            Err(e) => {
                error!(label = label, error = ?e, "Search failed");
                None
            }
        })
        .collect::<Vec<_>>();

    // Process and display results
    for (label, search_result) in search_results {
        println!("\n=== {} ===", label);
        if let Some(courses) = &search_result.data {
            if courses.is_empty() {
                println!("  No courses found");
            } else {
                println!("  Found {} courses:", courses.len());
                for course in courses {
                    println!(
                        "    {} {} - {} (CRN: {})",
                        course.subject,
                        course.course_number,
                        course.course_title,
                        course.course_reference_number
                    );
                }
            }
        } else {
            println!("  No courses found");
        }
    }

    info!("Search test completed");
    Ok(())
}

+20
-32
@@ -1,10 +1,10 @@
//! Google Calendar command implementation.

-use crate::banner::{Course, DayOfWeek, MeetingScheduleInfo};
use crate::banner::{Course, MeetingScheduleInfo};
use crate::bot::{Context, Error, utils};
-use chrono::NaiveDate;
use chrono::{NaiveDate, Weekday};
use std::collections::HashMap;
-use tracing::{error, info};
use tracing::info;
use url::Url;

/// Generate a link to create a Google Calendar event for a course
@@ -22,19 +22,12 @@ pub async fn gcal(
    let term = course.term.clone();

    // Get meeting times
-    let meeting_times = match ctx
    let meeting_times = ctx
        .data()
        .app_state
        .banner_api
        .get_course_meeting_time(&term, &crn.to_string())
-        .await
-    {
-        Ok(meeting_time) => meeting_time,
-        Err(e) => {
-            error!("failed to get meeting times: {}", e);
-            return Err(e);
-        }
-    };
        .await?;

    struct LinkDetail {
        link: String,
@@ -46,25 +39,18 @@ pub async fn gcal(
        1.. => {
            // Sort meeting times by start time of their TimeRange
            let mut sorted_meeting_times = meeting_times.to_vec();
-            sorted_meeting_times.sort_unstable_by(|a, b| {
-                // Primary sort: by start time
-                match (&a.time_range, &b.time_range) {
-                    (Some(a_time), Some(b_time)) => a_time.start.cmp(&b_time.start),
-                    (Some(_), None) => std::cmp::Ordering::Less,
-                    (None, Some(_)) => std::cmp::Ordering::Greater,
-                    (None, None) => a.days.bits().cmp(&b.days.bits()),
-                }
-            });
            MeetingScheduleInfo::sort_by_start_time(&mut sorted_meeting_times);

            let links = sorted_meeting_times
                .iter()
                .map(|m| {
                    let link = generate_gcal_url(&course, m)?;
                    let days = m.days_string().unwrap_or_else(|| "TBA".to_string());
                    let detail = match &m.time_range {
                        Some(range) => {
-                            format!("{} {}", m.days_string().unwrap(), range.format_12hr())
                            format!("{days} {}", range.format_12hr())
                        }
-                        None => m.days_string().unwrap(),
                        None => days,
                    };
                    Ok(LinkDetail { link, detail })
                })
@@ -84,7 +70,7 @@ pub async fn gcal(
    )
    .await?;

-    info!("gcal command completed for CRN: {}", crn);
    info!(crn = %crn, "gcal command completed");
    Ok(())
}

@@ -112,7 +98,9 @@ fn generate_gcal_url(
        "CRN: {}\nInstructor: {}\nDays: {}",
        course.course_reference_number,
        instructor_name,
-        meeting_time.days_string().unwrap()
        meeting_time
            .days_string()
            .unwrap_or_else(|| "TBA".to_string())
    );

    // The event location
@@ -140,13 +128,13 @@ fn generate_rrule(meeting_time: &MeetingScheduleInfo, end_date: NaiveDate) -> St
    let by_day = days_of_week
        .iter()
        .map(|day| match day {
-            DayOfWeek::Monday => "MO",
-            DayOfWeek::Tuesday => "TU",
-            DayOfWeek::Wednesday => "WE",
-            DayOfWeek::Thursday => "TH",
-            DayOfWeek::Friday => "FR",
-            DayOfWeek::Saturday => "SA",
-            DayOfWeek::Sunday => "SU",
            Weekday::Mon => "MO",
            Weekday::Tue => "TU",
            Weekday::Wed => "WE",
            Weekday::Thu => "TH",
            Weekday::Fri => "FR",
            Weekday::Sat => "SA",
            Weekday::Sun => "SU",
        })
        .collect::<Vec<&str>>()
        .join(",");

+360
-6
@@ -1,8 +1,82 @@
//! ICS command implementation for generating calendar files.

use crate::banner::{Course, MeetingDays, MeetingScheduleInfo, WeekdayExt};
use crate::bot::{Context, Error, utils};
use chrono::{Datelike, Duration, NaiveDate, Utc, Weekday};
use serenity::all::CreateAttachment;
use tracing::info;

/// Find the nth occurrence of a weekday in a given month/year (1-based).
fn nth_weekday_of_month(year: i32, month: u32, weekday: Weekday, n: u32) -> Option<NaiveDate> {
    let first = NaiveDate::from_ymd_opt(year, month, 1)?;
    let days_ahead = (weekday.num_days_from_monday() as i64
        - first.weekday().num_days_from_monday() as i64)
        .rem_euclid(7) as u32;
    let day = 1 + days_ahead + 7 * (n - 1);
    NaiveDate::from_ymd_opt(year, month, day)
}
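The rem_euclid(7) keeps the offset non-negative even when the target weekday precedes the weekday of the month's first day. A quick sanity check of the arithmetic against known dates (a hypothetical test, not part of this diff):

#[cfg(test)]
mod nth_weekday_tests {
    use super::nth_weekday_of_month;
    use chrono::{NaiveDate, Weekday};

    #[test]
    fn known_dates() {
        // Thanksgiving 2025: the 4th Thursday of November is the 27th.
        assert_eq!(
            nth_weekday_of_month(2025, 11, Weekday::Thu, 4),
            NaiveDate::from_ymd_opt(2025, 11, 27)
        );
        // Labor Day 2025: the 1st Monday of September is the 1st.
        assert_eq!(
            nth_weekday_of_month(2025, 9, Weekday::Mon, 1),
            NaiveDate::from_ymd_opt(2025, 9, 1)
        );
    }
}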

/// Compute a consecutive range of dates starting from `start` for `count` days.
fn date_range(start: NaiveDate, count: i64) -> Vec<NaiveDate> {
    (0..count)
        .filter_map(|i| start.checked_add_signed(Duration::days(i)))
        .collect()
}

/// Compute university holidays for a given year.
///
/// Federal holidays use weekday-of-month rules so they're correct for any year.
/// University-specific breaks (Fall Break, Spring Break, Winter Holiday) are derived
/// from anchoring federal holidays or using UTSA's typical scheduling patterns.
fn compute_holidays_for_year(year: i32) -> Vec<(&'static str, Vec<NaiveDate>)> {
    let mut holidays = Vec::new();

    // Labor Day: 1st Monday of September
    if let Some(d) = nth_weekday_of_month(year, 9, Weekday::Mon, 1) {
        holidays.push(("Labor Day", vec![d]));
    }

    // Fall Break: Mon-Tue of Columbus Day week (2nd Monday of October + Tuesday)
    if let Some(mon) = nth_weekday_of_month(year, 10, Weekday::Mon, 2) {
        holidays.push(("Fall Break", date_range(mon, 2)));
    }

    // Day before Thanksgiving: Wednesday before 4th Thursday of November
    if let Some(thu) = nth_weekday_of_month(year, 11, Weekday::Thu, 4)
        && let Some(wed) = thu.checked_sub_signed(Duration::days(1))
    {
        holidays.push(("Day Before Thanksgiving", vec![wed]));
    }

    // Thanksgiving: 4th Thursday of November + Friday
    if let Some(thu) = nth_weekday_of_month(year, 11, Weekday::Thu, 4) {
        holidays.push(("Thanksgiving", date_range(thu, 2)));
    }

    // Winter Holiday: Dec 23-31
    if let Some(start) = NaiveDate::from_ymd_opt(year, 12, 23) {
        holidays.push(("Winter Holiday", date_range(start, 9)));
    }

    // New Year's Day: January 1
    if let Some(d) = NaiveDate::from_ymd_opt(year, 1, 1) {
        holidays.push(("New Year's Day", vec![d]));
    }

    // MLK Day: 3rd Monday of January
    if let Some(d) = nth_weekday_of_month(year, 1, Weekday::Mon, 3) {
        holidays.push(("MLK Day", vec![d]));
    }

    // Spring Break: full week (Mon-Sat) starting the 2nd or 3rd Monday of March
    // UTSA typically uses the 2nd full week of March
    if let Some(mon) = nth_weekday_of_month(year, 3, Weekday::Mon, 2) {
        holidays.push(("Spring Break", date_range(mon, 6)));
    }

    holidays
}

/// Generate an ICS file for a course
#[poise::command(slash_command, prefix_command)]
pub async fn ics(
@@ -12,14 +86,294 @@ pub async fn ics(
    ctx.defer().await?;

    let course = utils::get_course_by_crn(&ctx, crn).await?;
    let term = course.term.clone();

-    // TODO: Implement actual ICS file generation
-    ctx.say(format!(
-        "ICS generation for '{}' is not yet implemented.",
-        course.display_title()
-    ))
    // Get meeting times
    let meeting_times = ctx
        .data()
        .app_state
        .banner_api
        .get_course_meeting_time(&term, &crn.to_string())
        .await?;

    if meeting_times.is_empty() {
        ctx.say("No meeting times found for this course.").await?;
        return Ok(());
    }

    // Sort meeting times by start time
    let mut sorted_meeting_times = meeting_times.to_vec();
    MeetingScheduleInfo::sort_by_start_time(&mut sorted_meeting_times);

    // Generate ICS content
    let (ics_content, excluded_holidays) =
        generate_ics_content(&course, &term, &sorted_meeting_times)?;

    // Create file attachment
    let filename = format!(
        "{subject}_{number}_{section}.ics",
        subject = course.subject.replace(" ", "_"),
        number = course.course_number,
        section = course.sequence_number,
    );

    let file = CreateAttachment::bytes(ics_content.into_bytes(), filename.clone());

    // Build response content
    let mut response_content = format!(
        "📅 Generated ICS calendar for **{}**\n\n**Meeting Times:**\n{}",
        course.display_title(),
        sorted_meeting_times
            .iter()
            .enumerate()
            .map(|(i, m)| {
                let time_info = match &m.time_range {
                    Some(range) => format!(
                        "{} {}",
                        m.days_string().unwrap_or("TBA".to_string()),
                        range.format_12hr()
                    ),
                    None => m.days_string().unwrap_or("TBA".to_string()),
                };
                format!("{}. {}", i + 1, time_info)
            })
            .collect::<Vec<_>>()
            .join("\n")
    );

    // Add holiday exclusion information
    if !excluded_holidays.is_empty() {
        let count = excluded_holidays.len();
        let count_text = if count == 1 {
            "1 date was".to_string()
        } else {
            format!("{} dates were", count)
        };
        response_content.push_str(&format!("\n\n{} excluded from the ICS file:\n", count_text));
        response_content.push_str(
            &excluded_holidays
                .iter()
                .map(|s| format!("- {}", s))
                .collect::<Vec<_>>()
                .join("\n"),
        );
    }

    ctx.send(
        poise::CreateReply::default()
            .content(response_content)
            .attachment(file),
    )
    .await?;

-    info!("ics command completed for CRN: {}", crn);
    info!(crn = %crn, "ics command completed");
    Ok(())
}

/// Generate ICS content for a course and its meeting times
fn generate_ics_content(
    course: &Course,
    term: &str,
    meeting_times: &[MeetingScheduleInfo],
) -> Result<(String, Vec<String>), anyhow::Error> {
    let mut ics_content = String::new();
    let mut excluded_holidays = Vec::new();

    // ICS header
    ics_content.push_str("BEGIN:VCALENDAR\r\n");
    ics_content.push_str("VERSION:2.0\r\n");
    ics_content.push_str("PRODID:-//Banner Bot//Course Calendar//EN\r\n");
    ics_content.push_str("CALSCALE:GREGORIAN\r\n");
    ics_content.push_str("METHOD:PUBLISH\r\n");

    // Calendar name
    ics_content.push_str(&format!(
        "X-WR-CALNAME:{} - {}\r\n",
        course.display_title(),
        term
    ));

    // Generate events for each meeting time
    for (index, meeting_time) in meeting_times.iter().enumerate() {
        let (event_content, holidays) = generate_event_content(course, meeting_time, index)?;
        ics_content.push_str(&event_content);
        excluded_holidays.extend(holidays);
    }

    // ICS footer
    ics_content.push_str("END:VCALENDAR\r\n");

    Ok((ics_content, excluded_holidays))
}

/// Generate ICS event content for a single meeting time
fn generate_event_content(
    course: &Course,
    meeting_time: &MeetingScheduleInfo,
    index: usize,
) -> Result<(String, Vec<String>), anyhow::Error> {
    let course_title = course.display_title();
    let instructor_name = course.primary_instructor_name();
    let location = meeting_time.place_string();

    // Create event title with meeting index if multiple meetings
    let event_title = if index > 0 {
        format!("{} (Meeting {})", course_title, index + 1)
    } else {
        course_title
    };

    // Create event description
    let description = format!(
        "CRN: {}\\nInstructor: {}\\nDays: {}\\nMeeting Type: {}",
        course.course_reference_number,
        instructor_name,
        meeting_time.days_string().unwrap_or("TBA".to_string()),
        meeting_time.meeting_type.description()
    );

    // Get start and end times
    let (start_dt, end_dt) = meeting_time.datetime_range();

    // Format datetimes for ICS (UTC format)
    let start_utc = start_dt.with_timezone(&Utc);
    let end_utc = end_dt.with_timezone(&Utc);

    let start_str = start_utc.format("%Y%m%dT%H%M%SZ").to_string();
    let end_str = end_utc.format("%Y%m%dT%H%M%SZ").to_string();

    // Generate unique ID for the event
    let uid = format!(
        "{}-{}-{}@banner-bot.local",
        course.course_reference_number,
        index,
        start_utc.timestamp()
    );

    let mut event_content = String::new();

    // Event header
    event_content.push_str("BEGIN:VEVENT\r\n");
    event_content.push_str(&format!("UID:{}\r\n", uid));
    event_content.push_str(&format!("DTSTART:{}\r\n", start_str));
    event_content.push_str(&format!("DTEND:{}\r\n", end_str));
    event_content.push_str(&format!("SUMMARY:{}\r\n", escape_ics_text(&event_title)));
    event_content.push_str(&format!(
        "DESCRIPTION:{}\r\n",
        escape_ics_text(&description)
    ));
    event_content.push_str(&format!("LOCATION:{}\r\n", escape_ics_text(&location)));

    // Add recurrence rule if there are specific days and times
    if !meeting_time.days.is_empty() && meeting_time.time_range.is_some() {
        let days_of_week = meeting_time.days_of_week();
        let by_day: Vec<String> = days_of_week
            .iter()
            .map(|day| day.to_short_string().to_uppercase())
            .collect();

        if !by_day.is_empty() {
            let until_date = meeting_time
                .date_range
                .end
                .format("%Y%m%dT000000Z")
                .to_string();

            event_content.push_str(&format!(
                "RRULE:FREQ=WEEKLY;BYDAY={};UNTIL={}\r\n",
                by_day.join(","),
                until_date
            ));

            // Add holiday exceptions (EXDATE) if the class would meet on holiday dates
            let holiday_exceptions = get_holiday_exceptions(meeting_time);
            if let Some(exdate_property) = generate_exdate_property(&holiday_exceptions, start_utc)
            {
                event_content.push_str(&format!("{}\r\n", exdate_property));
            }

            // Collect holiday names for reporting
            let start_year = meeting_time.date_range.start.year();
            let end_year = meeting_time.date_range.end.year();
            let all_holidays: Vec<_> = (start_year..=end_year)
                .flat_map(compute_holidays_for_year)
                .collect();

            let mut holiday_names = Vec::new();
            for (holiday_name, holiday_dates) in &all_holidays {
                for &exception_date in &holiday_exceptions {
                    if holiday_dates.contains(&exception_date) {
                        holiday_names.push(format!(
                            "{} ({})",
                            holiday_name,
                            exception_date.format("%a, %b %d")
                        ));
                    }
                }
            }
            holiday_names.sort();
            holiday_names.dedup();

            event_content.push_str("END:VEVENT\r\n");
            return Ok((event_content, holiday_names));
        }
    }

    // Event footer
    event_content.push_str("END:VEVENT\r\n");

    Ok((event_content, Vec::new()))
}

/// Check if a class meets on a specific date based on its meeting days
fn class_meets_on_date(meeting_time: &MeetingScheduleInfo, date: NaiveDate) -> bool {
    let day: MeetingDays = date.weekday().into();
    meeting_time.days.contains(day)
}

/// Get holiday dates that fall within the course date range and would conflict with class meetings
fn get_holiday_exceptions(meeting_time: &MeetingScheduleInfo) -> Vec<NaiveDate> {
    let start_year = meeting_time.date_range.start.year();
    let end_year = meeting_time.date_range.end.year();

    (start_year..=end_year)
        .flat_map(compute_holidays_for_year)
        .flat_map(|(_, dates)| dates)
        .filter(|&date| {
            date >= meeting_time.date_range.start
                && date <= meeting_time.date_range.end
                && class_meets_on_date(meeting_time, date)
        })
        .collect()
}

/// Generate EXDATE property for holiday exceptions
fn generate_exdate_property(
    exceptions: &[NaiveDate],
    start_time: chrono::DateTime<Utc>,
) -> Option<String> {
    if exceptions.is_empty() {
        return None;
    }

    let mut exdate_values = Vec::new();

    for &exception_date in exceptions {
        // Create a datetime for the exception using the same time as the start time
        let exception_datetime = exception_date.and_time(start_time.time()).and_utc();

        let exdate_str = exception_datetime.format("%Y%m%dT%H%M%SZ").to_string();
        exdate_values.push(exdate_str);
    }

    Some(format!("EXDATE:{}", exdate_values.join(",")))
}
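As an illustration with hypothetical values (not from this diff): for a Monday 10:00 UTC class whose date range covers Labor Day 2025, the property comes out as one comma-joined line.

#[test]
fn exdate_example() {
    use chrono::{NaiveDate, TimeZone, Utc};

    let start = Utc.with_ymd_and_hms(2025, 8, 25, 10, 0, 0).unwrap();
    let labor_day = NaiveDate::from_ymd_opt(2025, 9, 1).unwrap();
    // The exception reuses the event's time-of-day, so calendars drop the right instance.
    assert_eq!(
        generate_exdate_property(&[labor_day], start).as_deref(),
        Some("EXDATE:20250901T100000Z")
    );
}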

/// Escape text for ICS format
fn escape_ics_text(text: &str) -> String {
    text.replace("\\", "\\\\")
        .replace(";", "\\;")
        .replace(",", "\\,")
        .replace("\n", "\\n")
        .replace("\r", "")
}
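So an input like Room 1,2; see note becomes Room 1\,2\; see note, matching RFC 5545's TEXT escaping; real newlines are folded into literal \n sequences and carriage returns are dropped.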

@@ -1,13 +1,11 @@
//! Bot commands module.

pub mod gcal;
pub mod ics;
pub mod search;
pub mod terms;
-pub mod time;
-pub mod ics;
-pub mod gcal;

pub use gcal::gcal;
pub use ics::ics;
pub use search::search;
pub use terms::terms;
-pub use time::time;
-pub use ics::ics;
-pub use gcal::gcal;

+116
-9
@@ -4,8 +4,12 @@ use crate::banner::{SearchQuery, Term};
use crate::bot::{Context, Error};
use anyhow::anyhow;
use regex::Regex;
use std::sync::LazyLock;
use tracing::info;

static RANGE_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(\d{1,4})-(\d{1,4})?").unwrap());
static WILDCARD_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(\d+)(x+)").unwrap());

/// Search for courses with various filters
#[poise::command(slash_command, prefix_command)]
pub async fn search(
@@ -20,8 +24,8 @@ pub async fn search(
    // Defer the response since this might take a while
    ctx.defer().await?;

-    // Build the search query
-    let mut query = SearchQuery::new().credits(3, 6);
    // Build the search query — no default credit filter so all courses are visible
    let mut query = SearchQuery::new();

    if let Some(title) = title {
        query = query.title(title);
@@ -82,8 +86,7 @@ fn parse_course_code(input: &str) -> Result<(i32, i32), Error> {

    // Handle range format (e.g., "3000-3999")
    if input.contains('-') {
-        let re = Regex::new(r"(\d{1,4})-(\d{1,4})?").unwrap();
-        if let Some(captures) = re.captures(input) {
        if let Some(captures) = RANGE_RE.captures(input) {
            let low: i32 = captures[1].parse()?;
            let high = if captures.get(2).is_some() {
                captures[2].parse()?
@@ -92,9 +95,7 @@ fn parse_course_code(input: &str) -> Result<(i32, i32), Error> {
            };

            if low > high {
-                return Err(anyhow!(
-                    "Invalid range: low value greater than high value"
-                ));
                return Err(anyhow!("Invalid range: low value greater than high value"));
            }

            if low < 1000 || high > 9999 {
@@ -112,8 +113,7 @@ fn parse_course_code(input: &str) -> Result<(i32, i32), Error> {
        return Err(anyhow!("Wildcard format must be exactly 4 characters"));
    }

-    let re = Regex::new(r"(\d+)(x+)").unwrap();
-    if let Some(captures) = re.captures(input) {
    if let Some(captures) = WILDCARD_RE.captures(input) {
        let prefix: i32 = captures[1].parse()?;
        let x_count = captures[2].len();

@@ -140,3 +140,110 @@ fn parse_course_code(input: &str) -> Result<(i32, i32), Error> {

    Err(anyhow!("Invalid course code format"))
}

#[cfg(test)]
mod tests {
    use super::*;

    // --- Single codes ---

    #[test]
    fn test_parse_single_code() {
        assert_eq!(parse_course_code("3743").unwrap(), (3743, 3743));
    }

    #[test]
    fn test_parse_single_code_boundaries() {
        assert_eq!(parse_course_code("1000").unwrap(), (1000, 1000));
        assert_eq!(parse_course_code("9999").unwrap(), (9999, 9999));
    }

    #[test]
    fn test_parse_single_code_below_range() {
        assert!(parse_course_code("0999").is_err());
    }

    #[test]
    fn test_parse_single_code_wrong_length() {
        assert!(parse_course_code("123").is_err());
    }

    #[test]
    fn test_parse_single_code_non_numeric() {
        assert!(parse_course_code("abcd").is_err());
    }

    #[test]
    fn test_parse_single_code_trimmed() {
        assert_eq!(parse_course_code(" 3743 ").unwrap(), (3743, 3743));
    }

    // --- Ranges ---

    #[test]
    fn test_parse_range_full() {
        assert_eq!(parse_course_code("3000-3999").unwrap(), (3000, 3999));
    }

    #[test]
    fn test_parse_range_same() {
        assert_eq!(parse_course_code("3000-3000").unwrap(), (3000, 3000));
    }

    #[test]
    fn test_parse_range_open() {
        assert_eq!(parse_course_code("3000-").unwrap(), (3000, 9999));
    }

    #[test]
    fn test_parse_range_inverted() {
        assert!(parse_course_code("5000-3000").is_err());
    }

    #[test]
    fn test_parse_range_below_1000() {
        assert!(parse_course_code("500-999").is_err());
    }

    #[test]
    fn test_parse_range_above_9999() {
        assert!(parse_course_code("9000-10000").is_err());
    }

    #[test]
    fn test_parse_range_full_valid() {
        assert_eq!(parse_course_code("1000-9999").unwrap(), (1000, 9999));
    }

    // --- Wildcards ---

    #[test]
    fn test_parse_wildcard_one_x() {
        assert_eq!(parse_course_code("300x").unwrap(), (3000, 3009));
    }

    #[test]
    fn test_parse_wildcard_two_x() {
        assert_eq!(parse_course_code("30xx").unwrap(), (3000, 3099));
    }

    #[test]
    fn test_parse_wildcard_three_x() {
        assert_eq!(parse_course_code("3xxx").unwrap(), (3000, 3999));
    }

    #[test]
    fn test_parse_wildcard_9xxx() {
        assert_eq!(parse_course_code("9xxx").unwrap(), (9000, 9999));
    }

    #[test]
    fn test_parse_wildcard_wrong_length() {
        assert!(parse_course_code("3xxxx").is_err());
    }

    #[test]
    fn test_parse_wildcard_0xxx() {
        assert!(parse_course_code("0xxx").is_err());
    }
}

@@ -21,6 +21,7 @@
        .data()
        .app_state
        .banner_api
        .sessions
        .get_terms(&search_term, page_number, max_results)
        .await?;

@@ -46,7 +47,11 @@ fn format_term(term: &BannerTerm, current_term_code: &str) -> String {
    } else {
        ""
    };
-    let is_archived = if term.is_archived() { " (archived)" } else { "" };
    let is_archived = if term.is_archived() {
        " (archived)"
    } else {
        ""
    };
    format!(
        "- `{}`: {}{}{}",
        term.code, term.description, is_current, is_archived

@@ -1,25 +0,0 @@
//! Time command implementation for course meeting times.

use crate::bot::{utils, Context, Error};
use tracing::info;

/// Get meeting times for a specific course
#[poise::command(slash_command, prefix_command)]
pub async fn time(
    ctx: Context<'_>,
    #[description = "Course Reference Number (CRN)"] crn: i32,
) -> Result<(), Error> {
    ctx.defer().await?;

    let course = utils::get_course_by_crn(&ctx, crn).await?;

    // TODO: Implement actual meeting time retrieval and display
    ctx.say(format!(
        "Meeting time display for '{}' is not yet implemented.",
        course.display_title()
    ))
    .await?;

    info!("time command completed for CRN: {}", crn);
    Ok(())
}

+1
-3
@@ -1,10 +1,9 @@
-use crate::app_state::AppState;
use crate::error::Error;
use crate::state::AppState;

pub mod commands;
pub mod utils;

#[derive(Debug)]
pub struct Data {
    pub app_state: AppState,
} // User data, which is stored and accessible in all command invocations
@@ -15,7 +14,6 @@ pub fn get_commands() -> Vec<poise::Command<Data, Error>> {
    vec![
        commands::search(),
        commands::terms(),
-        commands::time(),
        commands::ics(),
        commands::gcal(),
    ]

+2
-2
@@ -13,12 +13,12 @@ pub async fn get_course_by_crn(ctx: &Context<'_>, crn: i32) -> Result<Course> {
    let current_term_status = Term::get_current();
    let term = current_term_status.inner();

-    // Fetch live course data from Redis cache via AppState
    // Fetch live course data from database via AppState
    app_state
        .get_course_or_fetch(&term.to_string(), &crn.to_string())
        .await
        .map_err(|e| {
-            error!(%e, crn, "failed to fetch course data");
            error!(error = %e, crn = %crn, "failed to fetch course data");
            e
        })
}

+180
@@ -0,0 +1,180 @@
use clap::Parser;

/// Banner Discord Bot - Course availability monitoring
///
/// This application runs multiple services that can be controlled via CLI arguments:
/// - bot: Discord bot for course monitoring commands
/// - web: HTTP server for web interface and API
/// - scraper: Background service for scraping course data
///
/// Use --services to specify which services to run, or --disable-services to exclude specific services.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Args {
    /// Log formatter to use
    #[arg(long, value_enum, default_value_t = default_tracing_format())]
    pub tracing: TracingFormat,

    /// Services to run (comma-separated). Default: all services
    ///
    /// Examples:
    /// --services bot,web # Run only bot and web services
    /// --services scraper # Run only the scraper service
    #[arg(long, value_delimiter = ',', conflicts_with = "disable_services")]
    pub services: Option<Vec<ServiceName>>,

    /// Services to disable (comma-separated)
    ///
    /// Examples:
    /// --disable-services bot # Run web and scraper only
    /// --disable-services bot,web # Run only the scraper service
    #[arg(long, value_delimiter = ',', conflicts_with = "services")]
    pub disable_services: Option<Vec<ServiceName>>,
}

#[derive(clap::ValueEnum, Clone, Debug)]
pub enum TracingFormat {
    /// Use pretty formatter (default in debug mode)
    Pretty,
    /// Use JSON formatter (default in release mode)
    Json,
}

#[derive(clap::ValueEnum, Clone, Debug, PartialEq)]
pub enum ServiceName {
    /// Discord bot for course monitoring commands
    Bot,
    /// HTTP server for web interface and API
    Web,
    /// Background service for scraping course data
    Scraper,
}

impl ServiceName {
    /// Get all available services
    pub fn all() -> Vec<ServiceName> {
        vec![ServiceName::Bot, ServiceName::Web, ServiceName::Scraper]
    }

    /// Convert to string for service registration
    pub fn as_str(&self) -> &'static str {
        match self {
            ServiceName::Bot => "bot",
            ServiceName::Web => "web",
            ServiceName::Scraper => "scraper",
        }
    }
}

/// Determine which services should be enabled based on CLI arguments
pub fn determine_enabled_services(args: &Args) -> Result<Vec<ServiceName>, anyhow::Error> {
    match (&args.services, &args.disable_services) {
        (Some(services), None) => {
            // User specified which services to run
            Ok(services.clone())
        }
        (None, Some(disabled)) => {
            // User specified which services to disable
            let enabled: Vec<ServiceName> = ServiceName::all()
                .into_iter()
                .filter(|s| !disabled.contains(s))
                .collect();
            Ok(enabled)
        }
        (None, None) => {
            // Default: run all services
            Ok(ServiceName::all())
        }
        (Some(_), Some(_)) => {
            // This should be prevented by clap's conflicts_with, but just in case
            Err(anyhow::anyhow!(
                "Cannot specify both --services and --disable-services"
            ))
        }
    }
}

#[cfg(debug_assertions)]
const DEFAULT_TRACING_FORMAT: TracingFormat = TracingFormat::Pretty;
#[cfg(not(debug_assertions))]
const DEFAULT_TRACING_FORMAT: TracingFormat = TracingFormat::Json;

fn default_tracing_format() -> TracingFormat {
    DEFAULT_TRACING_FORMAT
}

#[cfg(test)]
mod tests {
    use super::*;

    fn args_with_services(
        services: Option<Vec<ServiceName>>,
        disable: Option<Vec<ServiceName>>,
    ) -> Args {
        Args {
            tracing: TracingFormat::Pretty,
            services,
            disable_services: disable,
        }
    }

    #[test]
    fn test_default_enables_all_services() {
        let result = determine_enabled_services(&args_with_services(None, None)).unwrap();
        assert_eq!(result.len(), 3);
    }

    #[test]
    fn test_explicit_services_only_those() {
        let result =
            determine_enabled_services(&args_with_services(Some(vec![ServiceName::Web]), None))
                .unwrap();
        assert_eq!(result.len(), 1);
        assert_eq!(result[0].as_str(), "web");
    }

    #[test]
    fn test_disable_bot_leaves_web_and_scraper() {
        let result =
            determine_enabled_services(&args_with_services(None, Some(vec![ServiceName::Bot])))
                .unwrap();
        assert_eq!(result.len(), 2);
        assert!(result.iter().all(|s| s.as_str() != "bot"));
    }

    #[test]
    fn test_disable_all_leaves_empty() {
        let result = determine_enabled_services(&args_with_services(
            None,
            Some(vec![
                ServiceName::Bot,
                ServiceName::Web,
                ServiceName::Scraper,
            ]),
        ))
        .unwrap();
        assert!(result.is_empty());
    }

    #[test]
    fn test_both_specified_returns_error() {
        let result = determine_enabled_services(&args_with_services(
            Some(vec![ServiceName::Web]),
            Some(vec![ServiceName::Bot]),
        ));
        assert!(result.is_err());
    }

    #[test]
    fn test_service_name_as_str() {
        assert_eq!(ServiceName::Bot.as_str(), "bot");
        assert_eq!(ServiceName::Web.as_str(), "web");
        assert_eq!(ServiceName::Scraper.as_str(), "scraper");
    }

    #[test]
    fn test_service_name_all() {
        let all = ServiceName::all();
        assert_eq!(all.len(), 3);
    }
}
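Putting it together, a deployment that only scrapes might be started with something like banner --services scraper --tracing json (assuming the binary is named banner); clap rejects combining --services with --disable-services via conflicts_with before determine_enabled_services is ever reached.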

+196
-14
@@ -8,24 +8,23 @@ use fundu::{DurationParser, TimeUnit};
use serde::{Deserialize, Deserializer};
use std::time::Duration;

-/// Application configuration loaded from environment variables
/// Main application configuration containing all sub-configurations
#[derive(Deserialize)]
pub struct Config {
-    /// Discord bot token for authentication
-    pub bot_token: String,
-    /// Port for the web server
    /// Log level for the application
    ///
    /// This value is used to set the log level for this application's target specifically.
    /// e.g. "debug" would be similar to "warn,banner=debug,..."
    ///
    /// Valid values are: "trace", "debug", "info", "warn", "error"
    /// Defaults to "info" if not specified
    #[serde(default = "default_log_level")]
    pub log_level: String,
    /// Port for the web server (default: 8080)
    #[serde(default = "default_port")]
    pub port: u16,
    /// Database connection URL
    pub database_url: String,
    /// Redis connection URL
    pub redis_url: String,
-    /// Base URL for banner generation service
-    pub banner_base_url: String,
-    /// Target Discord guild ID where the bot operates
-    pub bot_target_guild: u64,
-    /// Discord application ID
-    pub bot_app_id: u64,
    /// Graceful shutdown timeout duration
    ///
    /// Accepts both numeric values (seconds) and duration strings
@@ -35,11 +34,29 @@ pub struct Config {
        deserialize_with = "deserialize_duration"
    )]
    pub shutdown_timeout: Duration,
    /// Discord bot token for authentication
    pub bot_token: String,
    /// Target Discord guild ID where the bot operates
    pub bot_target_guild: u64,

    /// Base URL for banner generation service
    ///
    /// Defaults to "https://ssbprod.utsa.edu/StudentRegistrationSsb/ssb" if not specified
    #[serde(default = "default_banner_base_url")]
    pub banner_base_url: String,
    /// Rate limiting configuration for Banner API requests
    #[serde(default = "default_rate_limiting")]
    pub rate_limiting: RateLimitingConfig,
}

-/// Default port of 3000
/// Default log level of "info"
fn default_log_level() -> String {
    "info".to_string()
}

/// Default port of 8080
fn default_port() -> u16 {
-    3000
    8080
}

/// Default shutdown timeout of 8 seconds
@@ -47,6 +64,73 @@ fn default_shutdown_timeout() -> Duration {
    Duration::from_secs(8)
}

/// Default banner base URL
fn default_banner_base_url() -> String {
    "https://ssbprod.utsa.edu/StudentRegistrationSsb/ssb".to_string()
}

/// Rate limiting configuration for Banner API requests
#[derive(Deserialize, Clone, Debug, PartialEq, Eq)]
pub struct RateLimitingConfig {
    /// Requests per minute for session operations (very conservative)
    #[serde(default = "default_session_rpm")]
    pub session_rpm: u32,
    /// Requests per minute for search operations (moderate)
    #[serde(default = "default_search_rpm")]
    pub search_rpm: u32,
    /// Requests per minute for metadata operations (moderate)
    #[serde(default = "default_metadata_rpm")]
    pub metadata_rpm: u32,
    /// Requests per minute for reset operations (low priority)
    #[serde(default = "default_reset_rpm")]
    pub reset_rpm: u32,
    /// Burst allowance (extra requests allowed in short bursts)
    #[serde(default = "default_burst_allowance")]
    pub burst_allowance: u32,
}

/// Default rate limiting configuration
fn default_rate_limiting() -> RateLimitingConfig {
    RateLimitingConfig::default()
}

impl Default for RateLimitingConfig {
    fn default() -> Self {
        Self {
            session_rpm: default_session_rpm(),
            search_rpm: default_search_rpm(),
            metadata_rpm: default_metadata_rpm(),
            reset_rpm: default_reset_rpm(),
            burst_allowance: default_burst_allowance(),
        }
    }
}

/// Default session requests per minute (6 = 1 every 10 seconds)
fn default_session_rpm() -> u32 {
    6
}

/// Default search requests per minute (30 = 1 every 2 seconds)
fn default_search_rpm() -> u32 {
    30
}

/// Default metadata requests per minute (20 = 1 every 3 seconds)
fn default_metadata_rpm() -> u32 {
    20
}

/// Default reset requests per minute (10 = 1 every 6 seconds)
fn default_reset_rpm() -> u32 {
    10
}

/// Default burst allowance (3 extra requests)
fn default_burst_allowance() -> u32 {
    3
}

/// Duration parser configured to handle various time units with seconds as default
///
/// Supports:
@@ -131,3 +215,101 @@

    deserializer.deserialize_any(DurationVisitor)
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde::Deserialize;

    #[derive(Deserialize)]
    struct DurationWrapper {
        #[serde(deserialize_with = "deserialize_duration")]
        value: Duration,
    }

    fn parse(json: &str) -> Result<Duration, String> {
        serde_json::from_str::<DurationWrapper>(json)
            .map(|w| w.value)
            .map_err(|e| e.to_string())
    }

    #[test]
    fn test_duration_from_integer_seconds() {
        let d = parse(r#"{"value": 30}"#).unwrap();
        assert_eq!(d, Duration::from_secs(30));
    }

    #[test]
    fn test_duration_from_string_seconds() {
        let d = parse(r#"{"value": "30s"}"#).unwrap();
        assert_eq!(d, Duration::from_secs(30));
    }

    #[test]
    fn test_duration_from_string_minutes() {
        let d = parse(r#"{"value": "2m"}"#).unwrap();
        assert_eq!(d, Duration::from_secs(120));
    }

    #[test]
    fn test_duration_from_string_milliseconds() {
        let d = parse(r#"{"value": "1500ms"}"#).unwrap();
        assert_eq!(d, Duration::from_millis(1500));
    }

    #[test]
    fn test_duration_from_string_with_space() {
        let d = parse(r#"{"value": "2 m"}"#).unwrap();
        assert_eq!(d, Duration::from_secs(120));
    }

    #[test]
    fn test_duration_from_string_multiple_units() {
        let d = parse(r#"{"value": "1m 30s"}"#).unwrap();
        assert_eq!(d, Duration::from_secs(90));
    }

    #[test]
    fn test_duration_from_bare_number_string() {
        let d = parse(r#"{"value": "45"}"#).unwrap();
        assert_eq!(d, Duration::from_secs(45));
    }

    #[test]
    fn test_duration_zero() {
        let d = parse(r#"{"value": 0}"#).unwrap();
        assert_eq!(d, Duration::from_secs(0));
    }

    #[test]
    fn test_duration_negative_rejected() {
        let err = parse(r#"{"value": -5}"#).unwrap_err();
        assert!(err.contains("negative"), "expected negative error: {err}");
    }

    #[test]
    fn test_duration_invalid_string_rejected() {
        let err = parse(r#"{"value": "notaduration"}"#).unwrap_err();
        assert!(
            err.contains("Invalid duration"),
            "expected invalid format error: {err}"
        );
    }

    #[test]
    fn test_default_config_values() {
        assert_eq!(default_port(), 8080);
        assert_eq!(default_shutdown_timeout(), Duration::from_secs(8));
        assert_eq!(default_log_level(), "info");
    }

    #[test]
    fn test_default_rate_limiting() {
        let rl = default_rate_limiting();
        assert_eq!(rl.session_rpm, 6);
        assert_eq!(rl.search_rpm, 30);
        assert_eq!(rl.metadata_rpm, 20);
        assert_eq!(rl.reset_rpm, 10);
        assert_eq!(rl.burst_allowance, 3);
    }
}
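Since the search-test binary earlier in this diff loads Config with Figment's Env::raw(), the fields above map to environment variables. A sketch of a matching .env file; the values are placeholders, and the names assume Figment's default case-insensitive field matching:

LOG_LEVEL=debug
PORT=8080
DATABASE_URL=postgres://localhost/banner
REDIS_URL=redis://localhost:6379
SHUTDOWN_TIMEOUT=10s
BOT_TOKEN=<discord-token>
BOT_TARGET_GUILD=123456789012345678
BANNER_BASE_URL=https://ssbprod.utsa.edu/StudentRegistrationSsb/ssb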
|
||||
|
||||
@@ -0,0 +1,325 @@
|
||||
//! Batch database operations for improved performance.
|
||||
|
||||
use crate::banner::Course;
|
||||
use crate::data::models::DbMeetingTime;
|
||||
use crate::error::Result;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashSet;
|
||||
use std::time::Instant;
|
||||
use tracing::info;
|
||||
|
||||
/// Convert a Banner API course's meeting times to the DB JSONB shape.
|
||||
fn to_db_meeting_times(course: &Course) -> serde_json::Value {
|
||||
let meetings: Vec<DbMeetingTime> = course
|
||||
.meetings_faculty
|
||||
.iter()
|
||||
.map(|mf| {
|
||||
let mt = &mf.meeting_time;
|
||||
DbMeetingTime {
|
||||
begin_time: mt.begin_time.clone(),
|
||||
end_time: mt.end_time.clone(),
|
||||
start_date: mt.start_date.clone(),
|
||||
end_date: mt.end_date.clone(),
|
||||
monday: mt.monday,
|
||||
tuesday: mt.tuesday,
|
||||
wednesday: mt.wednesday,
|
||||
thursday: mt.thursday,
|
||||
friday: mt.friday,
|
||||
saturday: mt.saturday,
|
||||
sunday: mt.sunday,
|
||||
building: mt.building.clone(),
|
||||
building_description: mt.building_description.clone(),
|
||||
room: mt.room.clone(),
|
||||
campus: mt.campus.clone(),
|
||||
meeting_type: mt.meeting_type.clone(),
|
||||
meeting_schedule_type: mt.meeting_schedule_type.clone(),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
serde_json::to_value(meetings).unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Convert a Banner API course's section attributes to a JSONB array of code strings.
|
||||
fn to_db_attributes(course: &Course) -> serde_json::Value {
|
||||
let codes: Vec<&str> = course
|
||||
.section_attributes
|
||||
.iter()
|
||||
.map(|a| a.code.as_str())
|
||||
.collect();
|
||||
serde_json::to_value(codes).unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Extract the campus code from the first meeting time (Banner doesn't put it on the course directly).
|
||||
fn extract_campus_code(course: &Course) -> Option<String> {
|
||||
course
|
||||
.meetings_faculty
|
||||
.first()
|
||||
.and_then(|mf| mf.meeting_time.campus.clone())
|
||||
}
|
||||
|
||||
/// Batch upsert courses in a single database query.
|
||||
///
|
||||
/// Performs a bulk INSERT...ON CONFLICT DO UPDATE for all courses, including
|
||||
/// new fields (meeting times, attributes, instructor data). Returns the
|
||||
/// database IDs for all upserted courses (in input order) so instructors
|
||||
/// can be linked.
|
||||
///
|
||||
/// # Performance
|
||||
/// - Reduces N database round-trips to 3 (courses, instructors, junction)
|
||||
/// - Typical usage: 50-200 courses per batch
|
||||
pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<()> {
|
||||
if courses.is_empty() {
|
||||
info!("No courses to upsert, skipping batch operation");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let start = Instant::now();
|
||||
let course_count = courses.len();
|
||||
|
||||
// Step 1: Upsert courses with all fields, returning IDs
|
||||
let course_ids = upsert_courses(courses, db_pool).await?;
|
||||
|
||||
// Step 2: Upsert instructors (deduplicated across batch)
|
||||
upsert_instructors(courses, db_pool).await?;
|
||||
|
||||
// Step 3: Link courses to instructors via junction table
|
||||
upsert_course_instructors(courses, &course_ids, db_pool).await?;
|
||||
|
||||
let duration = start.elapsed();
|
||||
info!(
|
||||
courses_count = course_count,
|
||||
duration_ms = duration.as_millis(),
|
||||
"Batch upserted courses with instructors"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Upsert all courses and return their database IDs in input order.
async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>> {
    let crns: Vec<&str> = courses
        .iter()
        .map(|c| c.course_reference_number.as_str())
        .collect();
    let subjects: Vec<&str> = courses.iter().map(|c| c.subject.as_str()).collect();
    let course_numbers: Vec<&str> = courses.iter().map(|c| c.course_number.as_str()).collect();
    let titles: Vec<&str> = courses.iter().map(|c| c.course_title.as_str()).collect();
    let term_codes: Vec<&str> = courses.iter().map(|c| c.term.as_str()).collect();
    let enrollments: Vec<i32> = courses.iter().map(|c| c.enrollment).collect();
    let max_enrollments: Vec<i32> = courses.iter().map(|c| c.maximum_enrollment).collect();
    let wait_counts: Vec<i32> = courses.iter().map(|c| c.wait_count).collect();
    let wait_capacities: Vec<i32> = courses.iter().map(|c| c.wait_capacity).collect();

    // New scalar fields
    let sequence_numbers: Vec<Option<&str>> = courses
        .iter()
        .map(|c| Some(c.sequence_number.as_str()))
        .collect();
    let parts_of_term: Vec<Option<&str>> = courses
        .iter()
        .map(|c| Some(c.part_of_term.as_str()))
        .collect();
    let instructional_methods: Vec<Option<&str>> = courses
        .iter()
        .map(|c| Some(c.instructional_method.as_str()))
        .collect();
    let campuses: Vec<Option<String>> = courses.iter().map(extract_campus_code).collect();
    let credit_hours: Vec<Option<i32>> = courses.iter().map(|c| c.credit_hours).collect();
    let credit_hour_lows: Vec<Option<i32>> = courses.iter().map(|c| c.credit_hour_low).collect();
    let credit_hour_highs: Vec<Option<i32>> = courses.iter().map(|c| c.credit_hour_high).collect();
    let cross_lists: Vec<Option<&str>> = courses.iter().map(|c| c.cross_list.as_deref()).collect();
    let cross_list_capacities: Vec<Option<i32>> =
        courses.iter().map(|c| c.cross_list_capacity).collect();
    let cross_list_counts: Vec<Option<i32>> = courses.iter().map(|c| c.cross_list_count).collect();
    let link_identifiers: Vec<Option<&str>> = courses
        .iter()
        .map(|c| c.link_identifier.as_deref())
        .collect();
    let is_section_linkeds: Vec<Option<bool>> =
        courses.iter().map(|c| Some(c.is_section_linked)).collect();

    // JSONB fields
    let meeting_times_json: Vec<serde_json::Value> =
        courses.iter().map(to_db_meeting_times).collect();
    let attributes_json: Vec<serde_json::Value> = courses.iter().map(to_db_attributes).collect();

    let rows = sqlx::query_scalar::<_, i32>(
        r#"
        INSERT INTO courses (
            crn, subject, course_number, title, term_code,
            enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at,
            sequence_number, part_of_term, instructional_method, campus,
            credit_hours, credit_hour_low, credit_hour_high,
            cross_list, cross_list_capacity, cross_list_count,
            link_identifier, is_section_linked,
            meeting_times, attributes
        )
        SELECT
            v.crn, v.subject, v.course_number, v.title, v.term_code,
            v.enrollment, v.max_enrollment, v.wait_count, v.wait_capacity, NOW(),
            v.sequence_number, v.part_of_term, v.instructional_method, v.campus,
            v.credit_hours, v.credit_hour_low, v.credit_hour_high,
            v.cross_list, v.cross_list_capacity, v.cross_list_count,
            v.link_identifier, v.is_section_linked,
            v.meeting_times, v.attributes
        FROM UNNEST(
            $1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
            $6::int4[], $7::int4[], $8::int4[], $9::int4[],
            $10::text[], $11::text[], $12::text[], $13::text[],
            $14::int4[], $15::int4[], $16::int4[],
            $17::text[], $18::int4[], $19::int4[],
            $20::text[], $21::bool[],
            $22::jsonb[], $23::jsonb[]
        ) AS v(
            crn, subject, course_number, title, term_code,
            enrollment, max_enrollment, wait_count, wait_capacity,
            sequence_number, part_of_term, instructional_method, campus,
            credit_hours, credit_hour_low, credit_hour_high,
            cross_list, cross_list_capacity, cross_list_count,
            link_identifier, is_section_linked,
            meeting_times, attributes
        )
        ON CONFLICT (crn, term_code)
        DO UPDATE SET
            subject = EXCLUDED.subject,
            course_number = EXCLUDED.course_number,
            title = EXCLUDED.title,
            enrollment = EXCLUDED.enrollment,
            max_enrollment = EXCLUDED.max_enrollment,
            wait_count = EXCLUDED.wait_count,
            wait_capacity = EXCLUDED.wait_capacity,
            last_scraped_at = EXCLUDED.last_scraped_at,
            sequence_number = EXCLUDED.sequence_number,
            part_of_term = EXCLUDED.part_of_term,
            instructional_method = EXCLUDED.instructional_method,
            campus = EXCLUDED.campus,
            credit_hours = EXCLUDED.credit_hours,
            credit_hour_low = EXCLUDED.credit_hour_low,
            credit_hour_high = EXCLUDED.credit_hour_high,
            cross_list = EXCLUDED.cross_list,
            cross_list_capacity = EXCLUDED.cross_list_capacity,
            cross_list_count = EXCLUDED.cross_list_count,
            link_identifier = EXCLUDED.link_identifier,
            is_section_linked = EXCLUDED.is_section_linked,
            meeting_times = EXCLUDED.meeting_times,
            attributes = EXCLUDED.attributes
        RETURNING id
        "#,
    )
    .bind(&crns)
    .bind(&subjects)
    .bind(&course_numbers)
    .bind(&titles)
    .bind(&term_codes)
    .bind(&enrollments)
    .bind(&max_enrollments)
    .bind(&wait_counts)
    .bind(&wait_capacities)
    .bind(&sequence_numbers)
    .bind(&parts_of_term)
    .bind(&instructional_methods)
    .bind(&campuses)
    .bind(&credit_hours)
    .bind(&credit_hour_lows)
    .bind(&credit_hour_highs)
    .bind(&cross_lists)
    .bind(&cross_list_capacities)
    .bind(&cross_list_counts)
    .bind(&link_identifiers)
    .bind(&is_section_linkeds)
    .bind(&meeting_times_json)
    .bind(&attributes_json)
    .fetch_all(db_pool)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to batch upsert courses: {}", e))?;

    Ok(rows)
}

/// Deduplicate and upsert all instructors from the batch.
async fn upsert_instructors(courses: &[Course], db_pool: &PgPool) -> Result<()> {
    let mut seen = HashSet::new();
    let mut banner_ids = Vec::new();
    let mut display_names = Vec::new();
    let mut emails: Vec<Option<&str>> = Vec::new();

    for course in courses {
        for faculty in &course.faculty {
            if seen.insert(faculty.banner_id.as_str()) {
                banner_ids.push(faculty.banner_id.as_str());
                display_names.push(faculty.display_name.as_str());
                emails.push(faculty.email_address.as_deref());
            }
        }
    }

    if banner_ids.is_empty() {
        return Ok(());
    }

    sqlx::query(
        r#"
        INSERT INTO instructors (banner_id, display_name, email)
        SELECT * FROM UNNEST($1::text[], $2::text[], $3::text[])
        ON CONFLICT (banner_id)
        DO UPDATE SET
            display_name = EXCLUDED.display_name,
            email = COALESCE(EXCLUDED.email, instructors.email)
        "#,
    )
    .bind(&banner_ids)
    .bind(&display_names)
    .bind(&emails)
    .execute(db_pool)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to batch upsert instructors: {}", e))?;

    Ok(())
}

/// Link courses to their instructors via the junction table.
async fn upsert_course_instructors(
    courses: &[Course],
    course_ids: &[i32],
    db_pool: &PgPool,
) -> Result<()> {
    let mut cids = Vec::new();
    let mut iids = Vec::new();
    let mut primaries = Vec::new();

    for (course, &course_id) in courses.iter().zip(course_ids) {
        for faculty in &course.faculty {
            cids.push(course_id);
            iids.push(faculty.banner_id.as_str());
            primaries.push(faculty.primary_indicator);
        }
    }

    if cids.is_empty() {
        return Ok(());
    }

    // Delete existing links for these courses then re-insert.
    // This handles instructor changes cleanly.
    sqlx::query("DELETE FROM course_instructors WHERE course_id = ANY($1)")
        .bind(&cids)
        .execute(db_pool)
        .await?;

    sqlx::query(
        r#"
        INSERT INTO course_instructors (course_id, instructor_id, is_primary)
        SELECT * FROM UNNEST($1::int4[], $2::text[], $3::bool[])
        ON CONFLICT (course_id, instructor_id)
        DO UPDATE SET is_primary = EXCLUDED.is_primary
        "#,
    )
    .bind(&cids)
    .bind(&iids)
    .bind(&primaries)
    .execute(db_pool)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to batch upsert course_instructors: {}", e))?;

    Ok(())
}
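
// Minimal sketch of the UNNEST bulk-bind pattern used throughout this module,
// assuming a toy `tags(name text)` table: sqlx binds each Vec as a single
// Postgres array parameter, so a batch of any size costs one round-trip.
async fn example_unnest_insert(names: &[String], pool: &PgPool) -> Result<()> {
    sqlx::query("INSERT INTO tags (name) SELECT * FROM UNNEST($1::text[])")
        .bind(names)
        .execute(pool)
        .await?;
    Ok(())
}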
@@ -0,0 +1,236 @@
//! Database query functions for courses, used by the web API.

use crate::data::models::{Course, CourseInstructorDetail};
use crate::error::Result;
use sqlx::PgPool;
use std::collections::HashMap;

/// Column to sort search results by.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SortColumn {
    CourseCode,
    Title,
    Instructor,
    Time,
    Seats,
}

/// Sort direction.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SortDirection {
    Asc,
    Desc,
}

/// Shared WHERE clause for course search filters.
///
/// Parameters $1-$8 match the bind order in `search_courses`.
const SEARCH_WHERE: &str = r#"
    WHERE term_code = $1
      AND ($2::text[] IS NULL OR subject = ANY($2))
      AND ($3::text IS NULL OR title_search @@ plainto_tsquery('simple', $3) OR title ILIKE '%' || $3 || '%')
      AND ($4::int IS NULL OR course_number::int >= $4)
      AND ($5::int IS NULL OR course_number::int <= $5)
      AND ($6::bool = false OR max_enrollment > enrollment)
      AND ($7::text IS NULL OR instructional_method = $7)
      AND ($8::text IS NULL OR campus = $8)
"#;

/// Build a safe ORDER BY clause from typed sort parameters.
///
/// All column names are hardcoded string literals — no caller input is interpolated.
fn sort_clause(column: Option<SortColumn>, direction: Option<SortDirection>) -> String {
    let dir = match direction.unwrap_or(SortDirection::Asc) {
        SortDirection::Asc => "ASC",
        SortDirection::Desc => "DESC",
    };

    match column {
        Some(SortColumn::CourseCode) => {
            format!("subject {dir}, course_number {dir}, sequence_number {dir}")
        }
        Some(SortColumn::Title) => format!("title {dir}"),
        Some(SortColumn::Instructor) => {
            format!(
                "(SELECT i.display_name FROM course_instructors ci \
                 JOIN instructors i ON i.banner_id = ci.instructor_id \
                 WHERE ci.course_id = courses.id AND ci.is_primary = true \
                 LIMIT 1) {dir} NULLS LAST"
            )
        }
        Some(SortColumn::Time) => {
            format!("(meeting_times->0->>'begin_time') {dir} NULLS LAST")
        }
        Some(SortColumn::Seats) => {
            format!("(max_enrollment - enrollment) {dir}")
        }
        None => "subject ASC, course_number ASC, sequence_number ASC".to_string(),
    }
}
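
// Quick illustration of sort_clause output, per the match arms above:
//   sort_clause(Some(SortColumn::Seats), Some(SortDirection::Desc))
//     => "(max_enrollment - enrollment) DESC"
//   sort_clause(None, None)
//     => "subject ASC, course_number ASC, sequence_number ASC"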

/// Search courses by term with optional filters.
///
/// Returns `(courses, total_count)` for pagination. Uses FTS tsvector for word
/// search and falls back to trigram ILIKE for substring matching.
#[allow(clippy::too_many_arguments)]
pub async fn search_courses(
    db_pool: &PgPool,
    term_code: &str,
    subject: Option<&[String]>,
    title_query: Option<&str>,
    course_number_low: Option<i32>,
    course_number_high: Option<i32>,
    open_only: bool,
    instructional_method: Option<&str>,
    campus: Option<&str>,
    limit: i32,
    offset: i32,
    sort_by: Option<SortColumn>,
    sort_dir: Option<SortDirection>,
) -> Result<(Vec<Course>, i64)> {
    let order_by = sort_clause(sort_by, sort_dir);

    let data_query = format!(
        "SELECT * FROM courses {SEARCH_WHERE} ORDER BY {order_by} LIMIT $9 OFFSET $10"
    );
    let count_query = format!("SELECT COUNT(*) FROM courses {SEARCH_WHERE}");

    let courses = sqlx::query_as::<_, Course>(&data_query)
        .bind(term_code)
        .bind(subject)
        .bind(title_query)
        .bind(course_number_low)
        .bind(course_number_high)
        .bind(open_only)
        .bind(instructional_method)
        .bind(campus)
        .bind(limit)
        .bind(offset)
        .fetch_all(db_pool)
        .await?;

    let total: (i64,) = sqlx::query_as(&count_query)
        .bind(term_code)
        .bind(subject)
        .bind(title_query)
        .bind(course_number_low)
        .bind(course_number_high)
        .bind(open_only)
        .bind(instructional_method)
        .bind(campus)
        .fetch_one(db_pool)
        .await?;

    Ok((courses, total.0))
}
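
// Minimal call sketch: first page of open CS courses, sorted by open seats.
// The term code is a placeholder; real codes come from get_available_terms.
async fn example_search(pool: &PgPool) -> Result<()> {
    let subjects = vec!["CS".to_string()];
    let (courses, total) = search_courses(
        pool,
        "202510", // placeholder term code
        Some(&subjects),
        None, // title_query
        None, // course_number_low
        None, // course_number_high
        true, // open_only
        None, // instructional_method
        None, // campus
        25,   // limit
        0,    // offset
        Some(SortColumn::Seats),
        Some(SortDirection::Desc),
    )
    .await?;
    println!("page 1: {} of {} matching courses", courses.len(), total);
    Ok(())
}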

/// Get a single course by CRN and term.
pub async fn get_course_by_crn(
    db_pool: &PgPool,
    crn: &str,
    term_code: &str,
) -> Result<Option<Course>> {
    let course =
        sqlx::query_as::<_, Course>("SELECT * FROM courses WHERE crn = $1 AND term_code = $2")
            .bind(crn)
            .bind(term_code)
            .fetch_optional(db_pool)
            .await?;
    Ok(course)
}

/// Get instructors for a single course by course ID.
pub async fn get_course_instructors(
    db_pool: &PgPool,
    course_id: i32,
) -> Result<Vec<CourseInstructorDetail>> {
    let rows = sqlx::query_as::<_, CourseInstructorDetail>(
        r#"
        SELECT i.banner_id, i.display_name, i.email, ci.is_primary,
               rp.avg_rating, rp.num_ratings,
               ci.course_id
        FROM course_instructors ci
        JOIN instructors i ON i.banner_id = ci.instructor_id
        LEFT JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
        WHERE ci.course_id = $1
        ORDER BY ci.is_primary DESC, i.display_name
        "#,
    )
    .bind(course_id)
    .fetch_all(db_pool)
    .await?;
    Ok(rows)
}

/// Batch-fetch instructors for multiple courses in a single query.
///
/// Returns a map of `course_id → Vec<CourseInstructorDetail>`.
pub async fn get_instructors_for_courses(
    db_pool: &PgPool,
    course_ids: &[i32],
) -> Result<HashMap<i32, Vec<CourseInstructorDetail>>> {
    if course_ids.is_empty() {
        return Ok(HashMap::new());
    }

    let rows = sqlx::query_as::<_, CourseInstructorDetail>(
        r#"
        SELECT i.banner_id, i.display_name, i.email, ci.is_primary,
               rp.avg_rating, rp.num_ratings,
               ci.course_id
        FROM course_instructors ci
        JOIN instructors i ON i.banner_id = ci.instructor_id
        LEFT JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
        WHERE ci.course_id = ANY($1)
        ORDER BY ci.course_id, ci.is_primary DESC, i.display_name
        "#,
    )
    .bind(course_ids)
    .fetch_all(db_pool)
    .await?;

    let mut map: HashMap<i32, Vec<CourseInstructorDetail>> = HashMap::new();
    for row in rows {
        // course_id is always present in the batch query
        let cid = row.course_id.unwrap_or_default();
        map.entry(cid).or_default().push(row);
    }
    Ok(map)
}
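
// Sketch of the intended pairing with search_courses (wiring hypothetical):
// batch-fetch instructors once for a page of results, then attach per course.
async fn example_enrich(pool: &PgPool, courses: &[Course]) -> Result<()> {
    let ids: Vec<i32> = courses.iter().map(|c| c.id).collect();
    let mut by_course = get_instructors_for_courses(pool, &ids).await?;
    for course in courses {
        let instructors = by_course.remove(&course.id).unwrap_or_default();
        println!(
            "{} {}: {} instructor(s)",
            course.subject,
            course.course_number,
            instructors.len()
        );
    }
    Ok(())
}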

/// Get subjects for a term, sorted by total enrollment (descending).
///
/// Returns only subjects that have courses in the given term, with their
/// descriptions from reference_data and enrollment totals for ranking.
pub async fn get_subjects_by_enrollment(
    db_pool: &PgPool,
    term_code: &str,
) -> Result<Vec<(String, String, i64)>> {
    let rows: Vec<(String, String, i64)> = sqlx::query_as(
        r#"
        SELECT c.subject,
               COALESCE(rd.description, c.subject),
               COALESCE(SUM(c.enrollment), 0) as total_enrollment
        FROM courses c
        LEFT JOIN reference_data rd ON rd.category = 'subject' AND rd.code = c.subject
        WHERE c.term_code = $1
        GROUP BY c.subject, rd.description
        ORDER BY total_enrollment DESC
        "#,
    )
    .bind(term_code)
    .fetch_all(db_pool)
    .await?;
    Ok(rows)
}

/// Get all distinct term codes that have courses in the DB.
pub async fn get_available_terms(db_pool: &PgPool) -> Result<Vec<String>> {
    let rows: Vec<(String,)> =
        sqlx::query_as("SELECT DISTINCT term_code FROM courses ORDER BY term_code DESC")
            .fetch_all(db_pool)
            .await?;
    Ok(rows.into_iter().map(|(tc,)| tc).collect())
}
@@ -0,0 +1,8 @@
//! Database models and schema.

pub mod batch;
pub mod courses;
pub mod models;
pub mod reference;
pub mod rmp;
pub mod scrape_jobs;
@@ -0,0 +1,157 @@
//! `sqlx` models for the database schema.

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use ts_rs::TS;

/// Represents a meeting time stored as JSONB in the courses table.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct DbMeetingTime {
    pub begin_time: Option<String>,
    pub end_time: Option<String>,
    pub start_date: String,
    pub end_date: String,
    pub monday: bool,
    pub tuesday: bool,
    pub wednesday: bool,
    pub thursday: bool,
    pub friday: bool,
    pub saturday: bool,
    pub sunday: bool,
    pub building: Option<String>,
    pub building_description: Option<String>,
    pub room: Option<String>,
    pub campus: Option<String>,
    pub meeting_type: String,
    pub meeting_schedule_type: String,
}
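
// Illustrative JSONB shape for one DbMeetingTime entry (field names from the
// struct above; all values are made up):
//   {
//     "begin_time": "0900", "end_time": "0950",
//     "start_date": "01/13/2025", "end_date": "05/02/2025",
//     "monday": true, "wednesday": true, "friday": true,
//     "tuesday": false, "thursday": false, "saturday": false, "sunday": false,
//     "building": "NPB", "building_description": "North Paseo Building",
//     "room": "1.202", "campus": "1A",
//     "meeting_type": "CLAS", "meeting_schedule_type": "AFF"
//   }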

#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct Course {
    pub id: i32,
    pub crn: String,
    pub subject: String,
    pub course_number: String,
    pub title: String,
    pub term_code: String,
    pub enrollment: i32,
    pub max_enrollment: i32,
    pub wait_count: i32,
    pub wait_capacity: i32,
    pub last_scraped_at: DateTime<Utc>,
    // New scalar fields
    pub sequence_number: Option<String>,
    pub part_of_term: Option<String>,
    pub instructional_method: Option<String>,
    pub campus: Option<String>,
    pub credit_hours: Option<i32>,
    pub credit_hour_low: Option<i32>,
    pub credit_hour_high: Option<i32>,
    pub cross_list: Option<String>,
    pub cross_list_capacity: Option<i32>,
    pub cross_list_count: Option<i32>,
    pub link_identifier: Option<String>,
    pub is_section_linked: Option<bool>,
    // JSONB fields
    pub meeting_times: Value,
    pub attributes: Value,
}

#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct Instructor {
    pub banner_id: String,
    pub display_name: String,
    pub email: Option<String>,
}

#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct CourseInstructor {
    pub course_id: i32,
    pub instructor_id: String,
    pub is_primary: bool,
}

/// Joined instructor data for a course (from course_instructors + instructors + rmp_professors).
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct CourseInstructorDetail {
    pub banner_id: String,
    pub display_name: String,
    pub email: Option<String>,
    pub is_primary: bool,
    pub avg_rating: Option<f32>,
    pub num_ratings: Option<i32>,
    /// Present when fetched via batch query; `None` for single-course queries.
    pub course_id: Option<i32>,
}

#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct ReferenceData {
    pub category: String,
    pub code: String,
    pub description: String,
}

#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct CourseMetric {
    pub id: i32,
    pub course_id: i32,
    pub timestamp: DateTime<Utc>,
    pub enrollment: i32,
    pub wait_count: i32,
    pub seats_available: i32,
}

#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct CourseAudit {
    pub id: i32,
    pub course_id: i32,
    pub timestamp: DateTime<Utc>,
    pub field_changed: String,
    pub old_value: String,
    pub new_value: String,
}

/// The priority level of a scrape job.
#[derive(sqlx::Type, Copy, Debug, Clone)]
#[sqlx(type_name = "scrape_priority", rename_all = "PascalCase")]
pub enum ScrapePriority {
    Low,
    Medium,
    High,
    Critical,
}

/// The type of target for a scrape job, determining how the payload is interpreted.
#[derive(sqlx::Type, Copy, Debug, Clone)]
#[sqlx(type_name = "target_type", rename_all = "PascalCase")]
pub enum TargetType {
    Subject,
    CourseRange,
    CrnList,
    SingleCrn,
}

/// Represents a queryable job from the database.
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct ScrapeJob {
    pub id: i32,
    pub target_type: TargetType,
    pub target_payload: Value,
    pub priority: ScrapePriority,
    pub execute_at: DateTime<Utc>,
    pub created_at: DateTime<Utc>,
    pub locked_at: Option<DateTime<Utc>>,
    /// Number of retry attempts for this job (non-negative, enforced by CHECK constraint)
    pub retry_count: i32,
    /// Maximum number of retry attempts allowed (non-negative, enforced by CHECK constraint)
    pub max_retries: i32,
}
@@ -0,0 +1,57 @@
//! Database operations for the `reference_data` table (code→description lookups).

use crate::data::models::ReferenceData;
use crate::error::Result;
use html_escape::decode_html_entities;
use sqlx::PgPool;

/// Batch upsert reference data entries.
pub async fn batch_upsert(entries: &[ReferenceData], db_pool: &PgPool) -> Result<()> {
    if entries.is_empty() {
        return Ok(());
    }

    let categories: Vec<&str> = entries.iter().map(|e| e.category.as_str()).collect();
    let codes: Vec<&str> = entries.iter().map(|e| e.code.as_str()).collect();
    let descriptions: Vec<String> = entries
        .iter()
        .map(|e| decode_html_entities(&e.description).into_owned())
        .collect();

    sqlx::query(
        r#"
        INSERT INTO reference_data (category, code, description)
        SELECT * FROM UNNEST($1::text[], $2::text[], $3::text[])
        ON CONFLICT (category, code)
        DO UPDATE SET description = EXCLUDED.description
        "#,
    )
    .bind(&categories)
    .bind(&codes)
    .bind(&descriptions)
    .execute(db_pool)
    .await?;

    Ok(())
}

/// Get all reference data entries for a category.
pub async fn get_by_category(category: &str, db_pool: &PgPool) -> Result<Vec<ReferenceData>> {
    let rows = sqlx::query_as::<_, ReferenceData>(
        "SELECT category, code, description FROM reference_data WHERE category = $1 ORDER BY description",
    )
    .bind(category)
    .fetch_all(db_pool)
    .await?;
    Ok(rows)
}

/// Get all reference data entries (for cache initialization).
pub async fn get_all(db_pool: &PgPool) -> Result<Vec<ReferenceData>> {
    let rows = sqlx::query_as::<_, ReferenceData>(
        "SELECT category, code, description FROM reference_data ORDER BY category, description",
    )
    .fetch_all(db_pool)
    .await?;
    Ok(rows)
}
+311
@@ -0,0 +1,311 @@
//! Database operations for RateMyProfessors data.

use crate::error::Result;
use crate::rmp::RmpProfessor;
use sqlx::PgPool;
use std::collections::{HashMap, HashSet};
use tracing::{debug, info, warn};

/// Bulk upsert RMP professors using the UNNEST pattern.
///
/// Deduplicates by `legacy_id` before inserting — the RMP API can return
/// the same professor on multiple pages.
pub async fn batch_upsert_rmp_professors(
    professors: &[RmpProfessor],
    db_pool: &PgPool,
) -> Result<()> {
    if professors.is_empty() {
        return Ok(());
    }

    // Deduplicate: keep last occurrence per legacy_id (latest page wins)
    let mut seen = HashSet::new();
    let deduped: Vec<&RmpProfessor> = professors
        .iter()
        .rev()
        .filter(|p| seen.insert(p.legacy_id))
        .collect();

    let legacy_ids: Vec<i32> = deduped.iter().map(|p| p.legacy_id).collect();
    let graphql_ids: Vec<&str> = deduped.iter().map(|p| p.graphql_id.as_str()).collect();
    let first_names: Vec<String> = deduped
        .iter()
        .map(|p| p.first_name.trim().to_string())
        .collect();
    let first_name_refs: Vec<&str> = first_names.iter().map(|s| s.as_str()).collect();
    let last_names: Vec<String> = deduped
        .iter()
        .map(|p| p.last_name.trim().to_string())
        .collect();
    let last_name_refs: Vec<&str> = last_names.iter().map(|s| s.as_str()).collect();
    let departments: Vec<Option<&str>> = deduped.iter().map(|p| p.department.as_deref()).collect();
    let avg_ratings: Vec<Option<f32>> = deduped.iter().map(|p| p.avg_rating).collect();
    let avg_difficulties: Vec<Option<f32>> = deduped.iter().map(|p| p.avg_difficulty).collect();
    let num_ratings: Vec<i32> = deduped.iter().map(|p| p.num_ratings).collect();
    let would_take_again_pcts: Vec<Option<f32>> =
        deduped.iter().map(|p| p.would_take_again_pct).collect();

    sqlx::query(
        r#"
        INSERT INTO rmp_professors (
            legacy_id, graphql_id, first_name, last_name, department,
            avg_rating, avg_difficulty, num_ratings, would_take_again_pct,
            last_synced_at
        )
        SELECT
            v.legacy_id, v.graphql_id, v.first_name, v.last_name, v.department,
            v.avg_rating, v.avg_difficulty, v.num_ratings, v.would_take_again_pct,
            NOW()
        FROM UNNEST(
            $1::int4[], $2::text[], $3::text[], $4::text[], $5::text[],
            $6::real[], $7::real[], $8::int4[], $9::real[]
        ) AS v(
            legacy_id, graphql_id, first_name, last_name, department,
            avg_rating, avg_difficulty, num_ratings, would_take_again_pct
        )
        ON CONFLICT (legacy_id)
        DO UPDATE SET
            graphql_id = EXCLUDED.graphql_id,
            first_name = EXCLUDED.first_name,
            last_name = EXCLUDED.last_name,
            department = EXCLUDED.department,
            avg_rating = EXCLUDED.avg_rating,
            avg_difficulty = EXCLUDED.avg_difficulty,
            num_ratings = EXCLUDED.num_ratings,
            would_take_again_pct = EXCLUDED.would_take_again_pct,
            last_synced_at = EXCLUDED.last_synced_at
        "#,
    )
    .bind(&legacy_ids)
    .bind(&graphql_ids)
    .bind(&first_name_refs)
    .bind(&last_name_refs)
    .bind(&departments)
    .bind(&avg_ratings)
    .bind(&avg_difficulties)
    .bind(&num_ratings)
    .bind(&would_take_again_pcts)
    .execute(db_pool)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to batch upsert RMP professors: {}", e))?;

    Ok(())
}

/// Normalize a name for matching: lowercase, trim, strip trailing periods.
fn normalize(s: &str) -> String {
    s.trim().to_lowercase().trim_end_matches('.').to_string()
}

/// Parse Banner's "Last, First Middle" display name into (last, first) tokens.
///
/// Returns `None` if the format is unparseable (no comma, empty parts).
fn parse_display_name(display_name: &str) -> Option<(String, String)> {
    let (last_part, first_part) = display_name.split_once(',')?;
    let last = normalize(last_part);
    // Take only the first token of the first-name portion to drop middle names/initials.
    let first = normalize(first_part.split_whitespace().next()?);
    if last.is_empty() || first.is_empty() {
        return None;
    }
    Some((last, first))
}

/// Auto-match instructors to RMP professors by normalized name.
///
/// Loads all pending instructors and all RMP professors, then matches in Rust
/// using normalized name comparison. Only assigns a match when exactly one RMP
/// professor matches a given instructor.
pub async fn auto_match_instructors(db_pool: &PgPool) -> Result<u64> {
    // Load pending instructors
    let instructors: Vec<(String, String)> = sqlx::query_as(
        "SELECT banner_id, display_name FROM instructors WHERE rmp_match_status = 'pending'",
    )
    .fetch_all(db_pool)
    .await?;

    if instructors.is_empty() {
        info!(matched = 0, "No pending instructors to match");
        return Ok(0);
    }

    // Load all RMP professors
    let professors: Vec<(i32, String, String)> =
        sqlx::query_as("SELECT legacy_id, first_name, last_name FROM rmp_professors")
            .fetch_all(db_pool)
            .await?;

    // Build a lookup: (normalized_last, normalized_first) -> list of legacy_ids
    let mut rmp_index: HashMap<(String, String), Vec<i32>> = HashMap::new();
    for (legacy_id, first, last) in &professors {
        let key = (normalize(last), normalize(first));
        rmp_index.entry(key).or_default().push(*legacy_id);
    }

    // Match each instructor
    let mut matches: Vec<(i32, String)> = Vec::new(); // (legacy_id, banner_id)
    let mut no_comma = 0u64;
    let mut no_match = 0u64;
    let mut ambiguous = 0u64;

    for (banner_id, display_name) in &instructors {
        let Some((last, first)) = parse_display_name(display_name) else {
            no_comma += 1;
            continue;
        };

        let key = (last, first);
        match rmp_index.get(&key) {
            Some(ids) if ids.len() == 1 => {
                matches.push((ids[0], banner_id.clone()));
            }
            Some(ids) => {
                ambiguous += 1;
                debug!(
                    banner_id,
                    display_name,
                    candidates = ids.len(),
                    "Ambiguous RMP match, skipping"
                );
            }
            None => {
                no_match += 1;
            }
        }
    }

    if no_comma > 0 || ambiguous > 0 {
        warn!(
            total_pending = instructors.len(),
            no_comma,
            no_match,
            ambiguous,
            matched = matches.len(),
            "RMP matching diagnostics"
        );
    }

    // Batch update matches
    if matches.is_empty() {
        info!(matched = 0, "Auto-matched instructors to RMP professors");
        return Ok(0);
    }

    let legacy_ids: Vec<i32> = matches.iter().map(|(id, _)| *id).collect();
    let banner_ids: Vec<&str> = matches.iter().map(|(_, bid)| bid.as_str()).collect();

    let result = sqlx::query(
        r#"
        UPDATE instructors i
        SET
            rmp_legacy_id = m.legacy_id,
            rmp_match_status = 'auto'
        FROM UNNEST($1::int4[], $2::text[]) AS m(legacy_id, banner_id)
        WHERE i.banner_id = m.banner_id
        "#,
    )
    .bind(&legacy_ids)
    .bind(&banner_ids)
    .execute(db_pool)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to update instructor RMP matches: {}", e))?;

    let matched = result.rows_affected();
    info!(matched, "Auto-matched instructors to RMP professors");
    Ok(matched)
}
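
// Sketch of the intended sync pipeline (wiring hypothetical): pull every
// professor for the school from RMP, upsert them, then resolve pending
// instructor matches.
async fn example_rmp_sync(client: &crate::rmp::RmpClient, pool: &PgPool) -> Result<()> {
    let professors = client.fetch_all_professors().await?;
    batch_upsert_rmp_professors(&professors, pool).await?;
    let matched = auto_match_instructors(pool).await?;
    info!(matched, "RMP sync complete");
    Ok(())
}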

/// Retrieve RMP rating data for an instructor by banner_id.
///
/// Returns `(avg_rating, num_ratings)` if the instructor has an RMP match.
#[allow(dead_code)]
pub async fn get_instructor_rmp_data(
    db_pool: &PgPool,
    banner_id: &str,
) -> Result<Option<(f32, i32)>> {
    let row: Option<(f32, i32)> = sqlx::query_as(
        r#"
        SELECT rp.avg_rating, rp.num_ratings
        FROM instructors i
        JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
        WHERE i.banner_id = $1
          AND rp.avg_rating IS NOT NULL
        "#,
    )
    .bind(banner_id)
    .fetch_optional(db_pool)
    .await?;
    Ok(row)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_standard_name() {
        assert_eq!(
            parse_display_name("Smith, John"),
            Some(("smith".into(), "john".into()))
        );
    }

    #[test]
    fn parse_name_with_middle() {
        assert_eq!(
            parse_display_name("Smith, John David"),
            Some(("smith".into(), "john".into()))
        );
    }

    #[test]
    fn parse_name_with_middle_initial() {
        assert_eq!(
            parse_display_name("Garcia, Maria L."),
            Some(("garcia".into(), "maria".into()))
        );
    }

    #[test]
    fn parse_name_with_suffix_in_last() {
        // Banner may encode "Jr." as part of the last name.
        // normalize() strips trailing periods so "Jr." becomes "jr".
        assert_eq!(
            parse_display_name("Smith Jr., James"),
            Some(("smith jr".into(), "james".into()))
        );
    }

    #[test]
    fn parse_no_comma_returns_none() {
        assert_eq!(parse_display_name("SingleName"), None);
    }

    #[test]
    fn parse_empty_first_returns_none() {
        assert_eq!(parse_display_name("Smith,"), None);
    }

    #[test]
    fn parse_empty_last_returns_none() {
        assert_eq!(parse_display_name(", John"), None);
    }

    #[test]
    fn parse_extra_whitespace() {
        assert_eq!(
            parse_display_name(" Doe , Jane Marie "),
            Some(("doe".into(), "jane".into()))
        );
    }

    #[test]
    fn normalize_trims_and_lowercases() {
        assert_eq!(normalize(" FOO "), "foo");
    }

    #[test]
    fn normalize_strips_trailing_period() {
        assert_eq!(normalize("Jr."), "jr");
    }
}
@@ -0,0 +1,177 @@
//! Database operations for scrape job queue management.

use crate::data::models::{ScrapeJob, ScrapePriority, TargetType};
use crate::error::Result;
use sqlx::PgPool;
use std::collections::HashSet;

/// Atomically fetch and lock the next available scrape job.
///
/// Uses `FOR UPDATE SKIP LOCKED` to allow multiple workers to poll the queue
/// concurrently without conflicts. Only jobs that are unlocked and ready to
/// execute (based on `execute_at`) are considered.
///
/// # Arguments
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// * `Ok(Some(job))` if a job was successfully fetched and locked
/// * `Ok(None)` if no jobs are available
pub async fn fetch_and_lock_job(db_pool: &PgPool) -> Result<Option<ScrapeJob>> {
    let mut tx = db_pool.begin().await?;

    let job = sqlx::query_as::<_, ScrapeJob>(
        "SELECT * FROM scrape_jobs WHERE locked_at IS NULL AND execute_at <= NOW() ORDER BY priority DESC, execute_at ASC LIMIT 1 FOR UPDATE SKIP LOCKED"
    )
    .fetch_optional(&mut *tx)
    .await?;

    if let Some(ref job) = job {
        sqlx::query("UPDATE scrape_jobs SET locked_at = NOW() WHERE id = $1")
            .bind(job.id)
            .execute(&mut *tx)
            .await?;
    }

    tx.commit().await?;

    Ok(job)
}
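
// Worker-loop sketch built from the primitives in this file. `process` is a
// hypothetical job handler returning Result<()>; the poll interval is an
// assumption.
async fn example_worker(pool: &PgPool) -> Result<()> {
    loop {
        let Some(job) = fetch_and_lock_job(pool).await? else {
            tokio::time::sleep(std::time::Duration::from_secs(5)).await;
            continue;
        };
        if process(&job).await.is_ok() {
            delete_job(job.id, pool).await?;
        } else if !unlock_and_increment_retry(job.id, job.max_retries, pool).await? {
            // Retries exhausted: drop the job instead of retrying forever.
            delete_job(job.id, pool).await?;
        }
    }
}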

/// Delete a scrape job by ID.
///
/// Typically called after a job has been successfully processed or permanently failed.
///
/// # Arguments
/// * `job_id` - The database ID of the job to delete
/// * `db_pool` - PostgreSQL connection pool
pub async fn delete_job(job_id: i32, db_pool: &PgPool) -> Result<()> {
    sqlx::query("DELETE FROM scrape_jobs WHERE id = $1")
        .bind(job_id)
        .execute(db_pool)
        .await?;
    Ok(())
}

/// Unlock a scrape job by clearing its `locked_at` timestamp.
///
/// Used to release a job back to the queue, e.g. during graceful shutdown.
///
/// # Arguments
/// * `job_id` - The database ID of the job to unlock
/// * `db_pool` - PostgreSQL connection pool
pub async fn unlock_job(job_id: i32, db_pool: &PgPool) -> Result<()> {
    sqlx::query("UPDATE scrape_jobs SET locked_at = NULL WHERE id = $1")
        .bind(job_id)
        .execute(db_pool)
        .await?;
    Ok(())
}

/// Atomically unlock a job and increment its retry count.
///
/// Returns whether the job still has retries remaining. This is determined
/// atomically in the database to avoid race conditions between workers.
///
/// # Arguments
/// * `job_id` - The database ID of the job
/// * `max_retries` - Maximum number of retries allowed for this job
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// * `Ok(true)` if the job was unlocked and retries remain
/// * `Ok(false)` if the job has exhausted its retries
pub async fn unlock_and_increment_retry(
    job_id: i32,
    max_retries: i32,
    db_pool: &PgPool,
) -> Result<bool> {
    let result = sqlx::query_scalar::<_, Option<i32>>(
        "UPDATE scrape_jobs
         SET locked_at = NULL, retry_count = retry_count + 1
         WHERE id = $1
         RETURNING CASE WHEN retry_count < $2 THEN retry_count ELSE NULL END",
    )
    .bind(job_id)
    .bind(max_retries)
    .fetch_one(db_pool)
    .await?;

    Ok(result.is_some())
}

/// Find existing unlocked job payloads matching the given target type and candidates.
///
/// Returns a set of stringified JSON payloads that already exist in the queue,
/// used for deduplication when scheduling new jobs.
///
/// # Arguments
/// * `target_type` - The target type to filter by
/// * `candidate_payloads` - Candidate payloads to check against existing jobs
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// A `HashSet` of stringified JSON payloads that already have pending jobs
pub async fn find_existing_job_payloads(
    target_type: TargetType,
    candidate_payloads: &[serde_json::Value],
    db_pool: &PgPool,
) -> Result<HashSet<String>> {
    let existing_jobs: Vec<(serde_json::Value,)> = sqlx::query_as(
        "SELECT target_payload FROM scrape_jobs
         WHERE target_type = $1 AND target_payload = ANY($2) AND locked_at IS NULL",
    )
    .bind(target_type)
    .bind(candidate_payloads)
    .fetch_all(db_pool)
    .await?;

    let existing_payloads = existing_jobs
        .into_iter()
        .map(|(payload,)| payload.to_string())
        .collect();

    Ok(existing_payloads)
}

/// Batch insert scrape jobs using UNNEST for a single round-trip.
///
/// All jobs are inserted with `execute_at` set to the current time.
///
/// # Arguments
/// * `jobs` - Slice of `(payload, target_type, priority)` tuples to insert
/// * `db_pool` - PostgreSQL connection pool
pub async fn batch_insert_jobs(
    jobs: &[(serde_json::Value, TargetType, ScrapePriority)],
    db_pool: &PgPool,
) -> Result<()> {
    if jobs.is_empty() {
        return Ok(());
    }

    let mut target_types: Vec<String> = Vec::with_capacity(jobs.len());
    let mut payloads: Vec<serde_json::Value> = Vec::with_capacity(jobs.len());
    let mut priorities: Vec<String> = Vec::with_capacity(jobs.len());

    for (payload, target_type, priority) in jobs {
        target_types.push(format!("{target_type:?}"));
        payloads.push(payload.clone());
        priorities.push(format!("{priority:?}"));
    }

    sqlx::query(
        r#"
        INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at)
        SELECT v.target_type::target_type, v.payload, v.priority::scrape_priority, NOW()
        FROM UNNEST($1::text[], $2::jsonb[], $3::text[])
            AS v(target_type, payload, priority)
        "#,
    )
    .bind(&target_types)
    .bind(&payloads)
    .bind(&priorities)
    .execute(db_pool)
    .await?;

    Ok(())
}
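
// Scheduling sketch pairing the two helpers above: skip payloads that already
// have pending jobs, then insert the rest in one round-trip. Candidate payload
// construction is assumed to happen upstream.
async fn example_schedule(pool: &PgPool, candidates: Vec<serde_json::Value>) -> Result<()> {
    let existing = find_existing_job_payloads(TargetType::Subject, &candidates, pool).await?;
    let fresh: Vec<_> = candidates
        .into_iter()
        .filter(|p| !existing.contains(&p.to_string()))
        .map(|p| (p, TargetType::Subject, ScrapePriority::Medium))
        .collect();
    batch_insert_jobs(&fresh, pool).await?;
    Ok(())
}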
@@ -0,0 +1,269 @@
//! Custom tracing formatter

use serde::Serialize;
use serde_json::{Map, Value};
use std::fmt;
use time::macros::format_description;
use time::{OffsetDateTime, format_description::FormatItem};
use tracing::field::{Field, Visit};
use tracing::{Event, Level, Subscriber};
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields, FormattedFields};
use tracing_subscriber::registry::LookupSpan;
use yansi::Paint;

/// Cached format description for timestamps
const TIMESTAMP_FORMAT: &[FormatItem<'static>] =
    format_description!("[hour]:[minute]:[second].[subsecond digits:5]");

/// A custom formatter with enhanced timestamp formatting
///
/// Re-implementation of the Full formatter with improved timestamp display.
pub struct CustomPrettyFormatter;

impl<S, N> FormatEvent<S, N> for CustomPrettyFormatter
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        let meta = event.metadata();

        // 1) Timestamp (dimmed when ANSI)
        let now = OffsetDateTime::now_utc();
        let formatted_time = now.format(&TIMESTAMP_FORMAT).map_err(|e| {
            eprintln!("Failed to format timestamp: {}", e);
            fmt::Error
        })?;
        write_dimmed(&mut writer, formatted_time)?;
        writer.write_char(' ')?;

        // 2) Colored 5-char level like Full
        write_colored_level(&mut writer, meta.level())?;
        writer.write_char(' ')?;

        // 3) Span scope chain (bold names, fields in braces, dimmed ':')
        if let Some(scope) = ctx.event_scope() {
            let mut saw_any = false;
            for span in scope.from_root() {
                write_bold(&mut writer, span.metadata().name())?;
                saw_any = true;

                write_dimmed(&mut writer, ":")?;

                let ext = span.extensions();
                if let Some(fields) = &ext.get::<FormattedFields<N>>()
                    && !fields.fields.is_empty()
                {
                    write_bold(&mut writer, "{")?;
                    writer.write_str(fields.fields.as_str())?;
                    write_bold(&mut writer, "}")?;
                }
                write_dimmed(&mut writer, ":")?;
            }

            if saw_any {
                writer.write_char(' ')?;
            }
        }

        // 4) Target (dimmed), then a space
        if writer.has_ansi_escapes() {
            write!(writer, "{}: ", Paint::new(meta.target()).dim())?;
        } else {
            write!(writer, "{}: ", meta.target())?;
        }

        // 5) Event fields
        ctx.format_fields(writer.by_ref(), event)?;

        // 6) Newline
        writeln!(writer)
    }
}
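
// Illustrative output line (uncolored; span "scrape" with one field assumed):
//   14:32:07.48191  INFO scrape:{term=202510}: banner::scraper: fetched courses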

/// A custom JSON formatter that flattens fields to root level
///
/// Outputs logs in the format: { "message": "...", "level": "...", "customAttribute": "..." }
pub struct CustomJsonFormatter;

impl<S, N> FormatEvent<S, N> for CustomJsonFormatter
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        let meta = event.metadata();

        #[derive(Serialize)]
        struct EventFields {
            message: String,
            level: String,
            target: String,
            #[serde(flatten)]
            spans: Map<String, Value>,
            #[serde(flatten)]
            fields: Map<String, Value>,
        }

        let (message, fields, spans) = {
            let mut message: Option<String> = None;
            let mut fields: Map<String, Value> = Map::new();
            let mut spans: Map<String, Value> = Map::new();

            struct FieldVisitor<'a> {
                message: &'a mut Option<String>,
                fields: &'a mut Map<String, Value>,
            }

            impl<'a> Visit for FieldVisitor<'a> {
                fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
                    let key = field.name();
                    if key == "message" {
                        *self.message = Some(format!("{:?}", value));
                    } else {
                        // Fallback for values without a typed record_* method:
                        // stringify via Debug.
                        self.fields
                            .insert(key.to_string(), Value::String(format!("{:?}", value)));
                    }
                }

                fn record_str(&mut self, field: &Field, value: &str) {
                    let key = field.name();
                    if key == "message" {
                        *self.message = Some(value.to_string());
                    } else {
                        self.fields
                            .insert(key.to_string(), Value::String(value.to_string()));
                    }
                }

                fn record_i64(&mut self, field: &Field, value: i64) {
                    let key = field.name();
                    if key != "message" {
                        self.fields.insert(
                            key.to_string(),
                            Value::Number(serde_json::Number::from(value)),
                        );
                    }
                }

                fn record_u64(&mut self, field: &Field, value: u64) {
                    let key = field.name();
                    if key != "message" {
                        self.fields.insert(
                            key.to_string(),
                            Value::Number(serde_json::Number::from(value)),
                        );
                    }
                }

                fn record_bool(&mut self, field: &Field, value: bool) {
                    let key = field.name();
                    if key != "message" {
                        self.fields.insert(key.to_string(), Value::Bool(value));
                    }
                }
            }

            let mut visitor = FieldVisitor {
                message: &mut message,
                fields: &mut fields,
            };
            event.record(&mut visitor);

            // Collect span information from the span hierarchy
            if let Some(scope) = ctx.event_scope() {
                for span in scope.from_root() {
                    let span_name = span.metadata().name().to_string();
                    let mut span_fields: Map<String, Value> = Map::new();

                    // Try to extract fields from FormattedFields
                    let ext = span.extensions();
                    if let Some(formatted_fields) = ext.get::<FormattedFields<N>>() {
                        // Try to parse as JSON first
                        if let Ok(json_fields) = serde_json::from_str::<Map<String, Value>>(
                            formatted_fields.fields.as_str(),
                        ) {
                            span_fields.extend(json_fields);
                        } else {
                            // If not valid JSON, treat the entire field string as a single field
                            span_fields.insert(
                                "raw".to_string(),
                                Value::String(formatted_fields.fields.as_str().to_string()),
                            );
                        }
                    }

                    // Insert span as a nested object directly into the spans map
                    spans.insert(span_name, Value::Object(span_fields));
                }
            }

            (message, fields, spans)
        };

        let json = EventFields {
            message: message.unwrap_or_default(),
            level: meta.level().to_string(),
            target: meta.target().to_string(),
            spans,
            fields,
        };

        writeln!(
            writer,
            "{}",
            serde_json::to_string(&json).unwrap_or_else(|_| "{}".to_string())
        )
    }
}
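
// Illustrative JSON output (key order indicative only): event fields land at
// the root, span fields nest under the span name.
//   {"message":"fetched courses","level":"INFO","target":"banner::scraper",
//    "scrape":{"term":"202510"},"count":142}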

/// Write the verbosity level with the same coloring/alignment as the Full formatter.
fn write_colored_level(writer: &mut Writer<'_>, level: &Level) -> fmt::Result {
    if writer.has_ansi_escapes() {
        let paint = match *level {
            Level::TRACE => Paint::new("TRACE").magenta(),
            Level::DEBUG => Paint::new("DEBUG").blue(),
            Level::INFO => Paint::new(" INFO").green(),
            Level::WARN => Paint::new(" WARN").yellow(),
            Level::ERROR => Paint::new("ERROR").red(),
        };
        write!(writer, "{}", paint)
    } else {
        // Right-align to width 5 like Full's non-ANSI mode
        match *level {
            Level::TRACE => write!(writer, "{:>5}", "TRACE"),
            Level::DEBUG => write!(writer, "{:>5}", "DEBUG"),
            Level::INFO => write!(writer, "{:>5}", " INFO"),
            Level::WARN => write!(writer, "{:>5}", " WARN"),
            Level::ERROR => write!(writer, "{:>5}", "ERROR"),
        }
    }
}

fn write_dimmed(writer: &mut Writer<'_>, s: impl fmt::Display) -> fmt::Result {
    if writer.has_ansi_escapes() {
        write!(writer, "{}", Paint::new(s).dim())
    } else {
        write!(writer, "{}", s)
    }
}

fn write_bold(writer: &mut Writer<'_>, s: impl fmt::Display) -> fmt::Result {
    if writer.has_ansi_escapes() {
        write!(writer, "{}", Paint::new(s).bold())
    } else {
        write!(writer, "{}", s)
    }
}
+12
-1
@@ -1,6 +1,17 @@
pub mod app_state;
pub mod app;
pub mod banner;
pub mod bot;
pub mod cli;
pub mod config;
pub mod data;
pub mod error;
pub mod formatter;
pub mod logging;
pub mod rmp;
pub mod scraper;
pub mod services;
pub mod signals;
pub mod state;
pub mod status;
pub mod utils;
pub mod web;
@@ -0,0 +1,47 @@
use crate::cli::TracingFormat;
use crate::config::Config;
use crate::formatter;
use tracing_subscriber::fmt::format::JsonFields;
use tracing_subscriber::{EnvFilter, FmtSubscriber};

/// Configure and initialize logging for the application
pub fn setup_logging(config: &Config, tracing_format: TracingFormat) {
    // Configure logging based on config
    // Note: Even when base_level is trace or debug, we suppress trace logs from noisy
    // infrastructure modules to keep output readable. These modules use debug for important
    // events and trace only for very detailed debugging.
    let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| {
        let base_level = &config.log_level;
        EnvFilter::new(format!(
            "warn,banner={},banner::rate_limiter=warn,banner::session=debug,banner::rate_limit_middleware=warn,banner::middleware=debug",
            base_level
        ))
    });

    // Select formatter based on CLI args
    let use_pretty = match tracing_format {
        TracingFormat::Pretty => true,
        TracingFormat::Json => false,
    };

    let subscriber: Box<dyn tracing::Subscriber + Send + Sync> = if use_pretty {
        Box::new(
            FmtSubscriber::builder()
                .with_target(true)
                .event_format(formatter::CustomPrettyFormatter)
                .with_env_filter(filter)
                .finish(),
        )
    } else {
        Box::new(
            FmtSubscriber::builder()
                .with_target(true)
                .event_format(formatter::CustomJsonFormatter)
                .fmt_fields(JsonFields::new())
                .with_env_filter(filter)
                .finish(),
        )
    };

    tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed");
}
+45
-187
@@ -1,48 +1,48 @@
|
||||
use serenity::all::{ClientBuilder, GatewayIntents};
|
||||
use tokio::signal;
|
||||
use tracing::{error, info, warn};
|
||||
use tracing_subscriber::{EnvFilter, FmtSubscriber};
|
||||
use crate::app::App;
|
||||
use crate::cli::{Args, ServiceName, determine_enabled_services};
|
||||
use crate::logging::setup_logging;
|
||||
use clap::Parser;
|
||||
use std::process::ExitCode;
|
||||
use tracing::info;
|
||||
|
||||
use crate::app_state::AppState;
|
||||
use crate::banner::BannerApi;
|
||||
use crate::banner::scraper::CourseScraper;
|
||||
use crate::bot::{Data, get_commands};
|
||||
use crate::config::Config;
|
||||
use crate::services::manager::ServiceManager;
|
||||
use crate::services::{ServiceResult, bot::BotService, web::WebService};
|
||||
use crate::web::routes::BannerState;
|
||||
use figment::{Figment, providers::Env};
|
||||
use std::sync::Arc;
|
||||
|
||||
mod app_state;
|
||||
mod app;
|
||||
mod banner;
|
||||
mod bot;
|
||||
mod cli;
|
||||
mod config;
|
||||
mod data;
|
||||
mod error;
|
||||
mod formatter;
|
||||
mod logging;
|
||||
mod rmp;
|
||||
mod scraper;
|
||||
mod services;
|
||||
mod signals;
|
||||
mod state;
|
||||
mod status;
|
||||
mod web;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
async fn main() -> ExitCode {
|
||||
dotenvy::dotenv().ok();
|
||||
|
||||
// Configure logging
|
||||
let filter =
|
||||
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("warn,banner=debug"));
|
||||
let subscriber = {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
FmtSubscriber::builder()
|
||||
}
|
||||
#[cfg(not(debug_assertions))]
|
||||
{
|
||||
FmtSubscriber::builder().json()
|
||||
}
|
||||
}
|
||||
.with_env_filter(filter)
|
||||
.with_target(true)
|
||||
.finish();
|
||||
tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed");
|
||||
// Parse CLI arguments
|
||||
let args = Args::parse();
|
||||
|
||||
// Determine which services should be enabled
|
||||
let enabled_services: Vec<ServiceName> =
|
||||
determine_enabled_services(&args).expect("Failed to determine enabled services");
|
||||
|
||||
// Create and initialize the application
|
||||
let mut app = App::new().await.expect("Failed to initialize application");
|
||||
|
||||
// Setup logging — must happen before any info!() calls to avoid silently dropped logs
|
||||
setup_logging(app.config(), args.tracing);
|
||||
|
||||
info!(
|
||||
enabled_services = ?enabled_services,
|
||||
"services configuration loaded"
|
||||
);
|
||||
|
||||
// Log application startup context
|
||||
info!(
|
||||
@@ -52,163 +52,21 @@ async fn main() {
|
||||
} else {
|
||||
"production"
|
||||
},
|
||||
"starting banner system"
|
||||
"starting banner"
|
||||
);
|
||||
|
||||
let config: Config = Figment::new()
|
||||
.merge(Env::prefixed("APP_"))
|
||||
.extract()
|
||||
.expect("Failed to load config");
|
||||
// Setup services (web, scraper)
|
||||
app.setup_services(&enabled_services)
|
||||
.expect("Failed to setup services");
|
||||
|
||||
info!(
|
||||
        port = config.port,
        shutdown_timeout = format!("{:.2?}", config.shutdown_timeout),
        banner_base_url = config.banner_base_url,
        "configuration loaded"
    );

    // Create BannerApi and AppState
    let banner_api =
        BannerApi::new(config.banner_base_url.clone()).expect("Failed to create BannerApi");
    banner_api
        .setup()
        .await
        .expect("Failed to set up BannerApi session");

    let banner_api_arc = Arc::new(banner_api);
    let app_state = AppState::new(banner_api_arc.clone(), &config.redis_url)
        .expect("Failed to create AppState");

    // Create CourseScraper for web service
    let scraper = CourseScraper::new(banner_api_arc.clone(), &config.redis_url)
        .expect("Failed to create CourseScraper");

    // Create BannerState for web service
    let banner_state = BannerState {
        api: banner_api_arc,
        scraper: Arc::new(scraper),
    };

    // Configure the client with your Discord bot token in the environment
    let intents = GatewayIntents::non_privileged();

    let bot_target_guild = config.bot_target_guild;

    let framework = poise::Framework::builder()
        .options(poise::FrameworkOptions {
            commands: get_commands(),
            ..Default::default()
        })
        .setup(move |ctx, _ready, framework| {
            let app_state = app_state.clone();
            Box::pin(async move {
                poise::builtins::register_in_guild(
                    ctx,
                    &framework.options().commands,
                    bot_target_guild.into(),
                )
                .await?;
                poise::builtins::register_globally(ctx, &framework.options().commands).await?;
                Ok(Data { app_state })
            })
        })
        .build();

    let client = ClientBuilder::new(config.bot_token, intents)
        .framework(framework)
        .await
        .expect("Failed to build client");

    // Extract shutdown timeout before moving config
    let shutdown_timeout = config.shutdown_timeout;
    let port = config.port;

    // Create service manager
    let mut service_manager = ServiceManager::new();

    // Register services with the manager
    let bot_service = Box::new(BotService::new(client));
    let web_service = Box::new(WebService::new(port, banner_state));

    service_manager.register_service("bot", bot_service);
    service_manager.register_service("web", web_service);

    // Spawn all registered services
    service_manager.spawn_all();

    // Set up CTRL+C signal handling
    let ctrl_c = async {
        signal::ctrl_c()
            .await
            .expect("Failed to install CTRL+C signal handler");
        info!("received ctrl+c, gracefully shutting down...");
    };

    // Main application loop - wait for services or CTRL+C
    let mut exit_code = 0;

    tokio::select! {
        (service_name, result) = service_manager.run() => {
            // A service completed unexpectedly
            match result {
                ServiceResult::GracefulShutdown => {
                    info!(service = service_name, "service completed gracefully");
                }
                ServiceResult::NormalCompletion => {
                    warn!(service = service_name, "service completed unexpectedly");
                    exit_code = 1;
                }
                ServiceResult::Error(e) => {
                    error!(service = service_name, error = ?e, "service failed");
                    exit_code = 1;
                }
            }

            // Shutdown remaining services
            match service_manager.shutdown(shutdown_timeout).await {
                Ok(elapsed) => {
                    info!(
                        remaining = format!("{:.2?}", shutdown_timeout - elapsed),
                        "graceful shutdown complete"
                    );
                }
                Err(pending_services) => {
                    warn!(
                        pending_count = pending_services.len(),
                        pending_services = ?pending_services,
                        "graceful shutdown elapsed - {} service(s) did not complete",
                        pending_services.len()
                    );

                    // Non-zero exit code, default to 2 if not set
                    exit_code = if exit_code == 0 { 2 } else { exit_code };
                }
            }
        }
        _ = ctrl_c => {
            // User requested shutdown
            info!("user requested shutdown via ctrl+c");
            match service_manager.shutdown(shutdown_timeout).await {
                Ok(elapsed) => {
                    info!(
                        remaining = format!("{:.2?}", shutdown_timeout - elapsed),
                        "graceful shutdown complete"
                    );
                    info!("graceful shutdown complete");
                }
                Err(pending_services) => {
                    warn!(
                        pending_count = pending_services.len(),
                        pending_services = ?pending_services,
                        "graceful shutdown elapsed - {} service(s) did not complete",
                        pending_services.len()
                    );
                    exit_code = 2;
                }
            }
        }
    }

    // Setup bot service if enabled
    if enabled_services.contains(&ServiceName::Bot) {
        app.setup_bot_service()
            .await
            .expect("Failed to setup bot service");
    }

    info!(exit_code, "application shutdown complete");
    std::process::exit(exit_code);

    // Start all services and run the application
    app.start_services();
    app.run().await
}

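For reference, the `ServiceResult` variants matched above imply an enum roughly like the following. This is a sketch inferred from the match arms in main(); the actual definition lives in the services module and is not part of this hunk:

// Sketch only — inferred from usage, not the committed definition.
pub enum ServiceResult {
    /// The service observed the shutdown signal and exited cleanly (exit code 0).
    GracefulShutdown,
    /// run() returned Ok(()) without being asked to stop (exit code 1).
    NormalCompletion,
    /// run() returned an error (exit code 1).
    Error(anyhow::Error),
}
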
+156
@@ -0,0 +1,156 @@
//! RateMyProfessors GraphQL client for bulk professor data sync.

use anyhow::Result;
use serde::{Deserialize, Serialize};
use tracing::{debug, info};

/// UTSA's school ID on RateMyProfessors (base64 of "School-1516").
const UTSA_SCHOOL_ID: &str = "U2Nob29sLTE1MTY=";

/// Basic auth header value (base64 of "test:test").
const AUTH_HEADER: &str = "Basic dGVzdDp0ZXN0";

/// GraphQL endpoint.
const GRAPHQL_URL: &str = "https://www.ratemyprofessors.com/graphql";

/// Page size for paginated fetches.
const PAGE_SIZE: u32 = 100;

/// A professor record from RateMyProfessors.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RmpProfessor {
    pub legacy_id: i32,
    pub graphql_id: String,
    pub first_name: String,
    pub last_name: String,
    pub department: Option<String>,
    pub avg_rating: Option<f32>,
    pub avg_difficulty: Option<f32>,
    pub num_ratings: i32,
    pub would_take_again_pct: Option<f32>,
}

/// Client for fetching professor data from RateMyProfessors.
pub struct RmpClient {
    http: reqwest::Client,
}

impl Default for RmpClient {
    fn default() -> Self {
        Self::new()
    }
}

impl RmpClient {
    pub fn new() -> Self {
        Self {
            http: reqwest::Client::new(),
        }
    }

    /// Fetch all professors for UTSA via paginated GraphQL queries.
    pub async fn fetch_all_professors(&self) -> Result<Vec<RmpProfessor>> {
        let mut all = Vec::new();
        let mut cursor: Option<String> = None;

        loop {
            let after_clause = match &cursor {
                Some(c) => format!(r#", after: "{}""#, c),
                None => String::new(),
            };

            let query = format!(
                r#"query {{
                    newSearch {{
                        teachers(query: {{ text: "", schoolID: "{school_id}" }}, first: {page_size}{after}) {{
                            edges {{
                                cursor
                                node {{
                                    id
                                    legacyId
                                    firstName
                                    lastName
                                    department
                                    avgRating
                                    avgDifficulty
                                    numRatings
                                    wouldTakeAgainPercent
                                }}
                            }}
                            pageInfo {{
                                hasNextPage
                                endCursor
                            }}
                        }}
                    }}
                }}"#,
                school_id = UTSA_SCHOOL_ID,
                page_size = PAGE_SIZE,
                after = after_clause,
            );

            let body = serde_json::json!({ "query": query });

            let resp = self
                .http
                .post(GRAPHQL_URL)
                .header("Authorization", AUTH_HEADER)
                .json(&body)
                .send()
                .await?;

            let status = resp.status();
            if !status.is_success() {
                let text = resp.text().await.unwrap_or_default();
                anyhow::bail!("RMP GraphQL request failed ({status}): {text}");
            }

            let json: serde_json::Value = resp.json().await?;

            let teachers = &json["data"]["newSearch"]["teachers"];
            let edges = teachers["edges"]
                .as_array()
                .ok_or_else(|| anyhow::anyhow!("Missing edges in RMP response"))?;

            for edge in edges {
                let node = &edge["node"];
                let wta = node["wouldTakeAgainPercent"]
                    .as_f64()
                    .map(|v| v as f32)
                    .filter(|&v| v >= 0.0);

                all.push(RmpProfessor {
                    legacy_id: node["legacyId"]
                        .as_i64()
                        .ok_or_else(|| anyhow::anyhow!("Missing legacyId"))?
                        as i32,
                    graphql_id: node["id"]
                        .as_str()
                        .ok_or_else(|| anyhow::anyhow!("Missing id"))?
                        .to_string(),
                    first_name: node["firstName"].as_str().unwrap_or_default().to_string(),
                    last_name: node["lastName"].as_str().unwrap_or_default().to_string(),
                    department: node["department"].as_str().map(|s| s.to_string()),
                    avg_rating: node["avgRating"].as_f64().map(|v| v as f32),
                    avg_difficulty: node["avgDifficulty"].as_f64().map(|v| v as f32),
                    num_ratings: node["numRatings"].as_i64().unwrap_or(0) as i32,
                    would_take_again_pct: wta,
                });
            }

            let page_info = &teachers["pageInfo"];
            let has_next = page_info["hasNextPage"].as_bool().unwrap_or(false);

            if !has_next {
                break;
            }

            cursor = page_info["endCursor"].as_str().map(|s| s.to_string());

            debug!(fetched = all.len(), "RMP pagination: fetching next page");
        }

        info!(total = all.len(), "Fetched all RMP professors");
        Ok(all)
    }
}

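Since `fetch_all_professors` hides the cursor pagination entirely, driving the client is a single await. A minimal caller might look like this sketch (the `#[tokio::main]` harness is an assumption added for illustration):

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let client = RmpClient::new();
    // One call walks every page of the cursor-paginated GraphQL search.
    let professors = client.fetch_all_professors().await?;
    println!("fetched {} professors", professors.len());
    Ok(())
}
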
@@ -0,0 +1,150 @@
pub mod subject;

use crate::banner::BannerApi;
use crate::data::models::TargetType;
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use thiserror::Error;

/// Errors that can occur during job parsing
#[derive(Debug, Error)]
pub enum JobParseError {
    #[error("Invalid JSON in job payload: {0}")]
    InvalidJson(#[from] serde_json::Error),
    #[error("Unsupported target type: {0:?}")]
    UnsupportedTargetType(TargetType),
}

/// Errors that can occur during job processing
#[derive(Debug, Error)]
pub enum JobError {
    #[error("Recoverable error: {0}")]
    Recoverable(#[source] anyhow::Error),
    #[error("Unrecoverable error: {0}")]
    Unrecoverable(#[source] anyhow::Error),
}

/// Common trait interface for all job types
#[async_trait::async_trait]
pub trait Job: Send + Sync {
    /// The target type this job handles
    #[allow(dead_code)]
    fn target_type(&self) -> TargetType;

    /// Process the job with the given API client and database pool
    async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<()>;

    /// Get a human-readable description of the job
    fn description(&self) -> String;
}

/// Main job enum that dispatches to specific job implementations
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum JobType {
    Subject(subject::SubjectJob),
}

impl JobType {
    /// Create a job from the target type and payload
    pub fn from_target_type_and_payload(
        target_type: TargetType,
        payload: serde_json::Value,
    ) -> Result<Self, JobParseError> {
        match target_type {
            TargetType::Subject => {
                let subject_job: subject::SubjectJob =
                    serde_json::from_value(payload).map_err(JobParseError::InvalidJson)?;
                Ok(JobType::Subject(subject_job))
            }
            _ => Err(JobParseError::UnsupportedTargetType(target_type)),
        }
    }

    /// Convert to a Job trait object
    pub fn boxed(self) -> Box<dyn Job> {
        match self {
            JobType::Subject(job) => Box::new(job),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    // --- Valid dispatch ---

    #[test]
    fn test_from_target_subject_valid() {
        let result =
            JobType::from_target_type_and_payload(TargetType::Subject, json!({"subject": "CS"}));
        assert!(matches!(result, Ok(JobType::Subject(_))));
    }

    #[test]
    fn test_from_target_subject_empty_string() {
        let result =
            JobType::from_target_type_and_payload(TargetType::Subject, json!({"subject": ""}));
        assert!(matches!(result, Ok(JobType::Subject(_))));
    }

    // --- Invalid JSON ---

    #[test]
    fn test_from_target_subject_missing_field() {
        let result = JobType::from_target_type_and_payload(TargetType::Subject, json!({}));
        assert!(matches!(result, Err(JobParseError::InvalidJson(_))));
    }

    #[test]
    fn test_from_target_subject_wrong_type() {
        let result =
            JobType::from_target_type_and_payload(TargetType::Subject, json!({"subject": 123}));
        assert!(matches!(result, Err(JobParseError::InvalidJson(_))));
    }

    #[test]
    fn test_from_target_subject_null_payload() {
        let result = JobType::from_target_type_and_payload(TargetType::Subject, json!(null));
        assert!(matches!(result, Err(JobParseError::InvalidJson(_))));
    }

    // --- Unsupported target types ---

    #[test]
    fn test_from_target_unsupported_variants() {
        let unsupported = [
            TargetType::CourseRange,
            TargetType::CrnList,
            TargetType::SingleCrn,
        ];
        for target_type in unsupported {
            let result =
                JobType::from_target_type_and_payload(target_type, json!({"subject": "CS"}));
            assert!(
                matches!(result, Err(JobParseError::UnsupportedTargetType(_))),
                "expected UnsupportedTargetType for {target_type:?}"
            );
        }
    }

    // --- Error Display ---

    #[test]
    fn test_job_parse_error_display() {
        let invalid_json_err =
            JobType::from_target_type_and_payload(TargetType::Subject, json!(null)).unwrap_err();
        let display = invalid_json_err.to_string();
        assert!(display.contains("Invalid JSON"), "got: {display}");

        let unsupported_err =
            JobType::from_target_type_and_payload(TargetType::CrnList, json!({})).unwrap_err();
        let display = unsupported_err.to_string();
        assert!(
            display.contains("Unsupported target type"),
            "got: {display}"
        );
    }
}

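The parse-then-dispatch flow this module provides (and which the worker later in this diff relies on) reduces to the following sketch; the payload is illustrative:

// Sketch: turning a queued (target_type, payload) pair into a runnable job.
let payload = serde_json::json!({ "subject": "CS" });
let job = JobType::from_target_type_and_payload(TargetType::Subject, payload)?.boxed();
assert_eq!(job.description(), "Scrape subject: CS");
// job.process(&banner_api, &db_pool).await? would then execute it.
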
@@ -0,0 +1,56 @@
use super::Job;
use crate::banner::{BannerApi, SearchQuery, Term};
use crate::data::batch::batch_upsert_courses;
use crate::data::models::TargetType;
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use tracing::{debug, info};

/// Job implementation for scraping subject data
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SubjectJob {
    pub subject: String,
}

impl SubjectJob {
    pub fn new(subject: String) -> Self {
        Self { subject }
    }
}

#[async_trait::async_trait]
impl Job for SubjectJob {
    fn target_type(&self) -> TargetType {
        TargetType::Subject
    }

    #[tracing::instrument(skip(self, banner_api, db_pool), fields(subject = %self.subject))]
    async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<()> {
        let subject_code = &self.subject;

        // Get the current term
        let term = Term::get_current().inner().to_string();
        let query = SearchQuery::new().subject(subject_code).max_results(500);

        let search_result = banner_api
            .search(&term, &query, "subjectDescription", false)
            .await?;

        if let Some(courses_from_api) = search_result.data {
            info!(
                subject = %subject_code,
                count = courses_from_api.len(),
                "Found courses"
            );
            batch_upsert_courses(&courses_from_api, db_pool).await?;
        }

        debug!(subject = %subject_code, "Subject job completed");
        Ok(())
    }

    fn description(&self) -> String {
        format!("Scrape subject: {}", self.subject)
    }
}

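Because `SubjectJob` derives Serialize, the scheduler (later in this diff) can rebuild the exact payload string it compares against queued rows for deduplication. The round-trip is:

let job = SubjectJob::new("CS".to_string());
// Serializes to {"subject":"CS"} — the same string form the scheduler
// compares against existing job payloads to avoid duplicate enqueues.
let payload = serde_json::to_value(&job)?;
assert_eq!(payload.to_string(), r#"{"subject":"CS"}"#);
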
@@ -0,0 +1,134 @@
pub mod jobs;
pub mod scheduler;
pub mod worker;

use crate::banner::BannerApi;
use crate::services::Service;
use crate::state::ReferenceCache;
use crate::status::{ServiceStatus, ServiceStatusRegistry};
use sqlx::PgPool;
use std::sync::Arc;
use tokio::sync::{RwLock, broadcast};
use tokio::task::JoinHandle;
use tracing::{info, warn};

use self::scheduler::Scheduler;
use self::worker::Worker;

/// The main service that will be managed by the application's `ServiceManager`.
///
/// It holds the shared resources (database pool, API client) and manages the
/// lifecycle of the Scheduler and Worker tasks.
pub struct ScraperService {
    db_pool: PgPool,
    banner_api: Arc<BannerApi>,
    reference_cache: Arc<RwLock<ReferenceCache>>,
    service_statuses: ServiceStatusRegistry,
    scheduler_handle: Option<JoinHandle<()>>,
    worker_handles: Vec<JoinHandle<()>>,
    shutdown_tx: Option<broadcast::Sender<()>>,
}

impl ScraperService {
    /// Creates a new `ScraperService`.
    pub fn new(
        db_pool: PgPool,
        banner_api: Arc<BannerApi>,
        reference_cache: Arc<RwLock<ReferenceCache>>,
        service_statuses: ServiceStatusRegistry,
    ) -> Self {
        Self {
            db_pool,
            banner_api,
            reference_cache,
            service_statuses,
            scheduler_handle: None,
            worker_handles: Vec::new(),
            shutdown_tx: None,
        }
    }

    /// Starts the scheduler and a pool of workers.
    pub fn start(&mut self) {
        info!("ScraperService starting");

        // Create shutdown channel
        let (shutdown_tx, _) = broadcast::channel(1);
        self.shutdown_tx = Some(shutdown_tx.clone());

        let scheduler = Scheduler::new(
            self.db_pool.clone(),
            self.banner_api.clone(),
            self.reference_cache.clone(),
        );
        let shutdown_rx = shutdown_tx.subscribe();
        let scheduler_handle = tokio::spawn(async move {
            scheduler.run(shutdown_rx).await;
        });
        self.scheduler_handle = Some(scheduler_handle);
        info!("Scheduler task spawned");

        let worker_count = 4; // This could be configurable
        for i in 0..worker_count {
            let worker = Worker::new(i, self.db_pool.clone(), self.banner_api.clone());
            let shutdown_rx = shutdown_tx.subscribe();
            let worker_handle = tokio::spawn(async move {
                worker.run(shutdown_rx).await;
            });
            self.worker_handles.push(worker_handle);
        }
        info!(
            worker_count = self.worker_handles.len(),
            "Spawned worker tasks"
        );
        self.service_statuses.set("scraper", ServiceStatus::Active);
    }
}

#[async_trait::async_trait]
impl Service for ScraperService {
    fn name(&self) -> &'static str {
        "scraper"
    }

    async fn run(&mut self) -> Result<(), anyhow::Error> {
        self.start();
        std::future::pending::<()>().await;
        Ok(())
    }

    async fn shutdown(&mut self) -> Result<(), anyhow::Error> {
        self.service_statuses
            .set("scraper", ServiceStatus::Disabled);
        info!("Shutting down scraper service");

        // Send shutdown signal to all tasks
        if let Some(shutdown_tx) = self.shutdown_tx.take() {
            let _ = shutdown_tx.send(());
        } else {
            warn!("No shutdown channel found for scraper service");
            return Err(anyhow::anyhow!("No shutdown channel available"));
        }

        // Collect all handles
        let mut all_handles = Vec::new();
        if let Some(handle) = self.scheduler_handle.take() {
            all_handles.push(handle);
        }
        all_handles.append(&mut self.worker_handles);

        // Wait for all tasks to complete (no internal timeout - let ServiceManager handle it)
        let results = futures::future::join_all(all_handles).await;
        let failed = results.iter().filter(|r| r.is_err()).count();
        if failed > 0 {
            warn!(
                failed_count = failed,
                "Some scraper tasks panicked during shutdown"
            );
            return Err(anyhow::anyhow!("{} task(s) panicked", failed));
        }

        info!("All scraper tasks shutdown gracefully");
        Ok(())
    }
}

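Wiring the scraper into the application then mirrors the bot and web services. A sketch, assuming the shared handles (`db_pool`, `banner_api_arc`, `reference_cache`, `service_statuses`) are already constructed as elsewhere in this diff:

let scraper_service = Box::new(ScraperService::new(
    db_pool.clone(),
    banner_api_arc.clone(),
    reference_cache.clone(),
    service_statuses.clone(),
));
service_manager.register_service("scraper", scraper_service);
service_manager.spawn_all();
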
@@ -0,0 +1,344 @@
use crate::banner::{BannerApi, Term};
use crate::data::models::{ReferenceData, ScrapePriority, TargetType};
use crate::data::scrape_jobs;
use crate::error::Result;
use crate::rmp::RmpClient;
use crate::scraper::jobs::subject::SubjectJob;
use crate::state::ReferenceCache;
use serde_json::json;
use sqlx::PgPool;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::{RwLock, broadcast};
use tokio::time;
use tokio_util::sync::CancellationToken;
use tracing::{debug, error, info, warn};

/// How often reference data is re-scraped (6 hours).
const REFERENCE_DATA_INTERVAL: Duration = Duration::from_secs(6 * 60 * 60);

/// How often RMP data is synced (24 hours).
const RMP_SYNC_INTERVAL: Duration = Duration::from_secs(24 * 60 * 60);

/// Periodically analyzes data and enqueues prioritized scrape jobs.
pub struct Scheduler {
    db_pool: PgPool,
    banner_api: Arc<BannerApi>,
    reference_cache: Arc<RwLock<ReferenceCache>>,
}

impl Scheduler {
    pub fn new(
        db_pool: PgPool,
        banner_api: Arc<BannerApi>,
        reference_cache: Arc<RwLock<ReferenceCache>>,
    ) -> Self {
        Self {
            db_pool,
            banner_api,
            reference_cache,
        }
    }

    /// Runs the scheduler's main loop with graceful shutdown support.
    ///
    /// The scheduler wakes up every 60 seconds to analyze data and enqueue jobs.
    /// When a shutdown signal is received:
    /// 1. Any in-progress scheduling work is gracefully cancelled via CancellationToken
    /// 2. The scheduler waits up to 5 seconds for work to complete
    /// 3. If the timeout elapses, the task is abandoned (detached); it still observes
    ///    the cancellation token and exits on its own
    ///
    /// This ensures that shutdown is responsive even if scheduling work is blocked.
    pub async fn run(&self, mut shutdown_rx: broadcast::Receiver<()>) {
        info!("Scheduler service started");

        let work_interval = Duration::from_secs(60);
        let mut next_run = time::Instant::now();
        let mut current_work: Option<(tokio::task::JoinHandle<()>, CancellationToken)> = None;
        // Scrape reference data immediately on first cycle
        let mut last_ref_scrape = Instant::now() - REFERENCE_DATA_INTERVAL;
        // Sync RMP data immediately on first cycle
        let mut last_rmp_sync = Instant::now() - RMP_SYNC_INTERVAL;

        loop {
            tokio::select! {
                _ = time::sleep_until(next_run) => {
                    let cancel_token = CancellationToken::new();

                    let should_scrape_ref = last_ref_scrape.elapsed() >= REFERENCE_DATA_INTERVAL;
                    let should_sync_rmp = last_rmp_sync.elapsed() >= RMP_SYNC_INTERVAL;

                    // Spawn work in separate task to allow graceful cancellation during shutdown.
                    let work_handle = tokio::spawn({
                        let db_pool = self.db_pool.clone();
                        let banner_api = self.banner_api.clone();
                        let cancel_token = cancel_token.clone();
                        let reference_cache = self.reference_cache.clone();

                        async move {
                            tokio::select! {
                                _ = async {
                                    // RMP sync is independent of Banner API — run it
                                    // concurrently with reference data scraping so it
                                    // doesn't wait behind rate-limited Banner calls.
                                    let rmp_fut = async {
                                        if should_sync_rmp
                                            && let Err(e) = Self::sync_rmp_data(&db_pool).await
                                        {
                                            error!(error = ?e, "Failed to sync RMP data");
                                        }
                                    };

                                    let ref_fut = async {
                                        if should_scrape_ref
                                            && let Err(e) = Self::scrape_reference_data(&db_pool, &banner_api, &reference_cache).await
                                        {
                                            error!(error = ?e, "Failed to scrape reference data");
                                        }
                                    };

                                    tokio::join!(rmp_fut, ref_fut);

                                    if let Err(e) = Self::schedule_jobs_impl(&db_pool, &banner_api).await {
                                        error!(error = ?e, "Failed to schedule jobs");
                                    }
                                } => {}
                                _ = cancel_token.cancelled() => {
                                    debug!("Scheduling work cancelled gracefully");
                                }
                            }
                        }
                    });

                    if should_scrape_ref {
                        last_ref_scrape = Instant::now();
                    }
                    if should_sync_rmp {
                        last_rmp_sync = Instant::now();
                    }

                    current_work = Some((work_handle, cancel_token));
                    next_run = time::Instant::now() + work_interval;
                }
                _ = shutdown_rx.recv() => {
                    info!("Scheduler received shutdown signal");

                    if let Some((handle, cancel_token)) = current_work.take() {
                        cancel_token.cancel();

                        // Wait briefly for graceful completion
                        if tokio::time::timeout(Duration::from_secs(5), handle).await.is_err() {
                            warn!("Scheduling work did not complete within 5s, abandoning");
                        } else {
                            debug!("Scheduling work completed gracefully");
                        }
                    }

                    info!("Scheduler exiting gracefully");
                    break;
                }
            }
        }
    }

    /// Core scheduling logic that analyzes data and creates scrape jobs.
    ///
    /// Strategy:
    /// 1. Fetch all subjects for the current term from Banner API
    /// 2. Query existing jobs in a single batch query
    /// 3. Create jobs only for subjects that don't have pending jobs
    ///
    /// This is a static method (not &self) to allow it to be called from spawned tasks.
    #[tracing::instrument(skip_all, fields(term))]
    async fn schedule_jobs_impl(db_pool: &PgPool, banner_api: &BannerApi) -> Result<()> {
        // For now, we will implement a simple baseline scheduling strategy:
        // 1. Get a list of all subjects from the Banner API.
        // 2. Query existing jobs for all subjects in a single query.
        // 3. Create new jobs only for subjects that don't have existing jobs.
        let term = Term::get_current().inner().to_string();

        tracing::Span::current().record("term", term.as_str());
        debug!(term = term, "Enqueuing subject jobs");

        let subjects = banner_api.get_subjects("", &term, 1, 500).await?;
        debug!(
            subject_count = subjects.len(),
            "Retrieved subjects from API"
        );

        // Create payloads for all subjects
        let subject_payloads: Vec<_> = subjects
            .iter()
            .map(|subject| json!({ "subject": subject.code }))
            .collect();

        // Query existing jobs for all subjects in a single query
        let existing_payloads = scrape_jobs::find_existing_job_payloads(
            TargetType::Subject,
            &subject_payloads,
            db_pool,
        )
        .await?;

        // Filter out subjects that already have jobs and prepare new jobs
        let mut skipped_count = 0;
        let new_jobs: Vec<_> = subjects
            .into_iter()
            .filter_map(|subject| {
                let job = SubjectJob::new(subject.code.clone());
                let payload = serde_json::to_value(&job).unwrap();
                let payload_str = payload.to_string();

                if existing_payloads.contains(&payload_str) {
                    skipped_count += 1;
                    None
                } else {
                    Some((payload, subject.code))
                }
            })
            .collect();

        if skipped_count > 0 {
            debug!(count = skipped_count, "Skipped subjects with existing jobs");
        }

        // Insert all new jobs in a single batch
        if !new_jobs.is_empty() {
            for (_, subject_code) in &new_jobs {
                debug!(subject = subject_code, "New job enqueued for subject");
            }

            let jobs: Vec<_> = new_jobs
                .into_iter()
                .map(|(payload, _)| (payload, TargetType::Subject, ScrapePriority::Low))
                .collect();

            scrape_jobs::batch_insert_jobs(&jobs, db_pool).await?;
        }

        debug!("Job scheduling complete");
        Ok(())
    }

    /// Fetch all RMP professors, upsert to DB, and auto-match against Banner instructors.
    #[tracing::instrument(skip_all)]
    async fn sync_rmp_data(db_pool: &PgPool) -> Result<()> {
        info!("Starting RMP data sync");

        let client = RmpClient::new();
        let professors = client.fetch_all_professors().await?;
        let total = professors.len();

        crate::data::rmp::batch_upsert_rmp_professors(&professors, db_pool).await?;
        info!(total, "RMP professors upserted");

        let matched = crate::data::rmp::auto_match_instructors(db_pool).await?;
        info!(total, matched, "RMP sync complete");

        Ok(())
    }

    /// Scrape all reference data categories from Banner and upsert to DB, then refresh cache.
    #[tracing::instrument(skip_all)]
    async fn scrape_reference_data(
        db_pool: &PgPool,
        banner_api: &BannerApi,
        reference_cache: &Arc<RwLock<ReferenceCache>>,
    ) -> Result<()> {
        let term = Term::get_current().inner().to_string();
        info!(term = %term, "Scraping reference data");

        let mut all_entries = Vec::new();

        // Terms (fetched via session pool, no active session needed)
        match banner_api.sessions.get_terms("", 1, 500).await {
            Ok(terms) => {
                debug!(count = terms.len(), "Fetched terms");
                all_entries.extend(terms.into_iter().map(|t| ReferenceData {
                    category: "term".to_string(),
                    code: t.code,
                    description: t.description,
                }));
            }
            Err(e) => warn!(error = ?e, "Failed to fetch terms"),
        }

        // Subjects
        match banner_api.get_subjects("", &term, 1, 500).await {
            Ok(pairs) => {
                debug!(count = pairs.len(), "Fetched subjects");
                all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
                    category: "subject".to_string(),
                    code: p.code,
                    description: p.description,
                }));
            }
            Err(e) => warn!(error = ?e, "Failed to fetch subjects"),
        }

        // Campuses
        match banner_api.get_campuses(&term).await {
            Ok(pairs) => {
                debug!(count = pairs.len(), "Fetched campuses");
                all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
                    category: "campus".to_string(),
                    code: p.code,
                    description: p.description,
                }));
            }
            Err(e) => warn!(error = ?e, "Failed to fetch campuses"),
        }

        // Instructional methods
        match banner_api.get_instructional_methods(&term).await {
            Ok(pairs) => {
                debug!(count = pairs.len(), "Fetched instructional methods");
                all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
                    category: "instructional_method".to_string(),
                    code: p.code,
                    description: p.description,
                }));
            }
            Err(e) => warn!(error = ?e, "Failed to fetch instructional methods"),
        }

        // Parts of term
        match banner_api.get_parts_of_term(&term).await {
            Ok(pairs) => {
                debug!(count = pairs.len(), "Fetched parts of term");
                all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
                    category: "part_of_term".to_string(),
                    code: p.code,
                    description: p.description,
                }));
            }
            Err(e) => warn!(error = ?e, "Failed to fetch parts of term"),
        }

        // Attributes
        match banner_api.get_attributes(&term).await {
            Ok(pairs) => {
                debug!(count = pairs.len(), "Fetched attributes");
                all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
                    category: "attribute".to_string(),
                    code: p.code,
                    description: p.description,
                }));
            }
            Err(e) => warn!(error = ?e, "Failed to fetch attributes"),
        }

        // Batch upsert all entries
        let total = all_entries.len();
        crate::data::reference::batch_upsert(&all_entries, db_pool).await?;
        info!(total_entries = total, "Reference data upserted to DB");

        // Refresh in-memory cache
        let all = crate::data::reference::get_all(db_pool).await?;
        let count = all.len();
        *reference_cache.write().await = ReferenceCache::from_entries(all);
        info!(entries = count, "Reference cache refreshed");

        Ok(())
    }
}

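The batch dedup call `find_existing_job_payloads` lives in src/data/scrape_jobs.rs, which this diff does not show. A plausible shape, sketched under the assumption of a `scrape_jobs` table with `target_type` and `target_payload` columns (hypothetical — not the committed implementation):

use std::collections::HashSet;

// Hypothetical sketch of the batch existence check the scheduler relies on.
async fn find_existing_job_payloads_sketch(
    pool: &sqlx::PgPool,
) -> anyhow::Result<HashSet<String>> {
    // Fetch pending subject-job payloads, then compare their serialized
    // form in Rust, matching the scheduler's payload.to_string() check.
    let rows: Vec<(serde_json::Value,)> =
        sqlx::query_as("SELECT target_payload FROM scrape_jobs WHERE target_type = $1")
            .bind("subject")
            .fetch_all(pool)
            .await?;
    Ok(rows.into_iter().map(|(p,)| p.to_string()).collect())
}
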
@@ -0,0 +1,265 @@
use crate::banner::{BannerApi, BannerApiError};
use crate::data::models::ScrapeJob;
use crate::data::scrape_jobs;
use crate::error::Result;
use crate::scraper::jobs::{JobError, JobType};
use sqlx::PgPool;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::broadcast;
use tokio::time;
use tracing::{Instrument, debug, error, info, trace, warn};

/// A single worker instance.
///
/// Each worker runs in its own asynchronous task and continuously polls the
/// database for scrape jobs to execute.
pub struct Worker {
    id: usize, // For logging purposes
    db_pool: PgPool,
    banner_api: Arc<BannerApi>,
}

impl Worker {
    pub fn new(id: usize, db_pool: PgPool, banner_api: Arc<BannerApi>) -> Self {
        Self {
            id,
            db_pool,
            banner_api,
        }
    }

    /// Runs the worker's main loop.
    pub async fn run(&self, mut shutdown_rx: broadcast::Receiver<()>) {
        info!(worker_id = self.id, "Worker started");

        loop {
            // Fetch and lock a job, racing against shutdown signal
            let job = tokio::select! {
                _ = shutdown_rx.recv() => {
                    info!(worker_id = self.id, "Worker received shutdown signal, exiting gracefully");
                    break;
                }
                result = self.fetch_and_lock_job() => {
                    match result {
                        Ok(Some(job)) => job,
                        Ok(None) => {
                            trace!(worker_id = self.id, "No jobs available, waiting");
                            time::sleep(Duration::from_secs(5)).await;
                            continue;
                        }
                        Err(e) => {
                            warn!(worker_id = self.id, error = ?e, "Failed to fetch job, waiting");
                            time::sleep(Duration::from_secs(10)).await;
                            continue;
                        }
                    }
                }
            };

            let job_id = job.id;
            let retry_count = job.retry_count;
            let max_retries = job.max_retries;
            let start = std::time::Instant::now();

            // Process the job, racing against shutdown signal
            let process_result = tokio::select! {
                _ = shutdown_rx.recv() => {
                    self.handle_shutdown_during_processing(job_id).await;
                    break;
                }
                result = self.process_job(job) => result
            };

            let duration = start.elapsed();

            // Handle the job processing result
            self.handle_job_result(job_id, retry_count, max_retries, process_result, duration)
                .await;
        }
    }

    /// Atomically fetches a job from the queue, locking it for processing.
    ///
    /// This uses a `FOR UPDATE SKIP LOCKED` query to ensure that multiple
    /// workers can poll the queue concurrently without conflicts.
    async fn fetch_and_lock_job(&self) -> Result<Option<ScrapeJob>> {
        scrape_jobs::fetch_and_lock_job(&self.db_pool).await
    }

    async fn process_job(&self, job: ScrapeJob) -> Result<(), JobError> {
        // Convert the database job to our job type
        let job_type = JobType::from_target_type_and_payload(job.target_type, job.target_payload)
            .map_err(|e| JobError::Unrecoverable(anyhow::anyhow!(e)))?; // Parse errors are unrecoverable

        // Get the job implementation
        let job_impl = job_type.boxed();

        // Create span with job context
        let span = tracing::info_span!(
            "process_job",
            job_id = job.id,
            job_type = job_impl.description()
        );

        async move {
            debug!(
                worker_id = self.id,
                job_id = job.id,
                description = job_impl.description(),
                "Processing job"
            );

            // Process the job - API errors are recoverable
            job_impl
                .process(&self.banner_api, &self.db_pool)
                .await
                .map_err(JobError::Recoverable)?;

            Ok(())
        }
        .instrument(span)
        .await
    }

    async fn delete_job(&self, job_id: i32) -> Result<()> {
        scrape_jobs::delete_job(job_id, &self.db_pool).await
    }

    async fn unlock_job(&self, job_id: i32) -> Result<()> {
        scrape_jobs::unlock_job(job_id, &self.db_pool).await
    }

    async fn unlock_and_increment_retry(&self, job_id: i32, max_retries: i32) -> Result<bool> {
        scrape_jobs::unlock_and_increment_retry(job_id, max_retries, &self.db_pool).await
    }

    /// Handle shutdown signal received during job processing
    async fn handle_shutdown_during_processing(&self, job_id: i32) {
        info!(
            worker_id = self.id,
            job_id, "Shutdown received during job processing"
        );

        if let Err(e) = self.unlock_job(job_id).await {
            warn!(
                worker_id = self.id,
                job_id,
                error = ?e,
                "Failed to unlock job during shutdown"
            );
        } else {
            debug!(worker_id = self.id, job_id, "Job unlocked during shutdown");
        }

        info!(worker_id = self.id, "Worker exiting gracefully");
    }

    /// Handle the result of job processing
    async fn handle_job_result(
        &self,
        job_id: i32,
        retry_count: i32,
        max_retries: i32,
        result: Result<(), JobError>,
        duration: std::time::Duration,
    ) {
        match result {
            Ok(()) => {
                debug!(
                    worker_id = self.id,
                    job_id,
                    duration_ms = duration.as_millis(),
                    "Job completed successfully"
                );
                if let Err(e) = self.delete_job(job_id).await {
                    error!(worker_id = self.id, job_id, error = ?e, "Failed to delete completed job");
                }
            }
            Err(JobError::Recoverable(e)) => {
                self.handle_recoverable_error(job_id, retry_count, max_retries, e, duration)
                    .await;
            }
            Err(JobError::Unrecoverable(e)) => {
                error!(
                    worker_id = self.id,
                    job_id,
                    duration_ms = duration.as_millis(),
                    error = ?e,
                    "Job corrupted, deleting"
                );
                if let Err(e) = self.delete_job(job_id).await {
                    error!(worker_id = self.id, job_id, error = ?e, "Failed to delete corrupted job");
                }
            }
        }
    }

    /// Handle recoverable errors by logging appropriately and unlocking the job
    async fn handle_recoverable_error(
        &self,
        job_id: i32,
        retry_count: i32,
        max_retries: i32,
        e: anyhow::Error,
        duration: std::time::Duration,
    ) {
        let next_attempt = retry_count.saturating_add(1);
        let remaining_retries = max_retries.saturating_sub(next_attempt);

        // Log the error appropriately based on type
        if let Some(BannerApiError::InvalidSession(_)) = e.downcast_ref::<BannerApiError>() {
            warn!(
                worker_id = self.id,
                job_id,
                duration_ms = duration.as_millis(),
                retry_attempt = next_attempt,
                max_retries = max_retries,
                remaining_retries = remaining_retries,
                "Invalid session detected, will retry"
            );
        } else {
            error!(
                worker_id = self.id,
                job_id,
                duration_ms = duration.as_millis(),
                retry_attempt = next_attempt,
                max_retries = max_retries,
                remaining_retries = remaining_retries,
                error = ?e,
                "Failed to process job, will retry"
            );
        }

        // Atomically unlock and increment retry count, checking if retry is allowed
        match self.unlock_and_increment_retry(job_id, max_retries).await {
            Ok(can_retry) if can_retry => {
                debug!(
                    worker_id = self.id,
                    job_id,
                    retry_attempt = next_attempt,
                    remaining_retries = remaining_retries,
                    "Job unlocked for retry"
                );
            }
            Ok(_) => {
                // Max retries exceeded (detected atomically)
                error!(
                    worker_id = self.id,
                    job_id,
                    duration_ms = duration.as_millis(),
                    retry_count = next_attempt,
                    max_retries = max_retries,
                    error = ?e,
                    "Job failed permanently (max retries exceeded), deleting"
                );
                if let Err(e) = self.delete_job(job_id).await {
                    error!(worker_id = self.id, job_id, error = ?e, "Failed to delete failed job");
                }
            }
            Err(e) => {
                error!(worker_id = self.id, job_id, error = ?e, "Failed to unlock and increment retry count");
            }
        }
    }
}

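The doc comment above names the `FOR UPDATE SKIP LOCKED` pattern; the actual claim query behind `scrape_jobs::fetch_and_lock_job` is not shown in this diff, but queries of this kind typically reduce to the sketch below (column names `locked_at`, `priority`, and `created_at` are assumptions, and `ScrapeJob` would need to derive `sqlx::FromRow`):

// Hypothetical sketch of the claim query — not the committed implementation.
async fn fetch_and_lock_job_sketch(pool: &sqlx::PgPool) -> sqlx::Result<Option<ScrapeJob>> {
    sqlx::query_as::<_, ScrapeJob>(
        r#"
        UPDATE scrape_jobs
        SET locked_at = now()
        WHERE id = (
            SELECT id FROM scrape_jobs
            WHERE locked_at IS NULL
            ORDER BY priority DESC, created_at
            FOR UPDATE SKIP LOCKED  -- concurrent workers skip rows already claimed
            LIMIT 1
        )
        RETURNING *
        "#,
    )
    .fetch_optional(pool)
    .await
}
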
+211
-3
@@ -1,20 +1,205 @@
use super::Service;
use crate::bot::{Data, get_commands};
use crate::config::Config;
use crate::state::AppState;
use crate::status::{ServiceStatus, ServiceStatusRegistry};
use num_format::{Locale, ToFormattedString};
use serenity::Client;
use serenity::all::{ActivityData, ClientBuilder, GatewayIntents};
use std::sync::Arc;
use tracing::{debug, error};
use std::time::Duration;
use tokio::sync::{Mutex, broadcast};
use tokio::task::JoinHandle;
use tracing::{debug, error, info, warn};

/// Discord bot service implementation
pub struct BotService {
    client: Client,
    shard_manager: Arc<serenity::gateway::ShardManager>,
    status_task_handle: Arc<Mutex<Option<JoinHandle<()>>>>,
    status_shutdown_tx: Option<broadcast::Sender<()>>,
    service_statuses: ServiceStatusRegistry,
}

impl BotService {
    pub fn new(client: Client) -> Self {
    /// Create a new Discord bot client with full configuration
    pub async fn create_client(
        config: &Config,
        app_state: AppState,
        status_task_handle: Arc<Mutex<Option<JoinHandle<()>>>>,
        status_shutdown_rx: broadcast::Receiver<()>,
    ) -> Result<Client, anyhow::Error> {
        let intents = GatewayIntents::non_privileged();
        let bot_target_guild = config.bot_target_guild;

        let framework = poise::Framework::builder()
            .options(poise::FrameworkOptions {
                commands: get_commands(),
                pre_command: |ctx| {
                    Box::pin(async move {
                        let content = match ctx {
                            poise::Context::Application(_) => ctx.invocation_string(),
                            poise::Context::Prefix(prefix) => prefix.msg.content.to_string(),
                        };
                        let channel_name = ctx
                            .channel_id()
                            .name(ctx.http())
                            .await
                            .unwrap_or("unknown".to_string());

                        let span = tracing::Span::current();
                        span.record("command_name", ctx.command().qualified_name.as_str());
                        span.record("invocation", ctx.invocation_string());
                        span.record("msg.content", content.as_str());
                        span.record("msg.author", ctx.author().tag().as_str());
                        span.record("msg.id", ctx.id());
                        span.record("msg.channel_id", ctx.channel_id().get());
                        span.record("msg.channel", channel_name.as_str());

                        tracing::info!(
                            command_name = ctx.command().qualified_name.as_str(),
                            invocation = ctx.invocation_string(),
                            msg.content = %content,
                            msg.author = %ctx.author().tag(),
                            msg.author_id = %ctx.author().id,
                            msg.id = %ctx.id(),
                            msg.channel = %channel_name.as_str(),
                            msg.channel_id = %ctx.channel_id(),
                            "{} invoked by {}",
                            ctx.command().name,
                            ctx.author().tag()
                        );
                    })
                },
                on_error: |error| {
                    Box::pin(async move {
                        if let Err(e) = poise::builtins::on_error(error).await {
                            tracing::error!(error = %e, "Fatal error while sending error message");
                        }
                    })
                },
                ..Default::default()
            })
            .setup(move |ctx, _ready, framework| {
                let app_state = app_state.clone();
                let status_task_handle = status_task_handle.clone();
                Box::pin(async move {
                    poise::builtins::register_in_guild(
                        ctx,
                        &framework.options().commands,
                        bot_target_guild.into(),
                    )
                    .await?;
                    poise::builtins::register_globally(ctx, &framework.options().commands).await?;

                    // Start status update task with shutdown support
                    let handle = Self::start_status_update_task(
                        ctx.clone(),
                        app_state.clone(),
                        status_shutdown_rx,
                    );
                    *status_task_handle.lock().await = Some(handle);

                    app_state.service_statuses.set("bot", ServiceStatus::Active);

                    Ok(Data { app_state })
                })
            })
            .build();

        Ok(ClientBuilder::new(config.bot_token.clone(), intents)
            .framework(framework)
            .await?)
    }

    /// Start the status update task for the Discord bot with graceful shutdown support
    fn start_status_update_task(
        ctx: serenity::client::Context,
        app_state: AppState,
        mut shutdown_rx: broadcast::Receiver<()>,
    ) -> JoinHandle<()> {
        tokio::spawn(async move {
            let max_interval = Duration::from_secs(300); // 5 minutes
            let base_interval = Duration::from_secs(30);
            let mut interval = tokio::time::interval(base_interval);
            let mut previous_course_count: Option<i64> = None;

            // This runs once immediately on startup, then with adaptive intervals
            loop {
                tokio::select! {
                    _ = interval.tick() => {
                        // Get the course count, update the activity if it has changed/hasn't been set this session
                        let course_count = match app_state.get_course_count().await {
                            Ok(count) => count,
                            Err(e) => {
                                warn!(error = %e, "Failed to fetch course count for status update");
                                continue;
                            }
                        };
                        if previous_course_count.is_none() || previous_course_count != Some(course_count) {
                            ctx.set_activity(Some(ActivityData::playing(format!(
                                "Querying {:} classes",
                                course_count.to_formatted_string(&Locale::en)
                            ))));
                        }

                        // Increase or reset the interval
                        interval = tokio::time::interval(
                            // Avoid logging the first 'change'
                            if course_count != previous_course_count.unwrap_or(0) {
                                if previous_course_count.is_some() {
                                    debug!(
                                        new_course_count = course_count,
                                        last_interval = interval.period().as_secs(),
                                        "Course count changed, resetting interval"
                                    );
                                }

                                // Record the new course count
                                previous_course_count = Some(course_count);

                                // Reset to base interval
                                base_interval
                            } else {
                                // Increase interval by 10% (up to maximum)
                                let new_interval = interval.period().mul_f32(1.1).min(max_interval);
                                debug!(
                                    current_course_count = course_count,
                                    last_interval = interval.period().as_secs(),
                                    new_interval = new_interval.as_secs(),
                                    "Course count unchanged, increasing interval"
                                );

                                new_interval
                            },
                        );

                        // Reset the interval, otherwise it will tick again immediately
                        interval.reset();
                    }
                    _ = shutdown_rx.recv() => {
                        info!("Status update task received shutdown signal");
                        break;
                    }
                }
            }
        })
    }

    pub fn new(
        client: Client,
        status_task_handle: Arc<Mutex<Option<JoinHandle<()>>>>,
        status_shutdown_tx: broadcast::Sender<()>,
        service_statuses: ServiceStatusRegistry,
    ) -> Self {
        let shard_manager = client.shard_manager.clone();

        Self {
            client,
            shard_manager,
            status_task_handle,
            status_shutdown_tx: Some(status_shutdown_tx),
            service_statuses,
        }
    }
}
@@ -28,7 +213,7 @@ impl Service for BotService
    async fn run(&mut self) -> Result<(), anyhow::Error> {
        match self.client.start().await {
            Ok(()) => {
                debug!(service = "bot", "stopped early.");
                warn!(service = "bot", "stopped early");
                Err(anyhow::anyhow!("bot stopped early"))
            }
            Err(e) => {
@@ -39,6 +224,29 @@ impl Service for BotService
    }

    async fn shutdown(&mut self) -> Result<(), anyhow::Error> {
        self.service_statuses.set("bot", ServiceStatus::Disabled);
        // Signal status update task to stop
        if let Some(status_shutdown_tx) = self.status_shutdown_tx.take() {
            let _ = status_shutdown_tx.send(());
        }

        // Wait for status update task to complete (with timeout)
        let handle = self.status_task_handle.lock().await.take();
        if let Some(handle) = handle {
            match tokio::time::timeout(Duration::from_secs(2), handle).await {
                Ok(Ok(())) => {
                    debug!("Status update task completed gracefully");
                }
                Ok(Err(e)) => {
                    warn!(error = ?e, "Status update task panicked");
                }
                Err(_) => {
                    warn!("Status update task did not complete within 2s timeout");
                }
            }
        }

        // Shutdown Discord shards
        self.shard_manager.shutdown_all().await;
        Ok(())
    }

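The split between `create_client` and the new four-argument `new` implies call-site wiring along these lines (a sketch — variable names are placeholders, and a cheaply cloneable `ServiceStatusRegistry` is assumed):

let status_task_handle = Arc::new(Mutex::new(None));
let (status_shutdown_tx, status_shutdown_rx) = broadcast::channel(1);

let client = BotService::create_client(
    &config,
    app_state.clone(),
    status_task_handle.clone(),
    status_shutdown_rx,
)
.await?;

let bot_service = Box::new(BotService::new(
    client,
    status_task_handle,
    status_shutdown_tx,
    app_state.service_statuses.clone(),
));
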
+127
-64
@@ -1,24 +1,35 @@
use std::collections::HashMap;
use std::time::Duration;
use tokio::sync::broadcast;
use tokio::task::JoinHandle;
use tracing::{debug, error, info, warn};
use tokio::sync::{broadcast, mpsc};
use tracing::{debug, info, trace, warn};

use crate::services::{Service, ServiceResult, run_service};

/// Manages multiple services and their lifecycle
pub struct ServiceManager {
    registered_services: HashMap<String, Box<dyn Service>>,
    running_services: HashMap<String, JoinHandle<ServiceResult>>,
    service_handles: HashMap<String, tokio::task::AbortHandle>,
    completion_rx: Option<mpsc::UnboundedReceiver<(String, ServiceResult)>>,
    completion_tx: mpsc::UnboundedSender<(String, ServiceResult)>,
    shutdown_tx: broadcast::Sender<()>,
}

impl Default for ServiceManager {
    fn default() -> Self {
        Self::new()
    }
}

impl ServiceManager {
    pub fn new() -> Self {
        let (shutdown_tx, _) = broadcast::channel(1);
        let (completion_tx, completion_rx) = mpsc::unbounded_channel();

        Self {
            registered_services: HashMap::new(),
            running_services: HashMap::new(),
            service_handles: HashMap::new(),
            completion_rx: Some(completion_rx),
            completion_tx,
            shutdown_tx,
        }
    }
@@ -28,6 +39,11 @@ impl ServiceManager {
        self.registered_services.insert(name.to_string(), service);
    }

    /// Check if there are any registered services
    pub fn has_services(&self) -> bool {
        !self.registered_services.is_empty()
    }

    /// Spawn all registered services
    pub fn spawn_all(&mut self) {
        let service_count = self.registered_services.len();
@@ -35,8 +51,20 @@ impl ServiceManager {

        for (name, service) in self.registered_services.drain() {
            let shutdown_rx = self.shutdown_tx.subscribe();
            let handle = tokio::spawn(run_service(service, shutdown_rx));
            self.running_services.insert(name, handle);
            let completion_tx = self.completion_tx.clone();
            let name_clone = name.clone();

            // Spawn service task
            let handle = tokio::spawn(async move {
                let result = run_service(service, shutdown_rx).await;
                // Send completion notification
                let _ = completion_tx.send((name_clone, result));
            });

            // Store abort handle for shutdown control
            self.service_handles
                .insert(name.clone(), handle.abort_handle());
            debug!(service = name, id = ?handle.id(), "service spawned");
        }

        info!(
@@ -50,7 +78,7 @@ impl ServiceManager {
    /// Run all services until one completes or fails
    /// Returns the first service that completes and its result
    pub async fn run(&mut self) -> (String, ServiceResult) {
        if self.running_services.is_empty() {
        if self.service_handles.is_empty() {
            return (
                "none".to_string(),
                ServiceResult::Error(anyhow::anyhow!("No services to run")),
@@ -59,99 +87,134 @@ impl ServiceManager {

        info!(
            "servicemanager running {} services",
            self.running_services.len()
            self.service_handles.len()
        );

        // Wait for any service to complete
        loop {
            let mut completed_services = Vec::new();

            for (name, handle) in &mut self.running_services {
                if handle.is_finished() {
                    completed_services.push(name.clone());
                }
            }

            if let Some(completed_name) = completed_services.first() {
                let handle = self.running_services.remove(completed_name).unwrap();
                match handle.await {
                    Ok(result) => {
                        return (completed_name.clone(), result);
                    }
                    Err(e) => {
                        error!(service = completed_name, "service task panicked: {e}");
                        return (
                            completed_name.clone(),
                            ServiceResult::Error(anyhow::anyhow!("Task panic: {e}")),
                        );
                    }
                }
            }

            // Small delay to prevent busy-waiting
            tokio::time::sleep(Duration::from_millis(10)).await;
        }

        // Wait for any service to complete via the channel
        let completion_rx = self
            .completion_rx
            .as_mut()
            .expect("completion_rx should be available");

        completion_rx
            .recv()
            .await
            .map(|(name, result)| {
                self.service_handles.remove(&name);
                (name, result)
            })
            .unwrap_or_else(|| {
                (
                    "channel_closed".to_string(),
                    ServiceResult::Error(anyhow::anyhow!("Completion channel closed")),
                )
            })
    }

    /// Shutdown all services gracefully with a timeout.
    ///
    /// If any service fails to shutdown, it will return an error containing the names of the services that failed to shutdown.
    /// If all services shutdown successfully, the function will return the duration elapsed.
    /// All services receive the shutdown signal simultaneously and shut down in parallel.
    /// Each service gets the full timeout duration (they don't share/consume from a budget).
    /// If any service fails to shutdown within the timeout, it will be aborted.
    ///
    /// Returns the elapsed time if all succeed, or a list of failed service names.
    pub async fn shutdown(&mut self, timeout: Duration) -> Result<Duration, Vec<String>> {
        let service_count = self.running_services.len();
        let service_names: Vec<_> = self.running_services.keys().cloned().collect();
        let service_count = self.service_handles.len();
        let service_names: Vec<_> = self.service_handles.keys().cloned().collect();

        info!(
            service_count,
            services = ?service_names,
            timeout = format!("{:.2?}", timeout),
            "shutting down {} services with {:?} timeout",
            "shutting down {} services in parallel with {:?} timeout each",
            service_count,
            timeout
        );

        // Send shutdown signal to all services
        if service_count == 0 {
            return Ok(Duration::ZERO);
        }

        // Send shutdown signal to all services simultaneously
        let _ = self.shutdown_tx.send(());

        // Wait for all services to complete
        let start_time = std::time::Instant::now();
        let mut pending_services = Vec::new();

        for (name, handle) in self.running_services.drain() {
            match tokio::time::timeout(timeout, handle).await {
                Ok(Ok(_)) => {
                    debug!(service = name, "service shutdown completed");
                }
                Ok(Err(e)) => {
                    warn!(service = name, error = ?e, "service shutdown failed");
                    pending_services.push(name);
                }
                Err(_) => {
                    warn!(service = name, "service shutdown timed out");
                    pending_services.push(name);
                }
            }
        }

        // Collect results from all services with timeout
        let completion_rx = self
            .completion_rx
            .as_mut()
            .expect("completion_rx should be available");

        // Collect all completion results with a single timeout
        let collect_future = async {
            let mut collected: Vec<Option<(String, ServiceResult)>> = Vec::new();
            for _ in 0..service_count {
                if let Some(result) = completion_rx.recv().await {
                    collected.push(Some(result));
                } else {
                    collected.push(None);
                }
            }
            collected
        };

        let results = match tokio::time::timeout(timeout, collect_future).await {
            Ok(results) => results,
            Err(_) => {
                // Timeout exceeded - abort all remaining services
                warn!(
                    timeout = format!("{:.2?}", timeout),
                    "shutdown timeout exceeded - aborting all remaining services"
                );

                let failed: Vec<String> = self.service_handles.keys().cloned().collect();
                for handle in self.service_handles.values() {
                    handle.abort();
                }
                self.service_handles.clear();

                return Err(failed);
            }
        };

        // Process results and identify failures
        let mut failed_services = Vec::new();
        for (name, service_result) in results.into_iter().flatten() {
            self.service_handles.remove(&name);

            if matches!(service_result, ServiceResult::GracefulShutdown) {
                trace!(service = name, "service shutdown completed");
            } else {
                warn!(
                    service = name,
                    result = ?service_result,
                    "service shutdown with non-graceful result"
                );
                failed_services.push(name);
            }
        }

        let elapsed = start_time.elapsed();
        if pending_services.is_empty() {

        if failed_services.is_empty() {
            info!(
                service_count,
                elapsed = format!("{:.2?}", elapsed),
                "services shutdown completed: {}",
                "all services shutdown successfully: {}",
                service_names.join(", ")
            );
            Ok(elapsed)
        } else {
            warn!(
                pending_count = pending_services.len(),
                pending_services = ?pending_services,
                failed_count = failed_services.len(),
                failed_services = ?failed_services,
                elapsed = format!("{:.2?}", elapsed),
                "services shutdown completed with {} pending: {}",
                pending_services.len(),
                pending_services.join(", ")
                "{} service(s) failed to shutdown gracefully: {}",
                failed_services.len(),
                failed_services.join(", ")
            );
            Err(pending_services)
            Err(failed_services)
        }
    }
}

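The key change above is replacing the 10 ms `is_finished()` polling loop with an mpsc completion channel. Isolated from the service machinery, the pattern is simply this sketch:

use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::unbounded_channel::<(String, i32)>();

    for name in ["bot", "web"] {
        let tx = tx.clone();
        tokio::spawn(async move {
            // ... service runs to completion ...
            let _ = tx.send((name.to_string(), 0));
        });
    }
    drop(tx); // rx.recv() yields None once every sender is gone

    // Wakes exactly when the first task finishes — no polling, no sleep.
    if let Some((name, code)) = rx.recv().await {
        println!("{name} finished with exit {code}");
    }
}
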
+5
-1
@@ -23,7 +23,11 @@ pub trait Service: Send + Sync {

    /// Gracefully shutdown the service
    ///
    /// Implementations should initiate shutdown and MAY wait for completion.
    /// Services are expected to respond to this call and begin cleanup promptly.
    /// When managed by ServiceManager, the configured timeout (default 8s) applies to
    /// ALL services combined, not per-service. Services should complete shutdown as
    /// quickly as possible to avoid timeout.
    async fn shutdown(&mut self) -> Result<(), anyhow::Error>;
}
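A hedged sketch of one way to satisfy this contract: signal the run loop and return immediately, leaving completion to the manager's shared timeout. The trait is re-declared minimally here for a self-contained example; the real definition (including run) lives in the services module, and TickWorker is hypothetical.

#[async_trait::async_trait]
trait Service: Send + Sync {
    async fn run(&mut self) -> Result<(), anyhow::Error>;
    async fn shutdown(&mut self) -> Result<(), anyhow::Error>;
}

struct TickWorker {
    stop_tx: Option<tokio::sync::broadcast::Sender<()>>,
}

#[async_trait::async_trait]
impl Service for TickWorker {
    async fn run(&mut self) -> Result<(), anyhow::Error> {
        let (tx, mut rx) = tokio::sync::broadcast::channel(1);
        self.stop_tx = Some(tx);
        // Work until told to stop.
        let _ = rx.recv().await;
        Ok(())
    }

    async fn shutdown(&mut self) -> Result<(), anyhow::Error> {
        // Initiate only: send the signal and return without awaiting completion.
        if let Some(tx) = self.stop_tx.take() {
            let _ = tx.send(());
        }
        Ok(())
    }
}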
+53
-16
@@ -1,25 +1,53 @@
use super::Service;
use crate::state::AppState;
use crate::status::ServiceStatus;
use crate::web::create_router;
use std::net::SocketAddr;
use tokio::net::TcpListener;
use tokio::sync::broadcast;
use tracing::{info, trace, warn};

/// Web server service implementation
pub struct WebService {
    port: u16,
    app_state: AppState,
    shutdown_tx: Option<broadcast::Sender<()>>,
}

impl WebService {
    pub fn new(port: u16, app_state: AppState) -> Self {
        Self {
            port,
            app_state,
            shutdown_tx: None,
        }
    }

    /// Periodically pings the database and updates the "database" service status.
    async fn db_health_check_loop(state: AppState, mut shutdown_rx: broadcast::Receiver<()>) {
        use std::time::Duration;
        let mut interval = tokio::time::interval(Duration::from_secs(30));

        loop {
            tokio::select! {
                _ = interval.tick() => {
                    let status = match sqlx::query_scalar::<_, i32>("SELECT 1")
                        .fetch_one(&state.db_pool)
                        .await
                    {
                        Ok(_) => ServiceStatus::Connected,
                        Err(e) => {
                            warn!(error = %e, "DB health check failed");
                            ServiceStatus::Error
                        }
                    };
                    state.service_statuses.set("database", status);
                }
                _ = shutdown_rx.recv() => {
                    break;
                }
            }
        }
    }
}

#[async_trait::async_trait]
@@ -30,44 +58,53 @@ impl Service for WebService {

    async fn run(&mut self) -> Result<(), anyhow::Error> {
        // Create the main router with Banner API routes
        let app = create_router(self.app_state.clone());

        let addr = SocketAddr::from(([0, 0, 0, 0], self.port));
        info!(
            service = "web",
            link = format!("http://localhost:{}", addr.port()),
            "starting web server",
        );

        let listener = TcpListener::bind(addr).await?;
        self.app_state
            .service_statuses
            .set("web", ServiceStatus::Active);
        info!(
            service = "web",
            address = %addr,
            link = format!("http://localhost:{}", addr.port()),
            "web server listening"
        );

        // Create internal shutdown channel for axum graceful shutdown
        let (shutdown_tx, mut shutdown_rx) = broadcast::channel(1);
        self.shutdown_tx = Some(shutdown_tx.clone());

        // Spawn background DB health check
        let health_state = self.app_state.clone();
        let health_shutdown_rx = shutdown_tx.subscribe();
        tokio::spawn(async move {
            Self::db_health_check_loop(health_state, health_shutdown_rx).await;
        });

        // Use axum's graceful shutdown with the internal shutdown signal
        axum::serve(listener, app)
            .with_graceful_shutdown(async move {
                let _ = shutdown_rx.recv().await;
                trace!(
                    service = "web",
                    "received shutdown signal, starting graceful shutdown"
                );
            })
            .await?;

        info!(service = "web", "web server stopped");

        Ok(())
    }

    async fn shutdown(&mut self) -> Result<(), anyhow::Error> {
        if let Some(shutdown_tx) = self.shutdown_tx.take() {
            let _ = shutdown_tx.send(());
            trace!(service = "web", "sent shutdown signal to axum");
        } else {
            warn!(
                service = "web",
+106
@@ -0,0 +1,106 @@
use crate::services::ServiceResult;
use crate::services::manager::ServiceManager;
use std::process::ExitCode;
use std::time::Duration;
use tokio::signal;
use tracing::{error, info, warn};

/// Handle application shutdown signals and graceful shutdown
pub async fn handle_shutdown_signals(
    mut service_manager: ServiceManager,
    shutdown_timeout: Duration,
) -> ExitCode {
    // Set up signal handling for both SIGINT (Ctrl+C) and SIGTERM
    let ctrl_c = async {
        signal::ctrl_c()
            .await
            .expect("Failed to install CTRL+C signal handler");
        info!("received ctrl+c, gracefully shutting down...");
    };

    #[cfg(unix)]
    let sigterm = async {
        use tokio::signal::unix::{SignalKind, signal};
        let mut sigterm_stream =
            signal(SignalKind::terminate()).expect("Failed to install SIGTERM signal handler");
        sigterm_stream.recv().await;
        info!("received SIGTERM, gracefully shutting down...");
    };

    #[cfg(not(unix))]
    let sigterm = async {
        // On non-Unix systems, create a future that never completes
        // This ensures the select! macro works correctly
        std::future::pending::<()>().await;
    };

    // Main application loop - wait for services or signals
    let mut exit_code = ExitCode::SUCCESS;

    tokio::select! {
        (service_name, result) = service_manager.run() => {
            // A service completed unexpectedly
            match result {
                ServiceResult::GracefulShutdown => {
                    info!(service = service_name, "service completed gracefully");
                }
                ServiceResult::NormalCompletion => {
                    warn!(service = service_name, "service completed unexpectedly");
                    exit_code = ExitCode::FAILURE;
                }
                ServiceResult::Error(e) => {
                    error!(service = service_name, error = ?e, "service failed");
                    exit_code = ExitCode::FAILURE;
                }
            }

            // Shutdown remaining services
            exit_code = handle_graceful_shutdown(service_manager, shutdown_timeout, exit_code).await;
        }
        _ = ctrl_c => {
            // User requested shutdown via Ctrl+C
            info!("user requested shutdown via ctrl+c");
            exit_code = handle_graceful_shutdown(service_manager, shutdown_timeout, ExitCode::SUCCESS).await;
        }
        _ = sigterm => {
            // System requested shutdown via SIGTERM
            info!("system requested shutdown via SIGTERM");
            exit_code = handle_graceful_shutdown(service_manager, shutdown_timeout, ExitCode::SUCCESS).await;
        }
    }

    info!(exit_code = ?exit_code, "application shutdown complete");
    exit_code
}

/// Handle graceful shutdown of remaining services
async fn handle_graceful_shutdown(
    mut service_manager: ServiceManager,
    shutdown_timeout: Duration,
    current_exit_code: ExitCode,
) -> ExitCode {
    match service_manager.shutdown(shutdown_timeout).await {
        Ok(elapsed) => {
            info!(
                remaining = format!("{:.2?}", shutdown_timeout - elapsed),
                "graceful shutdown complete"
            );
            current_exit_code
        }
        Err(pending_services) => {
            warn!(
                pending_count = pending_services.len(),
                pending_services = ?pending_services,
                "graceful shutdown elapsed - {} service(s) did not complete",
                pending_services.len()
            );

            // Non-zero exit code, default to FAILURE if not set
            if current_exit_code == ExitCode::SUCCESS {
                ExitCode::FAILURE
            } else {
                current_exit_code
            }
        }
    }
}
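A hedged sketch of how this entry point would be wired from main. The module path and ServiceManager construction are assumptions, since neither appears in this hunk; the real main presumably registers services before handing the manager over.

#[tokio::main]
async fn main() -> std::process::ExitCode {
    // Hypothetical setup; constructor name and service registration not shown in this diff.
    let manager = ServiceManager::new();
    handle_shutdown_signals(manager, std::time::Duration::from_secs(8)).await
}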
+112
@@ -0,0 +1,112 @@
//! Application state shared across components (bot, web, scheduler).

use crate::banner::BannerApi;
use crate::banner::Course;
use crate::data::models::ReferenceData;
use crate::status::ServiceStatusRegistry;
use anyhow::Result;
use sqlx::PgPool;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;

/// In-memory cache for reference data (code→description lookups).
///
/// Loaded from the `reference_data` table on startup and refreshed periodically.
/// Uses a two-level HashMap so lookups take `&str` without allocating.
pub struct ReferenceCache {
    /// category → (code → description)
    data: HashMap<String, HashMap<String, String>>,
}

impl Default for ReferenceCache {
    fn default() -> Self {
        Self::new()
    }
}

impl ReferenceCache {
    /// Create an empty cache.
    pub fn new() -> Self {
        Self {
            data: HashMap::new(),
        }
    }

    /// Build cache from a list of reference data entries.
    pub fn from_entries(entries: Vec<ReferenceData>) -> Self {
        let mut data: HashMap<String, HashMap<String, String>> = HashMap::new();
        for e in entries {
            data.entry(e.category)
                .or_default()
                .insert(e.code, e.description);
        }
        Self { data }
    }

    /// Look up a description by category and code. Zero allocations.
    pub fn lookup(&self, category: &str, code: &str) -> Option<&str> {
        self.data
            .get(category)
            .and_then(|codes| codes.get(code))
            .map(|s| s.as_str())
    }

    /// Get all `(code, description)` pairs for a category, sorted by description.
    pub fn entries_for_category(&self, category: &str) -> Vec<(&str, &str)> {
        let Some(codes) = self.data.get(category) else {
            return Vec::new();
        };
        let mut entries: Vec<(&str, &str)> = codes
            .iter()
            .map(|(code, desc)| (code.as_str(), desc.as_str()))
            .collect();
        entries.sort_by(|a, b| a.1.cmp(b.1));
        entries
    }
}

#[derive(Clone)]
pub struct AppState {
    pub banner_api: Arc<BannerApi>,
    pub db_pool: PgPool,
    pub service_statuses: ServiceStatusRegistry,
    pub reference_cache: Arc<RwLock<ReferenceCache>>,
}

impl AppState {
    pub fn new(banner_api: Arc<BannerApi>, db_pool: PgPool) -> Self {
        Self {
            banner_api,
            db_pool,
            service_statuses: ServiceStatusRegistry::new(),
            reference_cache: Arc::new(RwLock::new(ReferenceCache::new())),
        }
    }

    /// Initialize the reference cache from the database.
    pub async fn load_reference_cache(&self) -> Result<()> {
        let entries = crate::data::reference::get_all(&self.db_pool).await?;
        let count = entries.len();
        let cache = ReferenceCache::from_entries(entries);
        *self.reference_cache.write().await = cache;
        tracing::info!(entries = count, "Reference cache loaded");
        Ok(())
    }

    /// Get a course by CRN directly from Banner API
    pub async fn get_course_or_fetch(&self, term: &str, crn: &str) -> Result<Course> {
        self.banner_api
            .get_course_by_crn(term, crn)
            .await?
            .ok_or_else(|| anyhow::anyhow!("Course not found for CRN {crn}"))
    }

    /// Get the total number of courses in the database
    pub async fn get_course_count(&self) -> Result<i64> {
        let count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM courses")
            .fetch_one(&self.db_pool)
            .await?;
        Ok(count.0)
    }
}
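A usage sketch for the two-level lookup described above. The ReferenceData literal assumes only the three fields consumed by from_entries; the real model may carry additional columns, so treat the construction as illustrative.

let cache = ReferenceCache::from_entries(vec![ReferenceData {
    category: "term".to_string(),
    code: "202510".to_string(),
    description: "Fall 2024".to_string(),
}]);
// Borrowed lookups: no allocation on the hot path.
assert_eq!(cache.lookup("term", "202510"), Some("Fall 2024"));
assert_eq!(cache.lookup("term", "999999"), None);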
@@ -0,0 +1,65 @@
use std::sync::Arc;
use std::time::Instant;

use dashmap::DashMap;
use serde::Serialize;
use ts_rs::TS;

/// Health status of a service.
#[derive(Debug, Clone, Serialize, PartialEq, TS)]
#[serde(rename_all = "lowercase")]
#[ts(export)]
pub enum ServiceStatus {
    #[allow(dead_code)]
    Starting,
    Active,
    Connected,
    Disabled,
    Error,
}

/// A timestamped status entry for a service.
#[derive(Debug, Clone)]
pub struct StatusEntry {
    pub status: ServiceStatus,
    #[allow(dead_code)]
    pub updated_at: Instant,
}

/// Thread-safe registry for services to self-report their health status.
#[derive(Debug, Clone, Default)]
pub struct ServiceStatusRegistry {
    inner: Arc<DashMap<String, StatusEntry>>,
}

impl ServiceStatusRegistry {
    /// Creates a new empty registry.
    pub fn new() -> Self {
        Self::default()
    }

    /// Inserts or updates the status for a named service.
    pub fn set(&self, name: &str, status: ServiceStatus) {
        self.inner.insert(
            name.to_owned(),
            StatusEntry {
                status,
                updated_at: Instant::now(),
            },
        );
    }

    /// Returns the current status of a named service, if present.
    #[allow(dead_code)]
    pub fn get(&self, name: &str) -> Option<ServiceStatus> {
        self.inner.get(name).map(|entry| entry.status.clone())
    }

    /// Returns a snapshot of all service statuses.
    pub fn all(&self) -> Vec<(String, ServiceStatus)> {
        self.inner
            .iter()
            .map(|entry| (entry.key().clone(), entry.value().status.clone()))
            .collect()
    }
}
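A small usage sketch: the registry clones cheaply (it wraps an Arc over a DashMap), so each service can hold its own handle and report without coordination.

let registry = ServiceStatusRegistry::new();
let for_web = registry.clone();
tokio::spawn(async move {
    for_web.set("web", ServiceStatus::Active);
});
registry.set("database", ServiceStatus::Connected);
// `all()` snapshots whatever has been reported so far.
for (name, status) in registry.all() {
    println!("{name}: {status:?}");
}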
@@ -0,0 +1 @@
pub mod shutdown;
@@ -0,0 +1,18 @@
use tokio::task::JoinHandle;
use tracing::warn;

/// Helper for joining multiple task handles with proper error handling.
///
/// This function waits for all tasks to complete and reports any that panicked.
/// Returns an error if any task panicked, otherwise returns Ok.
pub async fn join_tasks(handles: Vec<JoinHandle<()>>) -> Result<(), anyhow::Error> {
    let results = futures::future::join_all(handles).await;

    let failed = results.iter().filter(|r| r.is_err()).count();
    if failed > 0 {
        warn!(failed_count = failed, "Some tasks panicked during shutdown");
        Err(anyhow::anyhow!("{} task(s) panicked", failed))
    } else {
        Ok(())
    }
}
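Typical call site, sketched under the assumption that handles are collected while spawning workers inside an async fn returning anyhow::Result (tests for this helper appear further down in this diff):

let handles: Vec<tokio::task::JoinHandle<()>> = vec![
    tokio::spawn(async { /* worker 1 */ }),
    tokio::spawn(async { /* worker 2 */ }),
];
// Surfaces all panics as one error instead of silently dropping them.
join_tasks(handles).await?;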
@@ -0,0 +1,95 @@
//! Embedded assets for the web frontend
//!
//! This module handles serving static assets that are embedded into the binary
//! at compile time using rust-embed.

use dashmap::DashMap;
use rapidhash::v3::rapidhash_v3;
use rust_embed::RustEmbed;
use std::fmt;
use std::sync::LazyLock;

/// Embedded web assets from the dist directory
#[derive(RustEmbed)]
#[folder = "web/dist/"]
#[include = "*"]
#[exclude = "*.map"]
pub struct WebAssets;

/// RapidHash hash type for asset content (u64 native output size)
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AssetHash(u64);

impl AssetHash {
    /// Create a new AssetHash from u64 value
    pub fn new(hash: u64) -> Self {
        Self(hash)
    }

    /// Get the hash as a hex string
    pub fn to_hex(&self) -> String {
        format!("{:016x}", self.0)
    }

    /// Get the hash as a quoted hex string
    pub fn quoted(&self) -> String {
        format!("\"{}\"", self.to_hex())
    }
}

impl fmt::Display for AssetHash {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.to_hex())
    }
}

/// Metadata for an asset including MIME type and RapidHash hash
#[derive(Debug, Clone)]
pub struct AssetMetadata {
    pub mime_type: Option<String>,
    pub hash: AssetHash,
}

impl AssetMetadata {
    /// Check if the etag matches the asset hash
    pub fn etag_matches(&self, etag: &str) -> bool {
        // Remove quotes if present (ETags are typically quoted)
        let etag = etag.trim_matches('"');

        // ETags generated from u64 hex should be 16 characters
        etag.len() == 16
            && u64::from_str_radix(etag, 16)
                .map(|parsed| parsed == self.hash.0)
                .unwrap_or(false)
    }
}

/// Global cache for asset metadata to avoid repeated calculations
static ASSET_CACHE: LazyLock<DashMap<String, AssetMetadata>> = LazyLock::new(DashMap::new);

/// Get cached asset metadata for a file path, caching on-demand
/// Returns AssetMetadata containing MIME type and RapidHash hash
pub fn get_asset_metadata_cached(path: &str, content: &[u8]) -> AssetMetadata {
    // Check cache first
    if let Some(cached) = ASSET_CACHE.get(path) {
        return cached.value().clone();
    }

    // Calculate MIME type
    let mime_type = mime_guess::from_path(path)
        .first()
        .map(|mime| mime.to_string());

    // Calculate RapidHash hash (using u64 native output size)
    let hash_value = rapidhash_v3(content);
    let hash = AssetHash::new(hash_value);

    let metadata = AssetMetadata { mime_type, hash };

    // Only cache if we haven't exceeded the limit
    if ASSET_CACHE.len() < 1000 {
        ASSET_CACHE.insert(path.to_string(), metadata.clone());
    }

    metadata
}
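To make the ETag round-trip concrete, a short example with a made-up hash value; etag_matches accepts both the quoted form browsers echo back in If-None-Match and the bare hex:

let hash = AssetHash::new(0x0123456789abcdef);
assert_eq!(hash.to_hex(), "0123456789abcdef");
let meta = AssetMetadata { mime_type: Some("text/css".into()), hash };
// Quoted and unquoted forms both match; wrong or short tags do not.
assert!(meta.etag_matches("\"0123456789abcdef\""));
assert!(meta.etag_matches("0123456789abcdef"));
assert!(!meta.etag_matches("deadbeef"));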
@@ -1,5 +1,7 @@
//! Web API module for the banner application.

#[cfg(feature = "embed-assets")]
pub mod assets;
pub mod routes;

pub use routes::*;
+575
-51
@@ -1,62 +1,292 @@
//! Web API endpoints for Banner bot monitoring and metrics.

use axum::{
    Router,
    body::Body,
    extract::{Path, Query, Request, State},
    http::StatusCode as AxumStatusCode,
    response::{Json, Response},
    routing::get,
};
#[cfg(feature = "embed-assets")]
use axum::{
    http::{HeaderMap, HeaderValue, StatusCode, Uri},
    response::{Html, IntoResponse},
};
#[cfg(feature = "embed-assets")]
use http::header;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use std::{collections::BTreeMap, time::Duration};
use ts_rs::TS;

use crate::state::AppState;
use crate::status::ServiceStatus;
#[cfg(not(feature = "embed-assets"))]
use tower_http::cors::{Any, CorsLayer};
use tower_http::{classify::ServerErrorsFailureClass, timeout::TimeoutLayer, trace::TraceLayer};
use tracing::{Span, debug, trace, warn};

#[cfg(feature = "embed-assets")]
use crate::web::assets::{WebAssets, get_asset_metadata_cached};

/// Set appropriate caching headers based on asset type
#[cfg(feature = "embed-assets")]
fn set_caching_headers(response: &mut Response, path: &str, etag: &str) {
    let headers = response.headers_mut();

    // Set ETag
    if let Ok(etag_value) = HeaderValue::from_str(etag) {
        headers.insert(header::ETAG, etag_value);
    }

    // Set Cache-Control based on asset type
    let cache_control = if path.starts_with("assets/") {
        // Static assets with hashed filenames - long-term cache
        "public, max-age=31536000, immutable"
    } else if path == "index.html" {
        // HTML files - short-term cache
        "public, max-age=300"
    } else {
        match path.split_once('.').map(|(_, extension)| extension) {
            Some(ext) => match ext {
                // CSS/JS files - medium-term cache
                "css" | "js" => "public, max-age=86400",
                // Images - long-term cache
                "png" | "jpg" | "jpeg" | "gif" | "svg" | "ico" => "public, max-age=2592000",
                // Default for other files
                _ => "public, max-age=3600",
            },
            // Default for files without an extension
            None => "public, max-age=3600",
        }
    };

    if let Ok(cache_control_value) = HeaderValue::from_str(cache_control) {
        headers.insert(header::CACHE_CONTROL, cache_control_value);
    }
}

/// Creates the web server router
pub fn create_router(app_state: AppState) -> Router {
    let api_router = Router::new()
        .route("/health", get(health))
        .route("/status", get(status))
        .route("/metrics", get(metrics))
        .route("/courses/search", get(search_courses))
        .route("/courses/{term}/{crn}", get(get_course))
        .route("/terms", get(get_terms))
        .route("/subjects", get(get_subjects))
        .route("/reference/{category}", get(get_reference))
        .with_state(app_state);

    let mut router = Router::new().nest("/api", api_router);

    // When embed-assets feature is enabled, serve embedded static assets
    #[cfg(feature = "embed-assets")]
    {
        router = router.fallback(fallback);
    }

    // Without embed-assets, enable CORS for dev proxy to Vite
    #[cfg(not(feature = "embed-assets"))]
    {
        router = router.layer(
            CorsLayer::new()
                .allow_origin(Any)
                .allow_methods(Any)
                .allow_headers(Any),
        );
    }

    router.layer((
        TraceLayer::new_for_http()
            .make_span_with(|request: &Request<Body>| {
                tracing::debug_span!("request", path = request.uri().path())
            })
            .on_request(())
            .on_body_chunk(())
            .on_eos(())
            .on_response(
                |response: &Response<Body>, latency: Duration, _span: &Span| {
                    let latency_threshold = if cfg!(debug_assertions) {
                        Duration::from_millis(100)
                    } else {
                        Duration::from_millis(1000)
                    };

                    // Format latency, status, and code
                    let (latency_str, status) = (
                        format!("{latency:.2?}"),
                        format!(
                            "{} {}",
                            response.status().as_u16(),
                            response.status().canonical_reason().unwrap_or("??")
                        ),
                    );

                    // Log in warn if latency is above threshold, otherwise debug
                    if latency > latency_threshold {
                        warn!(latency = latency_str, status = status, "Response");
                    } else {
                        debug!(latency = latency_str, status = status, "Response");
                    }
                },
            )
            .on_failure(
                |error: ServerErrorsFailureClass, latency: Duration, _span: &Span| {
                    warn!(
                        error = ?error,
                        latency = format!("{latency:.2?}"),
                        "Request failed"
                    );
                },
            ),
        TimeoutLayer::new(Duration::from_secs(10)),
    ))
}

/// Handler that extracts request information for caching
#[cfg(feature = "embed-assets")]
async fn fallback(request: Request) -> Response {
    let uri = request.uri().clone();
    let headers = request.headers().clone();
    handle_spa_fallback_with_headers(uri, headers).await
}

/// Handles SPA routing by serving index.html for non-API, non-asset requests
/// This version includes HTTP caching headers and ETag support
#[cfg(feature = "embed-assets")]
async fn handle_spa_fallback_with_headers(uri: Uri, request_headers: HeaderMap) -> Response {
    let path = uri.path().trim_start_matches('/');

    if let Some(content) = WebAssets::get(path) {
        // Get asset metadata (MIME type and hash) with caching
        let metadata = get_asset_metadata_cached(path, &content.data);

        // Check if client has a matching ETag (conditional request)
        if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
            && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
        {
            return StatusCode::NOT_MODIFIED.into_response();
        }

        // Use cached MIME type, only set Content-Type if we have a valid MIME type
        let mut response = (
            [(
                header::CONTENT_TYPE,
                // For unknown types, set to application/octet-stream
                metadata
                    .mime_type
                    .unwrap_or("application/octet-stream".to_string()),
            )],
            content.data,
        )
            .into_response();

        // Set caching headers
        set_caching_headers(&mut response, path, &metadata.hash.quoted());

        return response;
    } else {
        // Any assets that are not found should be treated as a 404, not falling back to the SPA index.html
        if path.starts_with("assets/") {
            return (StatusCode::NOT_FOUND, "Asset not found").into_response();
        }
    }

    // Fall back to the SPA index.html
    match WebAssets::get("index.html") {
        Some(content) => {
            let metadata = get_asset_metadata_cached("index.html", &content.data);

            // Check if client has a matching ETag for index.html
            if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
                && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
            {
                return StatusCode::NOT_MODIFIED.into_response();
            }

            let mut response = Html(content.data).into_response();
            set_caching_headers(&mut response, "index.html", &metadata.hash.quoted());
            response
        }
        None => (
            StatusCode::INTERNAL_SERVER_ERROR,
            "Failed to load index.html",
        )
            .into_response(),
    }
}

/// Health check endpoint
async fn health() -> Json<Value> {
    trace!("health check requested");
    Json(json!({
        "status": "healthy",
        "timestamp": chrono::Utc::now().to_rfc3339()
    }))
}

#[derive(Serialize, TS)]
#[ts(export)]
pub struct ServiceInfo {
    name: String,
    status: ServiceStatus,
}

#[derive(Serialize, TS)]
#[ts(export)]
pub struct StatusResponse {
    status: ServiceStatus,
    version: String,
    commit: String,
    services: BTreeMap<String, ServiceInfo>,
}

/// Status endpoint showing bot and system status
async fn status(State(state): State<AppState>) -> Json<StatusResponse> {
    let mut services = BTreeMap::new();

    for (name, svc_status) in state.service_statuses.all() {
        services.insert(
            name.clone(),
            ServiceInfo {
                name,
                status: svc_status,
            },
        );
    }

    let overall_status = if services
        .values()
        .any(|s| matches!(s.status, ServiceStatus::Error))
    {
        ServiceStatus::Error
    } else if !services.is_empty()
        && services
            .values()
            .all(|s| matches!(s.status, ServiceStatus::Active | ServiceStatus::Connected))
    {
        ServiceStatus::Active
    } else if services.is_empty() {
        ServiceStatus::Disabled
    } else {
        ServiceStatus::Active
    };

    Json(StatusResponse {
        status: overall_status,
        version: env!("CARGO_PKG_VERSION").to_string(),
        commit: env!("GIT_COMMIT_HASH").to_string(),
        services,
    })
}

/// Metrics endpoint for monitoring
async fn metrics() -> Json<Value> {
    // For now, return basic metrics structure
    Json(json!({
        "status": "operational",
        "bot": {
            "status": "running",
            "uptime": "TODO: implement uptime tracking"
        },
        "cache": {
            "status": "connected",
            "courses": "TODO: implement course counting",
            "subjects": "TODO: implement subject counting"
        },
        "banner_api": {
            "status": "connected"
        },
@@ -64,23 +294,317 @@ async fn status(State(_state): State<BannerState>) -> Json<Value> {
    }))
}

// ============================================================
// Course search & detail API
// ============================================================

#[derive(Deserialize)]
struct SubjectsParams {
    term: String,
}

#[derive(Deserialize)]
struct SearchParams {
    term: String,
    #[serde(default)]
    subject: Vec<String>,
    q: Option<String>,
    course_number_low: Option<i32>,
    course_number_high: Option<i32>,
    #[serde(default)]
    open_only: bool,
    instructional_method: Option<String>,
    campus: Option<String>,
    #[serde(default = "default_limit")]
    limit: i32,
    #[serde(default)]
    offset: i32,
    sort_by: Option<SortColumn>,
    sort_dir: Option<SortDirection>,
}

use crate::data::courses::{SortColumn, SortDirection};

fn default_limit() -> i32 {
    25
}

#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CourseResponse {
    crn: String,
    subject: String,
    course_number: String,
    title: String,
    term_code: String,
    sequence_number: Option<String>,
    instructional_method: Option<String>,
    campus: Option<String>,
    enrollment: i32,
    max_enrollment: i32,
    wait_count: i32,
    wait_capacity: i32,
    credit_hours: Option<i32>,
    credit_hour_low: Option<i32>,
    credit_hour_high: Option<i32>,
    cross_list: Option<String>,
    cross_list_capacity: Option<i32>,
    cross_list_count: Option<i32>,
    link_identifier: Option<String>,
    is_section_linked: Option<bool>,
    part_of_term: Option<String>,
    meeting_times: Vec<crate::data::models::DbMeetingTime>,
    attributes: Vec<String>,
    instructors: Vec<InstructorResponse>,
}

#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorResponse {
    banner_id: String,
    display_name: String,
    email: Option<String>,
    is_primary: bool,
    rmp_rating: Option<f32>,
    rmp_num_ratings: Option<i32>,
}

#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SearchResponse {
    courses: Vec<CourseResponse>,
    total_count: i32,
    offset: i32,
    limit: i32,
}

#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CodeDescription {
    code: String,
    description: String,
}

/// Build a `CourseResponse` from a DB course with pre-fetched instructor details.
fn build_course_response(
    course: &crate::data::models::Course,
    instructors: Vec<crate::data::models::CourseInstructorDetail>,
) -> CourseResponse {
    let instructors = instructors
        .into_iter()
        .map(|i| InstructorResponse {
            banner_id: i.banner_id,
            display_name: i.display_name,
            email: i.email,
            is_primary: i.is_primary,
            rmp_rating: i.avg_rating,
            rmp_num_ratings: i.num_ratings,
        })
        .collect();

    CourseResponse {
        crn: course.crn.clone(),
        subject: course.subject.clone(),
        course_number: course.course_number.clone(),
        title: course.title.clone(),
        term_code: course.term_code.clone(),
        sequence_number: course.sequence_number.clone(),
        instructional_method: course.instructional_method.clone(),
        campus: course.campus.clone(),
        enrollment: course.enrollment,
        max_enrollment: course.max_enrollment,
        wait_count: course.wait_count,
        wait_capacity: course.wait_capacity,
        credit_hours: course.credit_hours,
        credit_hour_low: course.credit_hour_low,
        credit_hour_high: course.credit_hour_high,
        cross_list: course.cross_list.clone(),
        cross_list_capacity: course.cross_list_capacity,
        cross_list_count: course.cross_list_count,
        link_identifier: course.link_identifier.clone(),
        is_section_linked: course.is_section_linked,
        part_of_term: course.part_of_term.clone(),
        meeting_times: serde_json::from_value(course.meeting_times.clone()).unwrap_or_default(),
        attributes: serde_json::from_value(course.attributes.clone()).unwrap_or_default(),
        instructors,
    }
}

/// `GET /api/courses/search`
async fn search_courses(
    State(state): State<AppState>,
    axum_extra::extract::Query(params): axum_extra::extract::Query<SearchParams>,
) -> Result<Json<SearchResponse>, (AxumStatusCode, String)> {
    let limit = params.limit.clamp(1, 100);
    let offset = params.offset.max(0);

    let (courses, total_count) = crate::data::courses::search_courses(
        &state.db_pool,
        &params.term,
        if params.subject.is_empty() {
            None
        } else {
            Some(&params.subject)
        },
        params.q.as_deref(),
        params.course_number_low,
        params.course_number_high,
        params.open_only,
        params.instructional_method.as_deref(),
        params.campus.as_deref(),
        limit,
        offset,
        params.sort_by,
        params.sort_dir,
    )
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "Course search failed");
        (
            AxumStatusCode::INTERNAL_SERVER_ERROR,
            "Search failed".to_string(),
        )
    })?;

    // Batch-fetch all instructors in a single query instead of N+1
    let course_ids: Vec<i32> = courses.iter().map(|c| c.id).collect();
    let mut instructor_map =
        crate::data::courses::get_instructors_for_courses(&state.db_pool, &course_ids)
            .await
            .unwrap_or_default();

    let course_responses: Vec<CourseResponse> = courses
        .iter()
        .map(|course| {
            let instructors = instructor_map.remove(&course.id).unwrap_or_default();
            build_course_response(course, instructors)
        })
        .collect();

    Ok(Json(SearchResponse {
        courses: course_responses,
        total_count: total_count as i32,
        offset,
        limit,
    }))
}

/// `GET /api/courses/:term/:crn`
async fn get_course(
    State(state): State<AppState>,
    Path((term, crn)): Path<(String, String)>,
) -> Result<Json<CourseResponse>, (AxumStatusCode, String)> {
    let course = crate::data::courses::get_course_by_crn(&state.db_pool, &crn, &term)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "Course lookup failed");
            (
                AxumStatusCode::INTERNAL_SERVER_ERROR,
                "Lookup failed".to_string(),
            )
        })?
        .ok_or_else(|| (AxumStatusCode::NOT_FOUND, "Course not found".to_string()))?;

    let instructors = crate::data::courses::get_course_instructors(&state.db_pool, course.id)
        .await
        .unwrap_or_default();
    Ok(Json(build_course_response(&course, instructors)))
}

/// `GET /api/terms`
async fn get_terms(
    State(state): State<AppState>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
    let cache = state.reference_cache.read().await;
    let term_codes = crate::data::courses::get_available_terms(&state.db_pool)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "Failed to get terms");
            (
                AxumStatusCode::INTERNAL_SERVER_ERROR,
                "Failed to get terms".to_string(),
            )
        })?;

    let terms: Vec<CodeDescription> = term_codes
        .into_iter()
        .map(|code| {
            let description = cache
                .lookup("term", &code)
                .unwrap_or("Unknown Term")
                .to_string();
            CodeDescription { code, description }
        })
        .collect();

    Ok(Json(terms))
}

/// `GET /api/subjects?term=202620`
async fn get_subjects(
    State(state): State<AppState>,
    Query(params): Query<SubjectsParams>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
    let rows = crate::data::courses::get_subjects_by_enrollment(&state.db_pool, &params.term)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "Failed to get subjects");
            (
                AxumStatusCode::INTERNAL_SERVER_ERROR,
                "Failed to get subjects".to_string(),
            )
        })?;

    let subjects: Vec<CodeDescription> = rows
        .into_iter()
        .map(|(code, description, _enrollment)| CodeDescription { code, description })
        .collect();

    Ok(Json(subjects))
}

/// `GET /api/reference/:category`
async fn get_reference(
    State(state): State<AppState>,
    Path(category): Path<String>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
    let cache = state.reference_cache.read().await;
    let entries = cache.entries_for_category(&category);

    if entries.is_empty() {
        // Fall back to DB query in case cache doesn't have this category
        drop(cache);
        let rows = crate::data::reference::get_by_category(&category, &state.db_pool)
            .await
            .map_err(|e| {
                tracing::error!(error = %e, category = %category, "Reference lookup failed");
                (
                    AxumStatusCode::INTERNAL_SERVER_ERROR,
                    "Lookup failed".to_string(),
                )
            })?;

        return Ok(Json(
            rows.into_iter()
                .map(|r| CodeDescription {
                    code: r.code,
                    description: r.description,
                })
                .collect(),
        ));
    }

    Ok(Json(
        entries
            .into_iter()
            .map(|(code, desc)| CodeDescription {
                code: code.to_string(),
                description: desc.to_string(),
            })
            .collect(),
    ))
}
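One detail worth calling out in the hunk above: search_courses uses axum_extra's Query extractor because it deserializes repeated keys (e.g. ?subject=CS&subject=MAT) into the Vec<String> field, which axum's built-in Query does not. A hedged sketch of the equivalent deserialization, assuming serde_html_form (the crate axum_extra uses under the hood):

let params: SearchParams =
    serde_html_form::from_str("term=202510&subject=CS&subject=MAT&open_only=true").unwrap();
assert_eq!(params.subject, vec!["CS", "MAT"]);
assert!(params.open_only);
assert_eq!(params.limit, 25); // default_limit fills in when omitted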
@@ -0,0 +1,39 @@
use banner::utils::shutdown::join_tasks;
use tokio::task::JoinHandle;

#[tokio::test]
async fn test_join_tasks_success() {
    // Create some tasks that complete successfully
    let handles: Vec<JoinHandle<()>> = vec![
        tokio::spawn(async { tokio::time::sleep(tokio::time::Duration::from_millis(10)).await }),
        tokio::spawn(async { tokio::time::sleep(tokio::time::Duration::from_millis(20)).await }),
        tokio::spawn(async { /* immediate completion */ }),
    ];

    // All tasks should complete successfully
    let result = join_tasks(handles).await;
    assert!(
        result.is_ok(),
        "Expected all tasks to complete successfully"
    );
}

#[tokio::test]
async fn test_join_tasks_with_panic() {
    // Create some tasks, including one that panics
    let handles: Vec<JoinHandle<()>> = vec![
        tokio::spawn(async { tokio::time::sleep(tokio::time::Duration::from_millis(10)).await }),
        tokio::spawn(async { panic!("intentional test panic") }),
        tokio::spawn(async { /* immediate completion */ }),
    ];

    // Should return an error because one task panicked
    let result = join_tasks(handles).await;
    assert!(result.is_err(), "Expected an error when a task panics");

    let error_msg = result.unwrap_err().to_string();
    assert!(
        error_msg.contains("1 task(s) panicked"),
        "Error message should mention panicked tasks"
    );
}
@@ -0,0 +1,212 @@
mod helpers;

use banner::data::batch::batch_upsert_courses;
use sqlx::PgPool;

#[sqlx::test]
async fn test_batch_upsert_empty_slice(pool: PgPool) {
    batch_upsert_courses(&[], &pool).await.unwrap();

    let count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM courses")
        .fetch_one(&pool)
        .await
        .unwrap();

    assert_eq!(count.0, 0);
}

#[sqlx::test]
async fn test_batch_upsert_inserts_new_courses(pool: PgPool) {
    let courses = vec![
        helpers::make_course("10001", "202510", "CS", "1083", "Intro to CS", 25, 30, 0, 5),
        helpers::make_course(
            "10002",
            "202510",
            "MAT",
            "1214",
            "Calculus I",
            40,
            45,
            3,
            10,
        ),
    ];

    batch_upsert_courses(&courses, &pool).await.unwrap();

    let rows: Vec<(String, String, String, String, i32, i32, i32, i32)> = sqlx::query_as(
        "SELECT crn, subject, course_number, title, enrollment, max_enrollment, wait_count, wait_capacity
         FROM courses ORDER BY crn",
    )
    .fetch_all(&pool)
    .await
    .unwrap();

    assert_eq!(rows.len(), 2);

    let (crn, subject, course_number, title, enrollment, max_enrollment, wait_count, wait_capacity) =
        &rows[0];
    assert_eq!(crn, "10001");
    assert_eq!(subject, "CS");
    assert_eq!(course_number, "1083");
    assert_eq!(title, "Intro to CS");
    assert_eq!(*enrollment, 25);
    assert_eq!(*max_enrollment, 30);
    assert_eq!(*wait_count, 0);
    assert_eq!(*wait_capacity, 5);

    let (crn, subject, ..) = &rows[1];
    assert_eq!(crn, "10002");
    assert_eq!(subject, "MAT");
}

#[sqlx::test]
async fn test_batch_upsert_updates_existing(pool: PgPool) {
    let initial = vec![helpers::make_course(
        "20001",
        "202510",
        "CS",
        "3443",
        "App Programming",
        10,
        35,
        0,
        5,
    )];
    batch_upsert_courses(&initial, &pool).await.unwrap();

    // Upsert the same CRN+term with updated enrollment
    let updated = vec![helpers::make_course(
        "20001",
        "202510",
        "CS",
        "3443",
        "App Programming",
        30,
        35,
        2,
        5,
    )];
    batch_upsert_courses(&updated, &pool).await.unwrap();

    let count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM courses")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(count.0, 1, "upsert should not create a duplicate row");

    let (enrollment, wait_count): (i32, i32) =
        sqlx::query_as("SELECT enrollment, wait_count FROM courses WHERE crn = '20001'")
            .fetch_one(&pool)
            .await
            .unwrap();
    assert_eq!(enrollment, 30);
    assert_eq!(wait_count, 2);
}

#[sqlx::test]
async fn test_batch_upsert_mixed_insert_and_update(pool: PgPool) {
    let initial = vec![
        helpers::make_course("30001", "202510", "CS", "1083", "Intro to CS", 10, 30, 0, 5),
        helpers::make_course(
            "30002",
            "202510",
            "CS",
            "2073",
            "Computer Architecture",
            20,
            30,
            0,
            5,
        ),
    ];
    batch_upsert_courses(&initial, &pool).await.unwrap();

    // Update both existing courses and add a new one
    let mixed = vec![
        helpers::make_course("30001", "202510", "CS", "1083", "Intro to CS", 15, 30, 1, 5),
        helpers::make_course(
            "30002",
            "202510",
            "CS",
            "2073",
            "Computer Architecture",
            25,
            30,
            0,
            5,
        ),
        helpers::make_course(
            "30003",
            "202510",
            "MAT",
            "1214",
            "Calculus I",
            40,
            45,
            3,
            10,
        ),
    ];
    batch_upsert_courses(&mixed, &pool).await.unwrap();

    let count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM courses")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(count.0, 3, "should have 2 updated + 1 new = 3 total rows");

    // Verify updated values
    let (enrollment,): (i32,) =
        sqlx::query_as("SELECT enrollment FROM courses WHERE crn = '30001'")
            .fetch_one(&pool)
            .await
            .unwrap();
    assert_eq!(enrollment, 15);

    let (enrollment,): (i32,) =
        sqlx::query_as("SELECT enrollment FROM courses WHERE crn = '30002'")
            .fetch_one(&pool)
            .await
            .unwrap();
    assert_eq!(enrollment, 25);

    // Verify new row
    let (subject,): (String,) = sqlx::query_as("SELECT subject FROM courses WHERE crn = '30003'")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(subject, "MAT");
}

#[sqlx::test]
async fn test_batch_upsert_unique_constraint_crn_term(pool: PgPool) {
    // Same CRN, different term codes → should produce two separate rows
    let courses = vec![
        helpers::make_course("40001", "202510", "CS", "1083", "Intro to CS", 25, 30, 0, 5),
        helpers::make_course("40001", "202520", "CS", "1083", "Intro to CS", 10, 30, 0, 5),
    ];

    batch_upsert_courses(&courses, &pool).await.unwrap();

    let count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM courses WHERE crn = '40001'")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(
        count.0, 2,
        "same CRN with different term codes should be separate rows"
    );

    let rows: Vec<(String, i32)> = sqlx::query_as(
        "SELECT term_code, enrollment FROM courses WHERE crn = '40001' ORDER BY term_code",
    )
    .fetch_all(&pool)
    .await
    .unwrap();

    assert_eq!(rows[0].0, "202510");
    assert_eq!(rows[0].1, 25);
    assert_eq!(rows[1].0, "202520");
    assert_eq!(rows[1].1, 10);
}
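The semantics these tests pin down correspond to a Postgres upsert keyed on (crn, term_code). A hedged sketch of the statement shape for a single row; the real batch_upsert_courses presumably binds many rows at once (e.g. via UNNEST) and updates more columns than shown here:

// Inside an async fn with `pool: PgPool` in scope, returning anyhow::Result.
sqlx::query(
    "INSERT INTO courses (crn, term_code, subject, course_number, title,
                          enrollment, max_enrollment, wait_count, wait_capacity)
     VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
     ON CONFLICT (crn, term_code) DO UPDATE SET
         enrollment = EXCLUDED.enrollment,
         wait_count = EXCLUDED.wait_count",
)
.bind("20001")
.bind("202510")
.bind("CS")
.bind("3443")
.bind("App Programming")
.bind(30i32)
.bind(35i32)
.bind(2i32)
.bind(5i32)
.execute(&pool)
.await?;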
@@ -0,0 +1,435 @@
|
||||
mod helpers;
|
||||
|
||||
use banner::data::models::{ScrapePriority, TargetType};
|
||||
use banner::data::scrape_jobs;
|
||||
use serde_json::json;
|
||||
use sqlx::PgPool;
|
||||
|
||||
// ── fetch_and_lock_job ──────────────────────────────────────────────
|
||||
|
||||
#[sqlx::test]
|
||||
async fn fetch_and_lock_empty_queue(pool: PgPool) {
|
||||
let result = scrape_jobs::fetch_and_lock_job(&pool).await.unwrap();
|
||||
assert!(result.is_none());
|
||||
}
|
||||
|
||||
#[sqlx::test]
|
||||
async fn fetch_and_lock_returns_job_and_sets_locked_at(pool: PgPool) {
|
||||
let id = helpers::insert_scrape_job(
|
||||
&pool,
|
||||
TargetType::Subject,
|
||||
json!({"subject": "CS"}),
|
||||
ScrapePriority::Medium,
|
||||
false,
|
||||
0,
|
||||
3,
|
||||
)
|
||||
.await;
|
||||
|
||||
let job = scrape_jobs::fetch_and_lock_job(&pool)
|
||||
.await
|
||||
.unwrap()
|
||||
.expect("should return a job");
|
||||
|
||||
assert_eq!(job.id, id);
|
||||
assert!(matches!(job.target_type, TargetType::Subject));
|
||||
assert_eq!(job.target_payload, json!({"subject": "CS"}));
|
||||
|
||||
// Verify locked_at was set in the database
|
||||
let (locked_at,): (Option<chrono::DateTime<chrono::Utc>>,) =
|
||||
sqlx::query_as("SELECT locked_at FROM scrape_jobs WHERE id = $1")
|
||||
.bind(id)
|
||||
.fetch_one(&pool)
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(locked_at.is_some(), "locked_at should be set after fetch");
|
||||
}
|
||||
|
||||
#[sqlx::test]
|
||||
async fn fetch_and_lock_skips_locked_jobs(pool: PgPool) {
|
||||
helpers::insert_scrape_job(
|
||||
&pool,
|
||||
TargetType::Subject,
|
||||
json!({"subject": "CS"}),
|
||||
ScrapePriority::Medium,
|
||||
true, // locked
|
||||
0,
|
||||
3,
|
||||
)
|
||||
.await;
|
||||
|
||||
let result = scrape_jobs::fetch_and_lock_job(&pool).await.unwrap();
|
||||
assert!(result.is_none(), "locked jobs should be skipped");
|
||||
}
|
||||
|
||||
#[sqlx::test]
|
||||
async fn fetch_and_lock_skips_future_execute_at(pool: PgPool) {
|
||||
// Insert a job with execute_at in the future via raw SQL
|
||||
sqlx::query(
|
||||
"INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at)
|
||||
VALUES ('Subject', '{\"subject\": \"CS\"}', 'Medium', NOW() + INTERVAL '1 hour')",
|
||||
)
|
||||
.execute(&pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let result = scrape_jobs::fetch_and_lock_job(&pool).await.unwrap();
|
||||
assert!(result.is_none(), "future execute_at jobs should be skipped");
|
||||
}
|
||||
|
||||
#[sqlx::test]
|
||||
async fn fetch_and_lock_priority_desc_ordering(pool: PgPool) {
|
||||
// Insert low priority first, then critical
|
||||
helpers::insert_scrape_job(
|
||||
&pool,
|
||||
TargetType::Subject,
|
||||
json!({"subject": "LOW"}),
|
||||
ScrapePriority::Low,
|
||||
false,
|
||||
0,
|
||||
3,
|
||||
)
|
||||
.await;
|
||||
|
||||
helpers::insert_scrape_job(
|
||||
&pool,
|
||||
TargetType::Subject,
|
||||
json!({"subject": "CRIT"}),
|
||||
ScrapePriority::Critical,
|
||||
false,
|
||||
0,
|
||||
3,
|
||||
)
|
||||
.await;
|
||||
|
||||
let job = scrape_jobs::fetch_and_lock_job(&pool)
|
||||
.await
|
||||
.unwrap()
|
||||
.expect("should return a job");
|
||||
|
||||
assert_eq!(
|
||||
job.target_payload,
|
||||
json!({"subject": "CRIT"}),
|
||||
"Critical priority should be fetched before Low"
|
||||
);
|
||||
}
|
||||
|
||||
#[sqlx::test]
|
||||
async fn fetch_and_lock_execute_at_asc_ordering(pool: PgPool) {
|
||||
// Insert an older job and a newer job, both same priority
|
||||
sqlx::query(
|
||||
"INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at)
|
||||
VALUES ('Subject', '{\"subject\": \"OLDER\"}', 'Medium', NOW() - INTERVAL '2 hours')",
|
||||
)
|
||||
.execute(&pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at)
|
||||
VALUES ('Subject', '{\"subject\": \"NEWER\"}', 'Medium', NOW() - INTERVAL '1 hour')",
|
||||
)
|
||||
.execute(&pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let job = scrape_jobs::fetch_and_lock_job(&pool)
|
||||
.await
|
||||
.unwrap()
|
||||
.expect("should return a job");
|
||||
|
||||
assert_eq!(
|
||||
job.target_payload,
|
||||
json!({"subject": "OLDER"}),
|
||||
"Older execute_at should be fetched first"
|
||||
);
|
||||
}
|
||||
|
||||
// ── delete_job ──────────────────────────────────────────────────────
|
||||
|
||||
#[sqlx::test]
|
||||
async fn delete_job_removes_row(pool: PgPool) {
|
||||
let id = helpers::insert_scrape_job(
|
||||
&pool,
|
||||
TargetType::SingleCrn,
|
||||
json!({"crn": "12345"}),
|
||||
ScrapePriority::High,
|
||||
false,
|
||||
0,
|
||||
3,
|
||||
)
|
||||
.await;
|
||||
|
||||
scrape_jobs::delete_job(id, &pool).await.unwrap();
|
||||
|
||||
let (count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM scrape_jobs WHERE id = $1")
|
||||
.bind(id)
|
||||
.fetch_one(&pool)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(count, 0, "row should be deleted");
|
||||
}
|
||||
|
||||
#[sqlx::test]
|
||||
async fn delete_job_nonexistent_id_no_error(pool: PgPool) {
|
||||
// Deleting a non-existent ID should not error
|
||||
scrape_jobs::delete_job(999_999, &pool).await.unwrap();
|
||||
}
|
||||
|
||||
// ── unlock_job ──────────────────────────────────────────────────────
|
||||
|
||||
#[sqlx::test]
|
||||
async fn unlock_job_clears_locked_at(pool: PgPool) {
|
||||
let id = helpers::insert_scrape_job(
|
||||
&pool,
|
||||
TargetType::CrnList,
|
||||
json!({"crns": [1, 2, 3]}),
|
||||
ScrapePriority::Medium,
|
||||
true, // locked
|
||||
0,
|
||||
3,
|
||||
)
|
||||
.await;
|
||||
|
||||
scrape_jobs::unlock_job(id, &pool).await.unwrap();
|
||||
|
||||
let (locked_at,): (Option<chrono::DateTime<chrono::Utc>>,) =
|
||||
sqlx::query_as("SELECT locked_at FROM scrape_jobs WHERE id = $1")
|
||||
.bind(id)
|
||||
.fetch_one(&pool)
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(locked_at.is_none(), "locked_at should be cleared");
|
||||
}
|
||||
|
||||
// ── unlock_and_increment_retry ──────────────────────────────────────
|
||||
|
||||
#[sqlx::test]
|
||||
async fn unlock_and_increment_retry_has_retries_remaining(pool: PgPool) {
|
||||
let id = helpers::insert_scrape_job(
|
||||
&pool,
|
||||
        TargetType::Subject,
        json!({"subject": "CS"}),
        ScrapePriority::Medium,
        true,
        0, // retry_count
        3, // max_retries
    )
    .await;

    let has_retries = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
        .await
        .unwrap();
    assert!(has_retries, "should have retries remaining (0→1, max=3)");

    // Verify state in DB
    let (retry_count, locked_at): (i32, Option<chrono::DateTime<chrono::Utc>>) =
        sqlx::query_as("SELECT retry_count, locked_at FROM scrape_jobs WHERE id = $1")
            .bind(id)
            .fetch_one(&pool)
            .await
            .unwrap();
    assert_eq!(retry_count, 1);
    assert!(locked_at.is_none(), "should be unlocked");
}

#[sqlx::test]
async fn unlock_and_increment_retry_exhausted(pool: PgPool) {
    let id = helpers::insert_scrape_job(
        &pool,
        TargetType::Subject,
        json!({"subject": "CS"}),
        ScrapePriority::Medium,
        true,
        2, // retry_count
        3, // max_retries
    )
    .await;

    let has_retries = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
        .await
        .unwrap();
    assert!(
        !has_retries,
        "should NOT have retries remaining (2→3, max=3)"
    );

    let (retry_count,): (i32,) =
        sqlx::query_as("SELECT retry_count FROM scrape_jobs WHERE id = $1")
            .bind(id)
            .fetch_one(&pool)
            .await
            .unwrap();
    assert_eq!(retry_count, 3);
}

#[sqlx::test]
async fn unlock_and_increment_retry_already_exceeded(pool: PgPool) {
    let id = helpers::insert_scrape_job(
        &pool,
        TargetType::Subject,
        json!({"subject": "CS"}),
        ScrapePriority::Medium,
        true,
        5, // retry_count already past max
        3, // max_retries
    )
    .await;

    let has_retries = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
        .await
        .unwrap();
    assert!(
        !has_retries,
        "should NOT have retries remaining (5→6, max=3)"
    );

    let (retry_count,): (i32,) =
        sqlx::query_as("SELECT retry_count FROM scrape_jobs WHERE id = $1")
            .bind(id)
            .fetch_one(&pool)
            .await
            .unwrap();
    assert_eq!(retry_count, 6);
}

// ── find_existing_job_payloads ──────────────────────────────────────

#[sqlx::test]
async fn find_existing_payloads_returns_matching(pool: PgPool) {
    let payload_a = json!({"subject": "CS"});
    let payload_b = json!({"subject": "MAT"});
    let payload_c = json!({"subject": "ENG"});

    // Insert A and B as Subject jobs
    helpers::insert_scrape_job(
        &pool,
        TargetType::Subject,
        payload_a.clone(),
        ScrapePriority::Medium,
        false,
        0,
        3,
    )
    .await;
    helpers::insert_scrape_job(
        &pool,
        TargetType::Subject,
        payload_b.clone(),
        ScrapePriority::Medium,
        false,
        0,
        3,
    )
    .await;
    // Insert C as a different target type
    helpers::insert_scrape_job(
        &pool,
        TargetType::SingleCrn,
        payload_c.clone(),
        ScrapePriority::Medium,
        false,
        0,
        3,
    )
    .await;

    let candidates = vec![payload_a.clone(), payload_b.clone(), payload_c.clone()];
    let existing = scrape_jobs::find_existing_job_payloads(TargetType::Subject, &candidates, &pool)
        .await
        .unwrap();

    assert!(existing.contains(&payload_a.to_string()));
    assert!(existing.contains(&payload_b.to_string()));
    // payload_c is SingleCrn, not Subject — should not match
    assert!(!existing.contains(&payload_c.to_string()));
}

#[sqlx::test]
async fn find_existing_payloads_ignores_locked(pool: PgPool) {
    let payload = json!({"subject": "CS"});

    helpers::insert_scrape_job(
        &pool,
        TargetType::Subject,
        payload.clone(),
        ScrapePriority::Medium,
        true, // locked
        0,
        3,
    )
    .await;

    let candidates = vec![payload.clone()];
    let existing = scrape_jobs::find_existing_job_payloads(TargetType::Subject, &candidates, &pool)
        .await
        .unwrap();

    assert!(existing.is_empty(), "locked jobs should be ignored");
}

#[sqlx::test]
async fn find_existing_payloads_empty_candidates(pool: PgPool) {
    // Insert a job so the table isn't empty
    helpers::insert_scrape_job(
        &pool,
        TargetType::Subject,
        json!({"subject": "CS"}),
        ScrapePriority::Medium,
        false,
        0,
        3,
    )
    .await;

    let existing = scrape_jobs::find_existing_job_payloads(TargetType::Subject, &[], &pool)
        .await
        .unwrap();

    assert!(
        existing.is_empty(),
        "empty candidates should return empty result"
    );
}

// ── batch_insert_jobs ───────────────────────────────────────────────

#[sqlx::test]
async fn batch_insert_jobs_inserts_multiple(pool: PgPool) {
    let jobs = vec![
        (
            json!({"subject": "CS"}),
            TargetType::Subject,
            ScrapePriority::High,
        ),
        (
            json!({"subject": "MAT"}),
            TargetType::Subject,
            ScrapePriority::Medium,
        ),
        (
            json!({"crn": "12345"}),
            TargetType::SingleCrn,
            ScrapePriority::Low,
        ),
    ];

    scrape_jobs::batch_insert_jobs(&jobs, &pool).await.unwrap();

    let (count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM scrape_jobs")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(count, 3);
}

#[sqlx::test]
async fn batch_insert_jobs_empty_slice(pool: PgPool) {
    scrape_jobs::batch_insert_jobs(&[], &pool).await.unwrap();

    let (count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM scrape_jobs")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(count, 0);
}
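Editor's note: the three tests above pin down the observable contract of `scrape_jobs::unlock_and_increment_retry`: it clears `locked_at`, increments `retry_count` unconditionally (even past `max_retries`), and reports whether another attempt remains. A minimal sketch of a query with that shape follows; it is illustrative only, written under the assumption of a single-UPDATE implementation, and is not the repository's actual code.

// Illustrative sketch only (assumed shape, not the repository's implementation):
// one UPDATE that unlocks the row, bumps the counter, and returns the new count.
async fn unlock_and_increment_retry_sketch(
    id: i32,
    max_retries: i32,
    pool: &sqlx::PgPool,
) -> Result<bool, sqlx::Error> {
    let (retry_count,): (i32,) = sqlx::query_as(
        "UPDATE scrape_jobs
         SET locked_at = NULL, retry_count = retry_count + 1
         WHERE id = $1
         RETURNING retry_count",
    )
    .bind(id)
    .fetch_one(pool)
    .await?;
    // Matches the assertions above: 0→1 with max 3 leaves retries; 2→3 and 5→6 do not.
    Ok(retry_count < max_retries)
}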
@@ -0,0 +1,88 @@
use banner::banner::Course;
use banner::data::models::{ScrapePriority, TargetType};
use chrono::Utc;
use sqlx::PgPool;

/// Build a test `Course` (Banner API model) with sensible defaults.
///
/// Only the fields used by `batch_upsert_courses` need meaningful values;
/// the rest are filled with harmless placeholders.
pub fn make_course(
    crn: &str,
    term: &str,
    subject: &str,
    course_number: &str,
    title: &str,
    enrollment: i32,
    max_enrollment: i32,
    wait_count: i32,
    wait_capacity: i32,
) -> Course {
    Course {
        id: 0,
        term: term.to_owned(),
        term_desc: String::new(),
        course_reference_number: crn.to_owned(),
        part_of_term: "1".to_owned(),
        course_number: course_number.to_owned(),
        subject: subject.to_owned(),
        subject_description: subject.to_owned(),
        sequence_number: "001".to_owned(),
        campus_description: "Main Campus".to_owned(),
        schedule_type_description: "Lecture".to_owned(),
        course_title: title.to_owned(),
        credit_hours: Some(3),
        maximum_enrollment: max_enrollment,
        enrollment,
        seats_available: max_enrollment - enrollment,
        wait_capacity,
        wait_count,
        cross_list: None,
        cross_list_capacity: None,
        cross_list_count: None,
        cross_list_available: None,
        credit_hour_high: None,
        credit_hour_low: None,
        credit_hour_indicator: None,
        open_section: enrollment < max_enrollment,
        link_identifier: None,
        is_section_linked: false,
        subject_course: format!("{subject}{course_number}"),
        reserved_seat_summary: None,
        instructional_method: "FF".to_owned(),
        instructional_method_description: "Face to Face".to_owned(),
        section_attributes: vec![],
        faculty: vec![],
        meetings_faculty: vec![],
    }
}

/// Insert a scrape job row directly via SQL, returning the generated ID.
pub async fn insert_scrape_job(
    pool: &PgPool,
    target_type: TargetType,
    payload: serde_json::Value,
    priority: ScrapePriority,
    locked: bool,
    retry_count: i32,
    max_retries: i32,
) -> i32 {
    let locked_at = if locked { Some(Utc::now()) } else { None };

    let (id,): (i32,) = sqlx::query_as(
        "INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at, locked_at, retry_count, max_retries)
         VALUES ($1, $2, $3, NOW(), $4, $5, $6)
         RETURNING id",
    )
    .bind(target_type)
    .bind(payload)
    .bind(priority)
    .bind(locked_at)
    .bind(retry_count)
    .bind(max_retries)
    .fetch_one(pool)
    .await
    .expect("insert_scrape_job failed");

    id
}
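Editor's note: for orientation, a hypothetical caller of these two helpers might look like the sketch below. The test name and every field value are illustrative placeholders, not data taken from the repository.

#[sqlx::test]
async fn sketch_usage(pool: PgPool) {
    // Hypothetical example; values are placeholders, not repo test data.
    let id = insert_scrape_job(
        &pool,
        TargetType::Subject,
        serde_json::json!({"subject": "CS"}),
        ScrapePriority::High,
        false, // not locked
        0,     // retry_count
        3,     // max_retries
    )
    .await;
    assert!(id > 0);

    // 25 of 30 seats filled, empty waitlist → section reported as open.
    let course = make_course("10001", "202510", "CS", "1063", "Intro", 25, 30, 0, 0);
    assert!(course.open_section);
    assert_eq!(course.seats_available, 5);
}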
Vendored
+8
@@ -0,0 +1,8 @@
node_modules
.DS_Store
dist
dist-ssr
*.local
count.txt
.env
.svelte-kit
Vendored
+11
@@ -0,0 +1,11 @@
{
  "files.watcherExclude": {
    "**/routeTree.gen.ts": true
  },
  "search.exclude": {
    "**/routeTree.gen.ts": true
  },
  "files.readonlyInclude": {
    "**/routeTree.gen.ts": true
  }
}
@@ -0,0 +1,30 @@
{
  "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
  "vcs": {
    "enabled": true,
    "clientKind": "git",
    "useIgnoreFile": true
  },
  "files": {
    "ignoreUnknown": false,
    "ignore": ["dist/", "node_modules/", ".svelte-kit/", "src/lib/bindings/"]
  },
  "formatter": {
    "enabled": true,
    "indentStyle": "space",
    "indentWidth": 2,
    "lineWidth": 100,
    "lineEnding": "lf"
  },
  "javascript": {
    "formatter": {
      "quoteStyle": "double",
      "trailingCommas": "es5",
      "semicolons": "always",
      "arrowParentheses": "always"
    }
  },
  "linter": {
    "enabled": false
  }
}
+521
@@ -0,0 +1,521 @@
{
  "lockfileVersion": 1,
  "configVersion": 1,
  "workspaces": {
    "": {
      "name": "banner-web",
      "dependencies": {
        "overlayscrollbars": "^2.14.0",
        "overlayscrollbars-svelte": "^0.5.5",
      },
      "devDependencies": {
        "@biomejs/biome": "^1.9.4",
        "@fontsource-variable/inter": "^5.2.5",
        "@lucide/svelte": "^0.563.0",
        "@sveltejs/adapter-static": "^3.0.8",
        "@sveltejs/kit": "^2.16.0",
        "@sveltejs/vite-plugin-svelte": "^5.0.3",
        "@tailwindcss/vite": "^4.0.0",
        "@tanstack/table-core": "^8.21.3",
        "@types/node": "^25.1.0",
        "bits-ui": "^1.3.7",
        "clsx": "^2.1.1",
        "jsdom": "^26.0.0",
        "svelte": "^5.19.0",
        "svelte-check": "^4.1.4",
        "tailwind-merge": "^3.0.1",
        "tailwindcss": "^4.0.0",
        "typescript": "^5.7.2",
        "vite": "^6.3.5",
        "vitest": "^3.0.5",
      },
    },
  },
  "packages": {
"@asamuzakjp/css-color": ["@asamuzakjp/css-color@3.2.0", "", { "dependencies": { "@csstools/css-calc": "^2.1.3", "@csstools/css-color-parser": "^3.0.9", "@csstools/css-parser-algorithms": "^3.0.4", "@csstools/css-tokenizer": "^3.0.3", "lru-cache": "^10.4.3" } }, "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw=="],
|
||||
|
||||
"@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", "@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="],
|
||||
|
||||
"@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.9.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw=="],
|
||||
|
||||
"@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.9.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg=="],
|
||||
|
||||
"@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g=="],
|
||||
|
||||
"@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA=="],
|
||||
|
||||
"@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg=="],
|
||||
|
||||
"@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg=="],
|
||||
|
||||
"@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.9.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg=="],
|
||||
|
||||
"@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.9.4", "", { "os": "win32", "cpu": "x64" }, "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA=="],
|
||||
|
||||
"@csstools/color-helpers": ["@csstools/color-helpers@5.1.0", "", {}, "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA=="],
|
||||
|
||||
"@csstools/css-calc": ["@csstools/css-calc@2.1.4", "", { "peerDependencies": { "@csstools/css-parser-algorithms": "^3.0.5", "@csstools/css-tokenizer": "^3.0.4" } }, "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ=="],
|
||||
|
||||
"@csstools/css-color-parser": ["@csstools/css-color-parser@3.1.0", "", { "dependencies": { "@csstools/color-helpers": "^5.1.0", "@csstools/css-calc": "^2.1.4" }, "peerDependencies": { "@csstools/css-parser-algorithms": "^3.0.5", "@csstools/css-tokenizer": "^3.0.4" } }, "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA=="],
|
||||
|
||||
"@csstools/css-parser-algorithms": ["@csstools/css-parser-algorithms@3.0.5", "", { "peerDependencies": { "@csstools/css-tokenizer": "^3.0.4" } }, "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ=="],
|
||||
|
||||
"@csstools/css-tokenizer": ["@csstools/css-tokenizer@3.0.4", "", {}, "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw=="],
|
||||
|
||||
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA=="],
|
||||
|
||||
"@esbuild/android-arm": ["@esbuild/android-arm@0.25.12", "", { "os": "android", "cpu": "arm" }, "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg=="],
|
||||
|
||||
"@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.12", "", { "os": "android", "cpu": "arm64" }, "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg=="],
|
||||
|
||||
"@esbuild/android-x64": ["@esbuild/android-x64@0.25.12", "", { "os": "android", "cpu": "x64" }, "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg=="],
|
||||
|
||||
"@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg=="],
|
||||
|
||||
"@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA=="],
|
||||
|
||||
"@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.12", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg=="],
|
||||
|
||||
"@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.12", "", { "os": "freebsd", "cpu": "x64" }, "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ=="],
|
||||
|
||||
"@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.12", "", { "os": "linux", "cpu": "arm" }, "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw=="],
|
||||
|
||||
"@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ=="],
|
||||
|
||||
"@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.12", "", { "os": "linux", "cpu": "ia32" }, "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA=="],
|
||||
|
||||
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng=="],
|
||||
|
||||
"@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw=="],
|
||||
|
||||
"@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.12", "", { "os": "linux", "cpu": "ppc64" }, "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA=="],
|
||||
|
||||
"@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w=="],
|
||||
|
||||
"@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.12", "", { "os": "linux", "cpu": "s390x" }, "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg=="],
|
||||
|
||||
"@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.12", "", { "os": "linux", "cpu": "x64" }, "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw=="],
|
||||
|
||||
"@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.12", "", { "os": "none", "cpu": "arm64" }, "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg=="],
|
||||
|
||||
"@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.12", "", { "os": "none", "cpu": "x64" }, "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ=="],
|
||||
|
||||
"@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.12", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A=="],
|
||||
|
||||
"@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.12", "", { "os": "openbsd", "cpu": "x64" }, "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw=="],
|
||||
|
||||
"@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.25.12", "", { "os": "none", "cpu": "arm64" }, "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg=="],
|
||||
|
||||
"@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.12", "", { "os": "sunos", "cpu": "x64" }, "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w=="],
|
||||
|
||||
"@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg=="],
|
||||
|
||||
"@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.12", "", { "os": "win32", "cpu": "ia32" }, "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ=="],
|
||||
|
||||
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.12", "", { "os": "win32", "cpu": "x64" }, "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA=="],
|
||||
|
||||
"@floating-ui/core": ["@floating-ui/core@1.7.4", "", { "dependencies": { "@floating-ui/utils": "^0.2.10" } }, "sha512-C3HlIdsBxszvm5McXlB8PeOEWfBhcGBTZGkGlWc2U0KFY5IwG5OQEuQ8rq52DZmcHDlPLd+YFBK+cZcytwIFWg=="],
|
||||
|
||||
"@floating-ui/dom": ["@floating-ui/dom@1.7.5", "", { "dependencies": { "@floating-ui/core": "^1.7.4", "@floating-ui/utils": "^0.2.10" } }, "sha512-N0bD2kIPInNHUHehXhMke1rBGs1dwqvC9O9KYMyyjK7iXt7GAhnro7UlcuYcGdS/yYOlq0MAVgrow8IbWJwyqg=="],
|
||||
|
||||
"@floating-ui/utils": ["@floating-ui/utils@0.2.10", "", {}, "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ=="],
|
||||
|
||||
"@fontsource-variable/inter": ["@fontsource-variable/inter@5.2.8", "", {}, "sha512-kOfP2D+ykbcX/P3IFnokOhVRNoTozo5/JxhAIVYLpea/UBmCQ/YWPBfWIDuBImXX/15KH+eKh4xpEUyS2sQQGQ=="],
|
||||
|
||||
"@internationalized/date": ["@internationalized/date@3.10.1", "", { "dependencies": { "@swc/helpers": "^0.5.0" } }, "sha512-oJrXtQiAXLvT9clCf1K4kxp3eKsQhIaZqxEyowkBcsvZDdZkbWrVmnGknxs5flTD0VGsxrxKgBCZty1EzoiMzA=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
|
||||
|
||||
"@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="],
|
||||
|
||||
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
|
||||
|
||||
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="],
|
||||
|
||||
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
|
||||
|
||||
"@lucide/svelte": ["@lucide/svelte@0.563.1", "", { "peerDependencies": { "svelte": "^5" } }, "sha512-Kt+MbnE5D9RsuI/csmf7M+HWxALe57x3A0DhQ8pPnnUpneh7zuldrYjlT+veWtk+tVnp5doQtaAAxLujzIlhBw=="],
|
||||
|
||||
"@polka/url": ["@polka/url@1.0.0-next.29", "", {}, "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww=="],
|
||||
|
||||
"@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.57.0", "", { "os": "android", "cpu": "arm" }, "sha512-tPgXB6cDTndIe1ah7u6amCI1T0SsnlOuKgg10Xh3uizJk4e5M1JGaUMk7J4ciuAUcFpbOiNhm2XIjP9ON0dUqA=="],
|
||||
|
||||
"@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.57.0", "", { "os": "android", "cpu": "arm64" }, "sha512-sa4LyseLLXr1onr97StkU1Nb7fWcg6niokTwEVNOO7awaKaoRObQ54+V/hrF/BP1noMEaaAW6Fg2d/CfLiq3Mg=="],
|
||||
|
||||
"@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.57.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-/NNIj9A7yLjKdmkx5dC2XQ9DmjIECpGpwHoGmA5E1AhU0fuICSqSWScPhN1yLCkEdkCwJIDu2xIeLPs60MNIVg=="],
|
||||
|
||||
"@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.57.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-xoh8abqgPrPYPr7pTYipqnUi1V3em56JzE/HgDgitTqZBZ3yKCWI+7KUkceM6tNweyUKYru1UMi7FC060RyKwA=="],
|
||||
|
||||
"@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.57.0", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-PCkMh7fNahWSbA0OTUQ2OpYHpjZZr0hPr8lId8twD7a7SeWrvT3xJVyza+dQwXSSq4yEQTMoXgNOfMCsn8584g=="],
|
||||
|
||||
"@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.57.0", "", { "os": "freebsd", "cpu": "x64" }, "sha512-1j3stGx+qbhXql4OCDZhnK7b01s6rBKNybfsX+TNrEe9JNq4DLi1yGiR1xW+nL+FNVvI4D02PUnl6gJ/2y6WJA=="],
|
||||
|
||||
"@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.57.0", "", { "os": "linux", "cpu": "arm" }, "sha512-eyrr5W08Ms9uM0mLcKfM/Uzx7hjhz2bcjv8P2uynfj0yU8GGPdz8iYrBPhiLOZqahoAMB8ZiolRZPbbU2MAi6Q=="],
|
||||
|
||||
"@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.57.0", "", { "os": "linux", "cpu": "arm" }, "sha512-Xds90ITXJCNyX9pDhqf85MKWUI4lqjiPAipJ8OLp8xqI2Ehk+TCVhF9rvOoN8xTbcafow3QOThkNnrM33uCFQA=="],
|
||||
|
||||
"@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.57.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-Xws2KA4CLvZmXjy46SQaXSejuKPhwVdaNinldoYfqruZBaJHqVo6hnRa8SDo9z7PBW5x84SH64+izmldCgbezw=="],
|
||||
|
||||
"@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.57.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-hrKXKbX5FdaRJj7lTMusmvKbhMJSGWJ+w++4KmjiDhpTgNlhYobMvKfDoIWecy4O60K6yA4SnztGuNTQF+Lplw=="],
|
||||
|
||||
"@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.57.0", "", { "os": "linux", "cpu": "none" }, "sha512-6A+nccfSDGKsPm00d3xKcrsBcbqzCTAukjwWK6rbuAnB2bHaL3r9720HBVZ/no7+FhZLz/U3GwwZZEh6tOSI8Q=="],
|
||||
|
||||
"@rollup/rollup-linux-loong64-musl": ["@rollup/rollup-linux-loong64-musl@4.57.0", "", { "os": "linux", "cpu": "none" }, "sha512-4P1VyYUe6XAJtQH1Hh99THxr0GKMMwIXsRNOceLrJnaHTDgk1FTcTimDgneRJPvB3LqDQxUmroBclQ1S0cIJwQ=="],
|
||||
|
||||
"@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.57.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-8Vv6pLuIZCMcgXre6c3nOPhE0gjz1+nZP6T+hwWjr7sVH8k0jRkH+XnfjjOTglyMBdSKBPPz54/y1gToSKwrSQ=="],
|
||||
|
||||
"@rollup/rollup-linux-ppc64-musl": ["@rollup/rollup-linux-ppc64-musl@4.57.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-r1te1M0Sm2TBVD/RxBPC6RZVwNqUTwJTA7w+C/IW5v9Ssu6xmxWEi+iJQlpBhtUiT1raJ5b48pI8tBvEjEFnFA=="],
|
||||
|
||||
"@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.57.0", "", { "os": "linux", "cpu": "none" }, "sha512-say0uMU/RaPm3CDQLxUUTF2oNWL8ysvHkAjcCzV2znxBr23kFfaxocS9qJm+NdkRhF8wtdEEAJuYcLPhSPbjuQ=="],
|
||||
|
||||
"@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.57.0", "", { "os": "linux", "cpu": "none" }, "sha512-/MU7/HizQGsnBREtRpcSbSV1zfkoxSTR7wLsRmBPQ8FwUj5sykrP1MyJTvsxP5KBq9SyE6kH8UQQQwa0ASeoQQ=="],
|
||||
|
||||
"@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.57.0", "", { "os": "linux", "cpu": "s390x" }, "sha512-Q9eh+gUGILIHEaJf66aF6a414jQbDnn29zeu0eX3dHMuysnhTvsUvZTCAyZ6tJhUjnvzBKE4FtuaYxutxRZpOg=="],
|
||||
|
||||
"@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.57.0", "", { "os": "linux", "cpu": "x64" }, "sha512-OR5p5yG5OKSxHReWmwvM0P+VTPMwoBS45PXTMYaskKQqybkS3Kmugq1W+YbNWArF8/s7jQScgzXUhArzEQ7x0A=="],
|
||||
|
||||
"@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.57.0", "", { "os": "linux", "cpu": "x64" }, "sha512-XeatKzo4lHDsVEbm1XDHZlhYZZSQYym6dg2X/Ko0kSFgio+KXLsxwJQprnR48GvdIKDOpqWqssC3iBCjoMcMpw=="],
|
||||
|
||||
"@rollup/rollup-openbsd-x64": ["@rollup/rollup-openbsd-x64@4.57.0", "", { "os": "openbsd", "cpu": "x64" }, "sha512-Lu71y78F5qOfYmubYLHPcJm74GZLU6UJ4THkf/a1K7Tz2ycwC2VUbsqbJAXaR6Bx70SRdlVrt2+n5l7F0agTUw=="],
|
||||
|
||||
"@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.57.0", "", { "os": "none", "cpu": "arm64" }, "sha512-v5xwKDWcu7qhAEcsUubiav7r+48Uk/ENWdr82MBZZRIm7zThSxCIVDfb3ZeRRq9yqk+oIzMdDo6fCcA5DHfMyA=="],
|
||||
|
||||
"@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.57.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-XnaaaSMGSI6Wk8F4KK3QP7GfuuhjGchElsVerCplUuxRIzdvZ7hRBpLR0omCmw+kI2RFJB80nenhOoGXlJ5TfQ=="],
|
||||
|
||||
"@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.57.0", "", { "os": "win32", "cpu": "ia32" }, "sha512-3K1lP+3BXY4t4VihLw5MEg6IZD3ojSYzqzBG571W3kNQe4G4CcFpSUQVgurYgib5d+YaCjeFow8QivWp8vuSvA=="],
|
||||
|
||||
"@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.57.0", "", { "os": "win32", "cpu": "x64" }, "sha512-MDk610P/vJGc5L5ImE4k5s+GZT3en0KoK1MKPXCRgzmksAMk79j4h3k1IerxTNqwDLxsGxStEZVBqG0gIqZqoA=="],
|
||||
|
||||
"@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.57.0", "", { "os": "win32", "cpu": "x64" }, "sha512-Zv7v6q6aV+VslnpwzqKAmrk5JdVkLUzok2208ZXGipjb+msxBr/fJPZyeEXiFgH7k62Ak0SLIfxQRZQvTuf7rQ=="],
|
||||
|
||||
"@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="],
|
||||
|
||||
"@sveltejs/acorn-typescript": ["@sveltejs/acorn-typescript@1.0.8", "", { "peerDependencies": { "acorn": "^8.9.0" } }, "sha512-esgN+54+q0NjB0Y/4BomT9samII7jGwNy/2a3wNZbT2A2RpmXsXwUt24LvLhx6jUq2gVk4cWEvcRO6MFQbOfNA=="],
|
||||
|
||||
"@sveltejs/adapter-static": ["@sveltejs/adapter-static@3.0.10", "", { "peerDependencies": { "@sveltejs/kit": "^2.0.0" } }, "sha512-7D9lYFWJmB7zxZyTE/qxjksvMqzMuYrrsyh1f4AlZqeZeACPRySjbC3aFiY55wb1tWUaKOQG9PVbm74JcN2Iew=="],
|
||||
|
||||
"@sveltejs/kit": ["@sveltejs/kit@2.50.1", "", { "dependencies": { "@standard-schema/spec": "^1.0.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/cookie": "^0.6.0", "acorn": "^8.14.1", "cookie": "^0.6.0", "devalue": "^5.6.2", "esm-env": "^1.2.2", "kleur": "^4.1.5", "magic-string": "^0.30.5", "mrmime": "^2.0.0", "sade": "^1.8.1", "set-cookie-parser": "^2.6.0", "sirv": "^3.0.0" }, "peerDependencies": { "@opentelemetry/api": "^1.0.0", "@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0", "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": "^5.3.3", "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0" }, "optionalPeers": ["@opentelemetry/api", "typescript"], "bin": { "svelte-kit": "svelte-kit.js" } }, "sha512-XRHD2i3zC4ukhz2iCQzO4mbsts081PAZnnMAQ7LNpWeYgeBmwMsalf0FGSwhFXBbtr2XViPKnFJBDCckWqrsLw=="],
|
||||
|
||||
"@sveltejs/vite-plugin-svelte": ["@sveltejs/vite-plugin-svelte@5.1.1", "", { "dependencies": { "@sveltejs/vite-plugin-svelte-inspector": "^4.0.1", "debug": "^4.4.1", "deepmerge": "^4.3.1", "kleur": "^4.1.5", "magic-string": "^0.30.17", "vitefu": "^1.0.6" }, "peerDependencies": { "svelte": "^5.0.0", "vite": "^6.0.0" } }, "sha512-Y1Cs7hhTc+a5E9Va/xwKlAJoariQyHY+5zBgCZg4PFWNYQ1nMN9sjK1zhw1gK69DuqVP++sht/1GZg1aRwmAXQ=="],
|
||||
|
||||
"@sveltejs/vite-plugin-svelte-inspector": ["@sveltejs/vite-plugin-svelte-inspector@4.0.1", "", { "dependencies": { "debug": "^4.3.7" }, "peerDependencies": { "@sveltejs/vite-plugin-svelte": "^5.0.0", "svelte": "^5.0.0", "vite": "^6.0.0" } }, "sha512-J/Nmb2Q2y7mck2hyCX4ckVHcR5tu2J+MtBEQqpDrrgELZ2uvraQcK/ioCV61AqkdXFgriksOKIceDcQmqnGhVw=="],
|
||||
|
||||
"@swc/helpers": ["@swc/helpers@0.5.18", "", { "dependencies": { "tslib": "^2.8.0" } }, "sha512-TXTnIcNJQEKwThMMqBXsZ4VGAza6bvN4pa41Rkqoio6QBKMvo+5lexeTMScGCIxtzgQJzElcvIltani+adC5PQ=="],
|
||||
|
||||
"@tailwindcss/node": ["@tailwindcss/node@4.1.18", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "enhanced-resolve": "^5.18.3", "jiti": "^2.6.1", "lightningcss": "1.30.2", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", "tailwindcss": "4.1.18" } }, "sha512-DoR7U1P7iYhw16qJ49fgXUlry1t4CpXeErJHnQ44JgTSKMaZUdf17cfn5mHchfJ4KRBZRFA/Coo+MUF5+gOaCQ=="],
|
||||
|
||||
"@tailwindcss/oxide": ["@tailwindcss/oxide@4.1.18", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.18", "@tailwindcss/oxide-darwin-arm64": "4.1.18", "@tailwindcss/oxide-darwin-x64": "4.1.18", "@tailwindcss/oxide-freebsd-x64": "4.1.18", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.18", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.18", "@tailwindcss/oxide-linux-arm64-musl": "4.1.18", "@tailwindcss/oxide-linux-x64-gnu": "4.1.18", "@tailwindcss/oxide-linux-x64-musl": "4.1.18", "@tailwindcss/oxide-wasm32-wasi": "4.1.18", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.18", "@tailwindcss/oxide-win32-x64-msvc": "4.1.18" } }, "sha512-EgCR5tTS5bUSKQgzeMClT6iCY3ToqE1y+ZB0AKldj809QXk1Y+3jB0upOYZrn9aGIzPtUsP7sX4QQ4XtjBB95A=="],
|
||||
|
||||
"@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.18", "", { "os": "android", "cpu": "arm64" }, "sha512-dJHz7+Ugr9U/diKJA0W6N/6/cjI+ZTAoxPf9Iz9BFRF2GzEX8IvXxFIi/dZBloVJX/MZGvRuFA9rqwdiIEZQ0Q=="],
|
||||
|
||||
"@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.18", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Gc2q4Qhs660bhjyBSKgq6BYvwDz4G+BuyJ5H1xfhmDR3D8HnHCmT/BSkvSL0vQLy/nkMLY20PQ2OoYMO15Jd0A=="],
|
||||
|
||||
"@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.1.18", "", { "os": "darwin", "cpu": "x64" }, "sha512-FL5oxr2xQsFrc3X9o1fjHKBYBMD1QZNyc1Xzw/h5Qu4XnEBi3dZn96HcHm41c/euGV+GRiXFfh2hUCyKi/e+yw=="],
|
||||
|
||||
"@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.18", "", { "os": "freebsd", "cpu": "x64" }, "sha512-Fj+RHgu5bDodmV1dM9yAxlfJwkkWvLiRjbhuO2LEtwtlYlBgiAT4x/j5wQr1tC3SANAgD+0YcmWVrj8R9trVMA=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.18", "", { "os": "linux", "cpu": "arm" }, "sha512-Fp+Wzk/Ws4dZn+LV2Nqx3IilnhH51YZoRaYHQsVq3RQvEl+71VGKFpkfHrLM/Li+kt5c0DJe/bHXK1eHgDmdiA=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.18", "", { "os": "linux", "cpu": "arm64" }, "sha512-S0n3jboLysNbh55Vrt7pk9wgpyTTPD0fdQeh7wQfMqLPM/Hrxi+dVsLsPrycQjGKEQk85Kgbx+6+QnYNiHalnw=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.18", "", { "os": "linux", "cpu": "arm64" }, "sha512-1px92582HkPQlaaCkdRcio71p8bc8i/ap5807tPRDK/uw953cauQBT8c5tVGkOwrHMfc2Yh6UuxaH4vtTjGvHg=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.18", "", { "os": "linux", "cpu": "x64" }, "sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.18", "", { "os": "linux", "cpu": "x64" }, "sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ=="],
|
||||
|
||||
"@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.18", "", { "dependencies": { "@emnapi/core": "^1.7.1", "@emnapi/runtime": "^1.7.1", "@emnapi/wasi-threads": "^1.1.0", "@napi-rs/wasm-runtime": "^1.1.0", "@tybys/wasm-util": "^0.10.1", "tslib": "^2.4.0" }, "cpu": "none" }, "sha512-LffYTvPjODiP6PT16oNeUQJzNVyJl1cjIebq/rWWBF+3eDst5JGEFSc5cWxyRCJ0Mxl+KyIkqRxk1XPEs9x8TA=="],
|
||||
|
||||
"@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.18", "", { "os": "win32", "cpu": "arm64" }, "sha512-HjSA7mr9HmC8fu6bdsZvZ+dhjyGCLdotjVOgLA2vEqxEBZaQo9YTX4kwgEvPCpRh8o4uWc4J/wEoFzhEmjvPbA=="],
|
||||
|
||||
"@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.1.18", "", { "os": "win32", "cpu": "x64" }, "sha512-bJWbyYpUlqamC8dpR7pfjA0I7vdF6t5VpUGMWRkXVE3AXgIZjYUYAK7II1GNaxR8J1SSrSrppRar8G++JekE3Q=="],
|
||||
|
||||
"@tailwindcss/vite": ["@tailwindcss/vite@4.1.18", "", { "dependencies": { "@tailwindcss/node": "4.1.18", "@tailwindcss/oxide": "4.1.18", "tailwindcss": "4.1.18" }, "peerDependencies": { "vite": "^5.2.0 || ^6 || ^7" } }, "sha512-jVA+/UpKL1vRLg6Hkao5jldawNmRo7mQYrZtNHMIVpLfLhDml5nMRUo/8MwoX2vNXvnaXNNMedrMfMugAVX1nA=="],
|
||||
|
||||
"@tanstack/table-core": ["@tanstack/table-core@8.21.3", "", {}, "sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg=="],
|
||||
|
||||
"@types/chai": ["@types/chai@5.2.3", "", { "dependencies": { "@types/deep-eql": "*", "assertion-error": "^2.0.1" } }, "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA=="],
|
||||
|
||||
"@types/cookie": ["@types/cookie@0.6.0", "", {}, "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA=="],
|
||||
|
||||
"@types/deep-eql": ["@types/deep-eql@4.0.2", "", {}, "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw=="],
|
||||
|
||||
"@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
|
||||
|
||||
"@types/node": ["@types/node@25.1.0", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-t7frlewr6+cbx+9Ohpl0NOTKXZNV9xHRmNOvql47BFJKcEG1CxtxlPEEe+gR9uhVWM4DwhnvTF110mIL4yP9RA=="],
|
||||
|
||||
"@vitest/expect": ["@vitest/expect@3.2.4", "", { "dependencies": { "@types/chai": "^5.2.2", "@vitest/spy": "3.2.4", "@vitest/utils": "3.2.4", "chai": "^5.2.0", "tinyrainbow": "^2.0.0" } }, "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig=="],
|
||||
|
||||
"@vitest/mocker": ["@vitest/mocker@3.2.4", "", { "dependencies": { "@vitest/spy": "3.2.4", "estree-walker": "^3.0.3", "magic-string": "^0.30.17" }, "peerDependencies": { "msw": "^2.4.9", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "optionalPeers": ["msw", "vite"] }, "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ=="],
|
||||
|
||||
"@vitest/pretty-format": ["@vitest/pretty-format@3.2.4", "", { "dependencies": { "tinyrainbow": "^2.0.0" } }, "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA=="],
|
||||
|
||||
"@vitest/runner": ["@vitest/runner@3.2.4", "", { "dependencies": { "@vitest/utils": "3.2.4", "pathe": "^2.0.3", "strip-literal": "^3.0.0" } }, "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ=="],
|
||||
|
||||
"@vitest/snapshot": ["@vitest/snapshot@3.2.4", "", { "dependencies": { "@vitest/pretty-format": "3.2.4", "magic-string": "^0.30.17", "pathe": "^2.0.3" } }, "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ=="],
|
||||
|
||||
"@vitest/spy": ["@vitest/spy@3.2.4", "", { "dependencies": { "tinyspy": "^4.0.3" } }, "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw=="],
|
||||
|
||||
"@vitest/utils": ["@vitest/utils@3.2.4", "", { "dependencies": { "@vitest/pretty-format": "3.2.4", "loupe": "^3.1.4", "tinyrainbow": "^2.0.0" } }, "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA=="],
|
||||
|
||||
"acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="],
|
||||
|
||||
"agent-base": ["agent-base@7.1.4", "", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="],
|
||||
|
||||
"aria-query": ["aria-query@5.3.2", "", {}, "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw=="],
|
||||
|
||||
"assertion-error": ["assertion-error@2.0.1", "", {}, "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA=="],
|
||||
|
||||
"axobject-query": ["axobject-query@4.1.0", "", {}, "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="],
|
||||
|
||||
"bits-ui": ["bits-ui@1.8.0", "", { "dependencies": { "@floating-ui/core": "^1.6.4", "@floating-ui/dom": "^1.6.7", "@internationalized/date": "^3.5.6", "css.escape": "^1.5.1", "esm-env": "^1.1.2", "runed": "^0.23.2", "svelte-toolbelt": "^0.7.1", "tabbable": "^6.2.0" }, "peerDependencies": { "svelte": "^5.11.0" } }, "sha512-CXD6Orp7l8QevNDcRPLXc/b8iMVgxDWT2LyTwsdLzJKh9CxesOmPuNePSPqAxKoT59FIdU4aFPS1k7eBdbaCxg=="],
|
||||
|
||||
"cac": ["cac@6.7.14", "", {}, "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ=="],
|
||||
|
||||
"chai": ["chai@5.3.3", "", { "dependencies": { "assertion-error": "^2.0.1", "check-error": "^2.1.1", "deep-eql": "^5.0.1", "loupe": "^3.1.0", "pathval": "^2.0.0" } }, "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw=="],
|
||||
|
||||
"check-error": ["check-error@2.1.3", "", {}, "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA=="],
|
||||
|
||||
"chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="],
|
||||
|
||||
"clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="],
|
||||
|
||||
"cookie": ["cookie@0.6.0", "", {}, "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw=="],
|
||||
|
||||
"css.escape": ["css.escape@1.5.1", "", {}, "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg=="],
|
||||
|
||||
"cssstyle": ["cssstyle@4.6.0", "", { "dependencies": { "@asamuzakjp/css-color": "^3.2.0", "rrweb-cssom": "^0.8.0" } }, "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg=="],
|
||||
|
||||
"data-urls": ["data-urls@5.0.0", "", { "dependencies": { "whatwg-mimetype": "^4.0.0", "whatwg-url": "^14.0.0" } }, "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg=="],
|
||||
|
||||
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
|
||||
|
||||
"decimal.js": ["decimal.js@10.6.0", "", {}, "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg=="],
|
||||
|
||||
"deep-eql": ["deep-eql@5.0.2", "", {}, "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q=="],
|
||||
|
||||
"deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="],
|
||||
|
||||
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
|
||||
|
||||
"devalue": ["devalue@5.6.2", "", {}, "sha512-nPRkjWzzDQlsejL1WVifk5rvcFi/y1onBRxjaFMjZeR9mFpqu2gmAZ9xUB9/IEanEP/vBtGeGganC/GO1fmufg=="],
|
||||
|
||||
"enhanced-resolve": ["enhanced-resolve@5.18.4", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q=="],
|
||||
|
||||
"entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="],
|
||||
|
||||
"es-module-lexer": ["es-module-lexer@1.7.0", "", {}, "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA=="],
|
||||
|
||||
"esbuild": ["esbuild@0.25.12", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.12", "@esbuild/android-arm": "0.25.12", "@esbuild/android-arm64": "0.25.12", "@esbuild/android-x64": "0.25.12", "@esbuild/darwin-arm64": "0.25.12", "@esbuild/darwin-x64": "0.25.12", "@esbuild/freebsd-arm64": "0.25.12", "@esbuild/freebsd-x64": "0.25.12", "@esbuild/linux-arm": "0.25.12", "@esbuild/linux-arm64": "0.25.12", "@esbuild/linux-ia32": "0.25.12", "@esbuild/linux-loong64": "0.25.12", "@esbuild/linux-mips64el": "0.25.12", "@esbuild/linux-ppc64": "0.25.12", "@esbuild/linux-riscv64": "0.25.12", "@esbuild/linux-s390x": "0.25.12", "@esbuild/linux-x64": "0.25.12", "@esbuild/netbsd-arm64": "0.25.12", "@esbuild/netbsd-x64": "0.25.12", "@esbuild/openbsd-arm64": "0.25.12", "@esbuild/openbsd-x64": "0.25.12", "@esbuild/openharmony-arm64": "0.25.12", "@esbuild/sunos-x64": "0.25.12", "@esbuild/win32-arm64": "0.25.12", "@esbuild/win32-ia32": "0.25.12", "@esbuild/win32-x64": "0.25.12" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg=="],
|
||||
|
||||
"esm-env": ["esm-env@1.2.2", "", {}, "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA=="],
|
||||
|
||||
"esrap": ["esrap@2.2.2", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-zA6497ha+qKvoWIK+WM9NAh5ni17sKZKhbS5B3PoYbBvaYHZWoS33zmFybmyqpn07RLUxSmn+RCls2/XF+d0oQ=="],
|
||||
|
||||
"estree-walker": ["estree-walker@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.0" } }, "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g=="],
|
||||
|
||||
"expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
|
||||
|
||||
"fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
|
||||
|
||||
"fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
|
||||
|
||||
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
|
||||
|
||||
"html-encoding-sniffer": ["html-encoding-sniffer@4.0.0", "", { "dependencies": { "whatwg-encoding": "^3.1.1" } }, "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ=="],
|
||||
|
||||
"http-proxy-agent": ["http-proxy-agent@7.0.2", "", { "dependencies": { "agent-base": "^7.1.0", "debug": "^4.3.4" } }, "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="],
|
||||
|
||||
"https-proxy-agent": ["https-proxy-agent@7.0.6", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "4" } }, "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="],
|
||||
|
||||
"iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
|
||||
|
||||
"inline-style-parser": ["inline-style-parser@0.2.7", "", {}, "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA=="],
|
||||
|
||||
"is-potential-custom-element-name": ["is-potential-custom-element-name@1.0.1", "", {}, "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ=="],
|
||||
|
||||
"is-reference": ["is-reference@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.6" } }, "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw=="],
|
||||
|
||||
"jiti": ["jiti@2.6.1", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ=="],
|
||||
|
||||
"js-tokens": ["js-tokens@9.0.1", "", {}, "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ=="],
|
||||
|
||||
"jsdom": ["jsdom@26.1.0", "", { "dependencies": { "cssstyle": "^4.2.1", "data-urls": "^5.0.0", "decimal.js": "^10.5.0", "html-encoding-sniffer": "^4.0.0", "http-proxy-agent": "^7.0.2", "https-proxy-agent": "^7.0.6", "is-potential-custom-element-name": "^1.0.1", "nwsapi": "^2.2.16", "parse5": "^7.2.1", "rrweb-cssom": "^0.8.0", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", "tough-cookie": "^5.1.1", "w3c-xmlserializer": "^5.0.0", "webidl-conversions": "^7.0.0", "whatwg-encoding": "^3.1.1", "whatwg-mimetype": "^4.0.0", "whatwg-url": "^14.1.1", "ws": "^8.18.0", "xml-name-validator": "^5.0.0" }, "peerDependencies": { "canvas": "^3.0.0" }, "optionalPeers": ["canvas"] }, "sha512-Cvc9WUhxSMEo4McES3P7oK3QaXldCfNWp7pl2NNeiIFlCoLr3kfq9kb1fxftiwk1FLV7CvpvDfonxtzUDeSOPg=="],
|
||||
|
||||
"kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
|
||||
|
||||
"lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="],
|
||||
|
||||
"lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="],
|
||||
|
||||
"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA=="],
|
||||
|
||||
"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ=="],
|
||||
|
||||
"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA=="],
|
||||
|
||||
"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.2", "", { "os": "linux", "cpu": "arm" }, "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA=="],
|
||||
|
||||
"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A=="],
|
||||
|
||||
"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA=="],
|
||||
|
||||
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w=="],
|
||||
|
||||
"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA=="],
|
||||
|
||||
"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ=="],
|
||||
|
||||
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="],
|
||||
|
||||
"locate-character": ["locate-character@3.0.0", "", {}, "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA=="],
|
||||
|
||||
"loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="],
|
||||
|
||||
"lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
|
||||
|
||||
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
|
||||
|
||||
"mri": ["mri@1.2.0", "", {}, "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA=="],
|
||||
|
||||
"mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="],
|
||||
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
|
||||
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
|
||||
|
||||
"nwsapi": ["nwsapi@2.2.23", "", {}, "sha512-7wfH4sLbt4M0gCDzGE6vzQBo0bfTKjU7Sfpqy/7gs1qBfYz2vEJH6vXcBKpO3+6Yu1telwd0t9HpyOoLEQQbIQ=="],
|
||||
|
||||
"overlayscrollbars": ["overlayscrollbars@2.14.0", "", {}, "sha512-RjV0pqc79kYhQLC3vTcLRb5GLpI1n6qh0Oua3g+bGH4EgNOJHVBGP7u0zZtxoAa0dkHlAqTTSYRb9MMmxNLjig=="],
|
||||
|
||||
"overlayscrollbars-svelte": ["overlayscrollbars-svelte@0.5.5", "", { "peerDependencies": { "overlayscrollbars": "^2.0.0", "svelte": "^5.0.0" } }, "sha512-+dRW3YZSvFbKi5vDCpnUOHuoPLLSdu0BUVVMYZdmfVghu7XkafDRebG2y91/ImPqj6YDAUsz1rcWVYhCJSS/pQ=="],
|
||||
|
||||
"parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="],
|
||||
|
||||
"pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
|
||||
|
||||
"pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="],
|
||||
|
||||
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
|
||||
|
||||
"picomatch": ["picomatch@4.0.3", "", {}, "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="],
|
||||
|
||||
"postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
|
||||
|
||||
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
|
||||
|
||||
"readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
|
||||
|
||||
"rollup": ["rollup@4.57.0", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.57.0", "@rollup/rollup-android-arm64": "4.57.0", "@rollup/rollup-darwin-arm64": "4.57.0", "@rollup/rollup-darwin-x64": "4.57.0", "@rollup/rollup-freebsd-arm64": "4.57.0", "@rollup/rollup-freebsd-x64": "4.57.0", "@rollup/rollup-linux-arm-gnueabihf": "4.57.0", "@rollup/rollup-linux-arm-musleabihf": "4.57.0", "@rollup/rollup-linux-arm64-gnu": "4.57.0", "@rollup/rollup-linux-arm64-musl": "4.57.0", "@rollup/rollup-linux-loong64-gnu": "4.57.0", "@rollup/rollup-linux-loong64-musl": "4.57.0", "@rollup/rollup-linux-ppc64-gnu": "4.57.0", "@rollup/rollup-linux-ppc64-musl": "4.57.0", "@rollup/rollup-linux-riscv64-gnu": "4.57.0", "@rollup/rollup-linux-riscv64-musl": "4.57.0", "@rollup/rollup-linux-s390x-gnu": "4.57.0", "@rollup/rollup-linux-x64-gnu": "4.57.0", "@rollup/rollup-linux-x64-musl": "4.57.0", "@rollup/rollup-openbsd-x64": "4.57.0", "@rollup/rollup-openharmony-arm64": "4.57.0", "@rollup/rollup-win32-arm64-msvc": "4.57.0", "@rollup/rollup-win32-ia32-msvc": "4.57.0", "@rollup/rollup-win32-x64-gnu": "4.57.0", "@rollup/rollup-win32-x64-msvc": "4.57.0", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-e5lPJi/aui4TO1LpAXIRLySmwXSE8k3b9zoGfd42p67wzxog4WHjiZF3M2uheQih4DGyc25QEV4yRBbpueNiUA=="],
|
||||
|
||||
"rrweb-cssom": ["rrweb-cssom@0.8.0", "", {}, "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw=="],
|
||||
|
||||
"runed": ["runed@0.23.4", "", { "dependencies": { "esm-env": "^1.0.0" }, "peerDependencies": { "svelte": "^5.7.0" } }, "sha512-9q8oUiBYeXIDLWNK5DfCWlkL0EW3oGbk845VdKlPeia28l751VpfesaB/+7pI6rnbx1I6rqoZ2fZxptOJLxILA=="],
|
||||
|
||||
"sade": ["sade@1.8.1", "", { "dependencies": { "mri": "^1.1.0" } }, "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A=="],
|
||||
|
||||
"safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
|
||||
|
||||
"saxes": ["saxes@6.0.0", "", { "dependencies": { "xmlchars": "^2.2.0" } }, "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA=="],
|
||||
|
||||
"set-cookie-parser": ["set-cookie-parser@2.7.2", "", {}, "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw=="],
|
||||
|
||||
"siginfo": ["siginfo@2.0.0", "", {}, "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g=="],
|
||||
|
||||
"sirv": ["sirv@3.0.2", "", { "dependencies": { "@polka/url": "^1.0.0-next.24", "mrmime": "^2.0.0", "totalist": "^3.0.0" } }, "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g=="],
|
||||
|
||||
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
|
||||
|
||||
"stackback": ["stackback@0.0.2", "", {}, "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw=="],
|
||||
|
||||
"std-env": ["std-env@3.10.0", "", {}, "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg=="],
|
||||
|
||||
"strip-literal": ["strip-literal@3.1.0", "", { "dependencies": { "js-tokens": "^9.0.1" } }, "sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg=="],
|
||||
|
||||
"style-to-object": ["style-to-object@1.0.14", "", { "dependencies": { "inline-style-parser": "0.2.7" } }, "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw=="],
|
||||
|
||||
"svelte": ["svelte@5.49.0", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "devalue": "^5.6.2", "esm-env": "^1.2.1", "esrap": "^2.2.1", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-Fn2mCc3XX0gnnbBYzWOTrZHi5WnF9KvqmB1+KGlUWoJkdioPmFYtg2ALBr6xl2dcnFTz3Vi7/mHpbKSVg/imVg=="],
|
||||
|
||||
"svelte-check": ["svelte-check@4.3.5", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-e4VWZETyXaKGhpkxOXP+B/d0Fp/zKViZoJmneZWe/05Y2aqSKj3YN2nLfYPJBQ87WEiY4BQCQ9hWGu9mPT1a1Q=="],
|
||||
|
||||
"svelte-toolbelt": ["svelte-toolbelt@0.7.1", "", { "dependencies": { "clsx": "^2.1.1", "runed": "^0.23.2", "style-to-object": "^1.0.8" }, "peerDependencies": { "svelte": "^5.0.0" } }, "sha512-HcBOcR17Vx9bjaOceUvxkY3nGmbBmCBBbuWLLEWO6jtmWH8f/QoWmbyUfQZrpDINH39en1b8mptfPQT9VKQ1xQ=="],
|
||||
|
||||
"symbol-tree": ["symbol-tree@3.2.4", "", {}, "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw=="],
|
||||
|
||||
"tabbable": ["tabbable@6.4.0", "", {}, "sha512-05PUHKSNE8ou2dwIxTngl4EzcnsCDZGJ/iCLtDflR/SHB/ny14rXc+qU5P4mG9JkusiV7EivzY9Mhm55AzAvCg=="],
"tailwind-merge": ["tailwind-merge@3.4.0", "", {}, "sha512-uSaO4gnW+b3Y2aWoWfFpX62vn2sR3skfhbjsEnaBI81WD1wBLlHZe5sWf0AqjksNdYTbGBEd0UasQMT3SNV15g=="],
"tailwindcss": ["tailwindcss@4.1.18", "", {}, "sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw=="],
"tapable": ["tapable@2.3.0", "", {}, "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="],
"tinybench": ["tinybench@2.9.0", "", {}, "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg=="],
"tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="],
"tinyglobby": ["tinyglobby@0.2.15", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ=="],
"tinypool": ["tinypool@1.1.1", "", {}, "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg=="],
"tinyrainbow": ["tinyrainbow@2.0.0", "", {}, "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw=="],
"tinyspy": ["tinyspy@4.0.4", "", {}, "sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q=="],
"tldts": ["tldts@6.1.86", "", { "dependencies": { "tldts-core": "^6.1.86" }, "bin": { "tldts": "bin/cli.js" } }, "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ=="],
"tldts-core": ["tldts-core@6.1.86", "", {}, "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA=="],
"totalist": ["totalist@3.0.1", "", {}, "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ=="],
"tough-cookie": ["tough-cookie@5.1.2", "", { "dependencies": { "tldts": "^6.1.32" } }, "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A=="],
"tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="],
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
"vite": ["vite@6.4.1", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g=="],
"vite-node": ["vite-node@3.2.4", "", { "dependencies": { "cac": "^6.7.14", "debug": "^4.4.1", "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" } }, "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg=="],
"vitefu": ["vitefu@1.1.1", "", { "peerDependencies": { "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" }, "optionalPeers": ["vite"] }, "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ=="],
"vitest": ["vitest@3.2.4", "", { "dependencies": { "@types/chai": "^5.2.2", "@vitest/expect": "3.2.4", "@vitest/mocker": "3.2.4", "@vitest/pretty-format": "^3.2.4", "@vitest/runner": "3.2.4", "@vitest/snapshot": "3.2.4", "@vitest/spy": "3.2.4", "@vitest/utils": "3.2.4", "chai": "^5.2.0", "debug": "^4.4.1", "expect-type": "^1.2.1", "magic-string": "^0.30.17", "pathe": "^2.0.3", "picomatch": "^4.0.2", "std-env": "^3.9.0", "tinybench": "^2.9.0", "tinyexec": "^0.3.2", "tinyglobby": "^0.2.14", "tinypool": "^1.1.1", "tinyrainbow": "^2.0.0", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", "vite-node": "3.2.4", "why-is-node-running": "^2.3.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@types/debug": "^4.1.12", "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "@vitest/browser": "3.2.4", "@vitest/ui": "3.2.4", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@edge-runtime/vm", "@types/debug", "@types/node", "@vitest/browser", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A=="],
"w3c-xmlserializer": ["w3c-xmlserializer@5.0.0", "", { "dependencies": { "xml-name-validator": "^5.0.0" } }, "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA=="],
"webidl-conversions": ["webidl-conversions@7.0.0", "", {}, "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="],
"whatwg-encoding": ["whatwg-encoding@3.1.1", "", { "dependencies": { "iconv-lite": "0.6.3" } }, "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ=="],
"whatwg-mimetype": ["whatwg-mimetype@4.0.0", "", {}, "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg=="],
"whatwg-url": ["whatwg-url@14.2.0", "", { "dependencies": { "tr46": "^5.1.0", "webidl-conversions": "^7.0.0" } }, "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw=="],
"why-is-node-running": ["why-is-node-running@2.3.0", "", { "dependencies": { "siginfo": "^2.0.0", "stackback": "0.0.2" }, "bin": { "why-is-node-running": "cli.js" } }, "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w=="],
"ws": ["ws@8.19.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg=="],
"xml-name-validator": ["xml-name-validator@5.0.0", "", {}, "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg=="],
"xmlchars": ["xmlchars@2.2.0", "", {}, "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw=="],
"zimmerframe": ["zimmerframe@1.1.4", "", {}, "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.8.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.8.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/wasi-threads": ["@emnapi/wasi-threads@1.1.0", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ=="],
"@tailwindcss/oxide-wasm32-wasi/@napi-rs/wasm-runtime": ["@napi-rs/wasm-runtime@1.1.1", "", { "dependencies": { "@emnapi/core": "^1.7.1", "@emnapi/runtime": "^1.7.1", "@tybys/wasm-util": "^0.10.1" }, "bundled": true }, "sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A=="],
"@tailwindcss/oxide-wasm32-wasi/@tybys/wasm-util": ["@tybys/wasm-util@0.10.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg=="],
"@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
}
}
@@ -0,0 +1,39 @@
{
	"name": "banner-web",
	"private": true,
	"type": "module",
	"scripts": {
		"dev": "vite dev --port 3000",
		"build": "vite build",
		"preview": "vite preview",
		"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
		"test": "vitest run",
		"format": "biome format --write .",
		"format:check": "biome format ."
	},
	"devDependencies": {
		"@biomejs/biome": "^1.9.4",
		"@fontsource-variable/inter": "^5.2.5",
		"@lucide/svelte": "^0.563.0",
		"@sveltejs/adapter-static": "^3.0.8",
		"@sveltejs/kit": "^2.16.0",
		"@sveltejs/vite-plugin-svelte": "^5.0.3",
		"@tailwindcss/vite": "^4.0.0",
		"@tanstack/table-core": "^8.21.3",
		"@types/node": "^25.1.0",
		"bits-ui": "^1.3.7",
		"clsx": "^2.1.1",
		"jsdom": "^26.0.0",
		"svelte": "^5.19.0",
		"svelte-check": "^4.1.4",
		"tailwind-merge": "^3.0.1",
		"tailwindcss": "^4.0.0",
		"typescript": "^5.7.2",
		"vite": "^6.3.5",
		"vitest": "^3.0.5"
	},
	"dependencies": {
		"overlayscrollbars": "^2.14.0",
		"overlayscrollbars-svelte": "^0.5.5"
	}
}

Vendored
+11
@@ -0,0 +1,11 @@
/// <reference types="@sveltejs/kit" />

declare const __APP_VERSION__: string;

declare namespace App {
	// interface Error {}
	// interface Locals {}
	// interface PageData {}
	// interface PageState {}
	// interface Platform {}
}

@@ -0,0 +1,32 @@
<!doctype html>
<html lang="en" class="no-transition">
	<head>
		<meta charset="utf-8" />
		<meta name="viewport" content="width=device-width, initial-scale=1" />
		<link rel="icon" href="%sveltekit.assets%/favicon.ico" />
		<meta name="theme-color" content="#000000" />
		<meta
			name="description"
			content="Banner, a Discord bot and web interface for UTSA Course Monitoring"
		/>
		<link rel="apple-touch-icon" href="%sveltekit.assets%/logo192.png" />
		<link rel="manifest" href="%sveltekit.assets%/manifest.json" />
		<title>Banner</title>
		<script>
			(function () {
				var stored = localStorage.getItem("theme");
				var isDark =
					stored === "dark" ||
					(stored !== "light" &&
						window.matchMedia("(prefers-color-scheme: dark)").matches);
				if (isDark) {
					document.documentElement.classList.add("dark");
				}
			})();
		</script>
		%sveltekit.head%
	</head>
	<body data-sveltekit-preload-data="hover">
		<div style="display: contents">%sveltekit.body%</div>
	</body>
</html>

@@ -0,0 +1,144 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { BannerApiClient } from "./api";

global.fetch = vi.fn();

describe("BannerApiClient", () => {
	let apiClient: BannerApiClient;

	beforeEach(() => {
		apiClient = new BannerApiClient();
		vi.clearAllMocks();
	});

	it("should fetch status data", async () => {
		const mockStatus = {
			status: "active" as const,
			version: "0.3.4",
			commit: "abc1234",
			services: {
				web: { name: "web", status: "active" as const },
				database: { name: "database", status: "connected" as const },
			},
		};

		vi.mocked(fetch).mockResolvedValueOnce({
			ok: true,
			json: () => Promise.resolve(mockStatus),
		} as Response);

		const result = await apiClient.getStatus();

		expect(fetch).toHaveBeenCalledWith("/api/status");
		expect(result).toEqual(mockStatus);
	});

	it("should handle API errors", async () => {
		vi.mocked(fetch).mockResolvedValueOnce({
			ok: false,
			status: 500,
			statusText: "Internal Server Error",
		} as Response);

		await expect(apiClient.getStatus()).rejects.toThrow(
			"API request failed: 500 Internal Server Error"
		);
	});

	it("should search courses with all params", async () => {
		const mockResponse = {
			courses: [],
			totalCount: 0,
			offset: 0,
			limit: 25,
		};

		vi.mocked(fetch).mockResolvedValueOnce({
			ok: true,
			json: () => Promise.resolve(mockResponse),
		} as Response);

		const result = await apiClient.searchCourses({
			term: "202420",
			subjects: ["CS"],
			q: "data",
			open_only: true,
			limit: 25,
			offset: 50,
		});

		expect(fetch).toHaveBeenCalledWith(
			"/api/courses/search?term=202420&subject=CS&q=data&open_only=true&limit=25&offset=50"
		);
		expect(result).toEqual(mockResponse);
	});

	it("should search courses with minimal params", async () => {
		const mockResponse = {
			courses: [],
			totalCount: 0,
			offset: 0,
			limit: 25,
		};

		vi.mocked(fetch).mockResolvedValueOnce({
			ok: true,
			json: () => Promise.resolve(mockResponse),
		} as Response);

		await apiClient.searchCourses({ term: "202420" });

		expect(fetch).toHaveBeenCalledWith("/api/courses/search?term=202420");
	});

	it("should fetch terms", async () => {
		const mockTerms = [
			{ code: "202420", description: "Fall 2024" },
			{ code: "202510", description: "Spring 2025" },
		];

		vi.mocked(fetch).mockResolvedValueOnce({
			ok: true,
			json: () => Promise.resolve(mockTerms),
		} as Response);

		const result = await apiClient.getTerms();

		expect(fetch).toHaveBeenCalledWith("/api/terms");
		expect(result).toEqual(mockTerms);
	});

	it("should fetch subjects for a term", async () => {
		const mockSubjects = [
			{ code: "CS", description: "Computer Science" },
			{ code: "MAT", description: "Mathematics" },
		];

		vi.mocked(fetch).mockResolvedValueOnce({
			ok: true,
			json: () => Promise.resolve(mockSubjects),
		} as Response);

		const result = await apiClient.getSubjects("202420");

		expect(fetch).toHaveBeenCalledWith("/api/subjects?term=202420");
		expect(result).toEqual(mockSubjects);
	});

	it("should fetch reference data", async () => {
		const mockRef = [
			{ code: "F", description: "Face to Face" },
			{ code: "OL", description: "Online" },
		];

		vi.mocked(fetch).mockResolvedValueOnce({
			ok: true,
			json: () => Promise.resolve(mockRef),
		} as Response);

		const result = await apiClient.getReference("instructional_methods");

		expect(fetch).toHaveBeenCalledWith("/api/reference/instructional_methods");
		expect(result).toEqual(mockRef);
	});
});

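Each test above queues the same literal `{ ok: true, json: ... } as Response` object on the fetch mock. A small helper in this style (hypothetical, not part of the diff) would factor that out:

// Hypothetical helper (not in this diff): queue one JSON response on the mocked fetch.
// Assumes the vitest `vi` import and the `global.fetch = vi.fn()` setup shown above.
function mockJsonOnce(body: unknown, init: Partial<Response> = {}): void {
	vi.mocked(fetch).mockResolvedValueOnce({
		ok: true,
		json: () => Promise.resolve(body),
		...init,
	} as Response);
}

// Usage: mockJsonOnce(mockTerms); const result = await apiClient.getTerms();
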
@@ -0,0 +1,101 @@
import type {
	CodeDescription,
	CourseResponse,
	DbMeetingTime,
	InstructorResponse,
	SearchResponse as SearchResponseGenerated,
	ServiceInfo,
	ServiceStatus,
	StatusResponse,
} from "$lib/bindings";

const API_BASE_URL = "/api";

// Re-export generated types under their canonical names
export type {
	CodeDescription,
	CourseResponse,
	DbMeetingTime,
	InstructorResponse,
	ServiceInfo,
	ServiceStatus,
	StatusResponse,
};

// Semantic aliases — these all share the CodeDescription shape
export type Term = CodeDescription;
export type Subject = CodeDescription;
export type ReferenceEntry = CodeDescription;

// SearchResponse re-exported (aliased to strip the "Generated" suffix)
export type SearchResponse = SearchResponseGenerated;

// Client-side only — not generated from Rust
export type SortColumn = "course_code" | "title" | "instructor" | "time" | "seats";
export type SortDirection = "asc" | "desc";

export interface SearchParams {
	term: string;
	subjects?: string[];
	q?: string;
	open_only?: boolean;
	limit?: number;
	offset?: number;
	sort_by?: SortColumn;
	sort_dir?: SortDirection;
}

export class BannerApiClient {
	private baseUrl: string;
	private fetchFn: typeof fetch;

	constructor(baseUrl: string = API_BASE_URL, fetchFn: typeof fetch = fetch) {
		this.baseUrl = baseUrl;
		this.fetchFn = fetchFn;
	}

	private async request<T>(endpoint: string): Promise<T> {
		const response = await this.fetchFn(`${this.baseUrl}${endpoint}`);

		if (!response.ok) {
			throw new Error(`API request failed: ${response.status} ${response.statusText}`);
		}

		return (await response.json()) as T;
	}

	async getStatus(): Promise<StatusResponse> {
		return this.request<StatusResponse>("/status");
	}

	async searchCourses(params: SearchParams): Promise<SearchResponse> {
		const query = new URLSearchParams();
		query.set("term", params.term);
		if (params.subjects) {
			for (const s of params.subjects) {
				query.append("subject", s);
			}
		}
		if (params.q) query.set("q", params.q);
		if (params.open_only) query.set("open_only", "true");
		if (params.limit !== undefined) query.set("limit", String(params.limit));
		if (params.offset !== undefined) query.set("offset", String(params.offset));
		if (params.sort_by) query.set("sort_by", params.sort_by);
		if (params.sort_dir) query.set("sort_dir", params.sort_dir);
		return this.request<SearchResponse>(`/courses/search?${query.toString()}`);
	}

	async getTerms(): Promise<Term[]> {
		return this.request<Term[]>("/terms");
	}

	async getSubjects(termCode: string): Promise<Subject[]> {
		return this.request<Subject[]>(`/subjects?term=${encodeURIComponent(termCode)}`);
	}

	async getReference(category: string): Promise<ReferenceEntry[]> {
		return this.request<ReferenceEntry[]>(`/reference/${encodeURIComponent(category)}`);
	}
}

export const client = new BannerApiClient();

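For context on the `fetchFn` constructor parameter above: it exists so SvelteKit code can pass the load-scoped `fetch` instead of the global one. A minimal usage sketch, assuming a hypothetical `+page.ts` route (this wiring is not part of the diff):

// Sketch only: wire BannerApiClient into a SvelteKit load function so SSR
// reuses the request-scoped fetch. Route and return shape are assumptions.
import { BannerApiClient } from "$lib/api";
import type { PageLoad } from "./$types";

export const load: PageLoad = async ({ fetch }) => {
	const api = new BannerApiClient("/api", fetch);
	const terms = await api.getTerms();
	const results = await api.searchCourses({ term: terms[0].code, open_only: true });
	return { terms, results };
};
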
@@ -0,0 +1,8 @@
export type { CodeDescription } from "./CodeDescription";
export type { CourseResponse } from "./CourseResponse";
export type { DbMeetingTime } from "./DbMeetingTime";
export type { InstructorResponse } from "./InstructorResponse";
export type { SearchResponse } from "./SearchResponse";
export type { ServiceInfo } from "./ServiceInfo";
export type { ServiceStatus } from "./ServiceStatus";
export type { StatusResponse } from "./StatusResponse";

@@ -0,0 +1,226 @@
<script lang="ts">
	import type { CourseResponse } from "$lib/api";
	import {
		formatTime,
		formatCreditHours,
		formatDate,
		formatMeetingDaysLong,
		isMeetingTimeTBA,
		isTimeTBA,
		ratingColor,
	} from "$lib/course";
	import { useClipboard } from "$lib/composables/useClipboard.svelte";
	import { cn, tooltipContentClass } from "$lib/utils";
	import { Tooltip } from "bits-ui";
	import SimpleTooltip from "./SimpleTooltip.svelte";
	import { Info, Copy, Check } from "@lucide/svelte";

	let { course }: { course: CourseResponse } = $props();

	const clipboard = useClipboard();
</script>

<div class="bg-muted/60 p-5 text-sm border-b border-border">
	<div class="grid grid-cols-1 sm:grid-cols-2 gap-5">
		<!-- Instructors -->
		<div>
			<h4 class="text-sm text-foreground mb-2">
				Instructors
			</h4>
			{#if course.instructors.length > 0}
				<div class="flex flex-wrap gap-1.5">
					{#each course.instructors as instructor}
						<Tooltip.Root delayDuration={200}>
							<Tooltip.Trigger>
								<span
									class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
								>
									{instructor.displayName}
									{#if instructor.rmpRating != null}
										{@const rating = instructor.rmpRating}
										<span
											class="text-[10px] font-semibold {ratingColor(rating)}"
										>{rating.toFixed(1)}★</span>
									{/if}
								</span>
							</Tooltip.Trigger>
							<Tooltip.Content
								sideOffset={6}
								class={cn(tooltipContentClass, "px-3 py-2")}
							>
								<div class="space-y-1.5">
									<div class="font-medium">{instructor.displayName}</div>
									{#if instructor.isPrimary}
										<div class="text-muted-foreground">Primary instructor</div>
									{/if}
									{#if instructor.rmpRating != null}
										<div class="text-muted-foreground">
											{instructor.rmpRating.toFixed(1)}/5 ({instructor.rmpNumRatings ?? 0} ratings)
										</div>
									{/if}
									{#if instructor.email}
										<button
											onclick={(e) => clipboard.copy(instructor.email!, e)}
											class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
										>
											{#if clipboard.copiedValue === instructor.email}
												<Check class="size-3" />
												<span>Copied!</span>
											{:else}
												<Copy class="size-3" />
												<span>{instructor.email}</span>
											{/if}
										</button>
									{/if}
								</div>
							</Tooltip.Content>
						</Tooltip.Root>
					{/each}
				</div>
			{:else}
				<span class="text-muted-foreground italic">Staff</span>
			{/if}
		</div>

		<!-- Meeting Times -->
		<div>
			<h4 class="text-sm text-foreground mb-2">
				Meeting Times
			</h4>
			{#if course.meetingTimes.length > 0}
				<ul class="space-y-2">
					{#each course.meetingTimes as mt}
						<li>
							{#if isMeetingTimeTBA(mt) && isTimeTBA(mt)}
								<span class="italic text-muted-foreground">TBA</span>
							{:else}
								<div class="flex items-baseline gap-1.5">
									{#if !isMeetingTimeTBA(mt)}
										<span class="font-medium text-foreground">
											{formatMeetingDaysLong(mt)}
										</span>
									{/if}
									{#if !isTimeTBA(mt)}
										<span class="text-muted-foreground">
											{formatTime(mt.begin_time)}–{formatTime(mt.end_time)}
										</span>
									{:else}
										<span class="italic text-muted-foreground">Time TBA</span>
									{/if}
								</div>
							{/if}
							{#if mt.building || mt.room}
								<div class="text-xs text-muted-foreground mt-0.5">
									{mt.building_description ?? mt.building}{mt.room ? ` ${mt.room}` : ""}
								</div>
							{/if}
							<div class="text-xs text-muted-foreground/70 mt-0.5">
								{formatDate(mt.start_date)} – {formatDate(mt.end_date)}
							</div>
						</li>
					{/each}
				</ul>
			{:else}
				<span class="italic text-muted-foreground">TBA</span>
			{/if}
		</div>

		<!-- Delivery -->
		<div>
			<h4 class="text-sm text-foreground mb-2">
				<span class="inline-flex items-center gap-1">
					Delivery
					<SimpleTooltip text="How the course is taught: in-person, online, hybrid, etc." delay={150} passthrough>
						<Info class="size-3 text-muted-foreground/50" />
					</SimpleTooltip>
				</span>
			</h4>
			<span class="text-foreground">
				{course.instructionalMethod ?? "—"}
				{#if course.campus}
					<span class="text-muted-foreground"> · {course.campus}</span>
				{/if}
			</span>
		</div>

		<!-- Credits -->
		<div>
			<h4 class="text-sm text-foreground mb-2">
				Credits
			</h4>
			<span class="text-foreground">{formatCreditHours(course)}</span>
		</div>

		<!-- Attributes -->
		{#if course.attributes.length > 0}
			<div>
				<h4 class="text-sm text-foreground mb-2">
					<span class="inline-flex items-center gap-1">
						Attributes
						<SimpleTooltip text="Course flags for degree requirements, core curriculum, or special designations" delay={150} passthrough>
							<Info class="size-3 text-muted-foreground/50" />
						</SimpleTooltip>
					</span>
				</h4>
				<div class="flex flex-wrap gap-1.5">
					{#each course.attributes as attr}
						<SimpleTooltip text="Course attribute code" delay={150} passthrough>
							<span
								class="inline-flex text-xs font-medium bg-card border border-border rounded-md px-2 py-0.5 text-muted-foreground hover:text-foreground hover:border-foreground/20 transition-colors"
							>
								{attr}
							</span>
						</SimpleTooltip>
					{/each}
				</div>
			</div>
		{/if}

		<!-- Cross-list -->
		{#if course.crossList}
			<div>
				<h4 class="text-sm text-foreground mb-2">
					<span class="inline-flex items-center gap-1">
						Cross-list
						<SimpleTooltip text="Cross-listed sections share enrollment across multiple course numbers. Students in any linked section attend the same class." delay={150} passthrough>
							<Info class="size-3 text-muted-foreground/50" />
						</SimpleTooltip>
					</span>
				</h4>
				<Tooltip.Root delayDuration={150} disableHoverableContent>
					<Tooltip.Trigger>
						<span class="inline-flex items-center gap-1.5 text-foreground font-mono">
							<span class="bg-card border border-border rounded-md px-2 py-0.5 text-xs font-medium">
								{course.crossList}
							</span>
							{#if course.crossListCount != null && course.crossListCapacity != null}
								<span class="text-muted-foreground text-xs">
									{course.crossListCount}/{course.crossListCapacity}
								</span>
							{/if}
						</span>
					</Tooltip.Trigger>
					<Tooltip.Content
						sideOffset={6}
						class={tooltipContentClass}
					>
						Group <span class="font-mono font-medium">{course.crossList}</span>
						{#if course.crossListCount != null && course.crossListCapacity != null}
							— {course.crossListCount} enrolled across {course.crossListCapacity} shared seats
						{/if}
					</Tooltip.Content>
				</Tooltip.Root>
			</div>
		{/if}

		<!-- Waitlist -->
		{#if course.waitCapacity > 0}
			<div>
				<h4 class="text-sm text-foreground mb-2">
					Waitlist
				</h4>
				<span class="text-foreground">{course.waitCount} / {course.waitCapacity}</span>
			</div>
		{/if}
	</div>
</div>

@@ -0,0 +1,684 @@
<script lang="ts">
	import type { CourseResponse } from "$lib/api";
	import {
		abbreviateInstructor,
		concernAccentColor,
		formatLocationDisplay,
		formatLocationTooltip,
		formatMeetingDays,
		formatMeetingTimesTooltip,
		formatTimeRange,
		getDeliveryConcern,
		getPrimaryInstructor,
		isMeetingTimeTBA,
		isTimeTBA,
		openSeats,
		seatsColor,
		seatsDotColor,
		ratingColor,
	} from "$lib/course";
	import { useClipboard } from "$lib/composables/useClipboard.svelte";
	import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
	import CourseDetail from "./CourseDetail.svelte";
	import { fade, fly, slide } from "svelte/transition";
	import { flip } from "svelte/animate";
	import { createSvelteTable, FlexRender } from "$lib/components/ui/data-table/index.js";
	import {
		getCoreRowModel,
		getSortedRowModel,
		type ColumnDef,
		type SortingState,
		type VisibilityState,
		type Updater,
	} from "@tanstack/table-core";
	import { ArrowUp, ArrowDown, ArrowUpDown, Columns3, Check, RotateCcw } from "@lucide/svelte";
	import { DropdownMenu, ContextMenu } from "bits-ui";
	import SimpleTooltip from "./SimpleTooltip.svelte";

	let {
		courses,
		loading,
		sorting = [],
		onSortingChange,
		manualSorting = false,
		subjectMap = {},
	}: {
		courses: CourseResponse[];
		loading: boolean;
		sorting?: SortingState;
		onSortingChange?: (sorting: SortingState) => void;
		manualSorting?: boolean;
		subjectMap?: Record<string, string>;
	} = $props();

	let expandedCrn: string | null = $state(null);
	let tableWrapper: HTMLDivElement = undefined!;
	const clipboard = useClipboard(1000);

	// Collapse expanded row when the dataset changes to avoid stale detail rows
	// and FLIP position calculation glitches from lingering expanded content
	$effect(() => {
		courses; // track dependency
		expandedCrn = null;
	});

	useOverlayScrollbars(() => tableWrapper, {
		overflow: { x: "scroll", y: "hidden" },
		scrollbars: { autoHide: "never" },
	});

	// Column visibility state
	let columnVisibility: VisibilityState = $state({});

	function resetColumnVisibility() {
		columnVisibility = {};
	}

	function handleVisibilityChange(updater: Updater<VisibilityState>) {
		const newVisibility = typeof updater === "function" ? updater(columnVisibility) : updater;
		columnVisibility = newVisibility;
	}

	// visibleColumnIds and hasCustomVisibility derived after column definitions below

	function toggleRow(crn: string) {
		expandedCrn = expandedCrn === crn ? null : crn;
	}

	function primaryInstructorDisplay(course: CourseResponse): string {
		const primary = getPrimaryInstructor(course.instructors);
		if (!primary) return "Staff";
		return abbreviateInstructor(primary.displayName);
	}

	function primaryRating(course: CourseResponse): { rating: number; count: number } | null {
		const primary = getPrimaryInstructor(course.instructors);
		if (!primary?.rmpRating) return null;
		return { rating: primary.rmpRating, count: primary.rmpNumRatings ?? 0 };
	}

	function timeIsTBA(course: CourseResponse): boolean {
		if (course.meetingTimes.length === 0) return true;
		const mt = course.meetingTimes[0];
		return isMeetingTimeTBA(mt) && isTimeTBA(mt);
	}

	// Column definitions
	const columns: ColumnDef<CourseResponse, unknown>[] = [
		{
			id: "crn",
			accessorKey: "crn",
			header: "CRN",
			enableSorting: false,
		},
		{
			id: "course_code",
			accessorFn: (row) => `${row.subject} ${row.courseNumber}`,
			header: "Course",
			enableSorting: true,
		},
		{
			id: "title",
			accessorKey: "title",
			header: "Title",
			enableSorting: true,
		},
		{
			id: "instructor",
			accessorFn: (row) => primaryInstructorDisplay(row),
			header: "Instructor",
			enableSorting: true,
		},
		{
			id: "time",
			accessorFn: (row) => {
				if (row.meetingTimes.length === 0) return "";
				const mt = row.meetingTimes[0];
				return `${formatMeetingDays(mt)} ${formatTimeRange(mt.begin_time, mt.end_time)}`;
			},
			header: "Time",
			enableSorting: true,
		},
		{
			id: "location",
			accessorFn: (row) => formatLocationDisplay(row) ?? "",
			header: "Location",
			enableSorting: false,
		},
		{
			id: "seats",
			accessorFn: (row) => openSeats(row),
			header: "Seats",
			enableSorting: true,
		},
	];

	/** Column IDs that are currently visible */
	let visibleColumnIds = $derived(
		columns.map((c) => c.id!).filter((id) => columnVisibility[id] !== false)
	);

	let hasCustomVisibility = $derived(Object.values(columnVisibility).some((v) => v === false));

	function handleSortingChange(updater: Updater<SortingState>) {
		const newSorting = typeof updater === "function" ? updater(sorting) : updater;
		onSortingChange?.(newSorting);
	}

	const table = createSvelteTable({
		get data() {
			return courses;
		},
		getRowId: (row) => String(row.crn),
		columns,
		state: {
			get sorting() {
				return sorting;
			},
			get columnVisibility() {
				return columnVisibility;
			},
		},
		onSortingChange: handleSortingChange,
		onColumnVisibilityChange: handleVisibilityChange,
		getCoreRowModel: getCoreRowModel(),
		get getSortedRowModel() {
			return manualSorting ? undefined : getSortedRowModel<CourseResponse>();
		},
		get manualSorting() {
			return manualSorting;
		},
		enableSortingRemoval: true,
	});
</script>

{#snippet columnVisibilityGroup(
	Group: typeof DropdownMenu.Group,
	GroupHeading: typeof DropdownMenu.GroupHeading,
	CheckboxItem: typeof DropdownMenu.CheckboxItem,
	Separator: typeof DropdownMenu.Separator,
	Item: typeof DropdownMenu.Item,
)}
	<Group>
		<GroupHeading class="px-2 py-1.5 text-xs font-medium text-muted-foreground">
			Toggle columns
		</GroupHeading>
		{#each columns as col}
			{@const id = col.id!}
			{@const label = typeof col.header === "string" ? col.header : id}
			<CheckboxItem
				checked={columnVisibility[id] !== false}
				closeOnSelect={false}
				onCheckedChange={(checked) => {
					columnVisibility = {
						...columnVisibility,
						[id]: checked,
					};
				}}
				class="relative flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-highlighted:bg-accent data-highlighted:text-accent-foreground"
			>
				{#snippet children({ checked })}
					<span class="flex size-4 items-center justify-center rounded-sm border border-border">
						{#if checked}
							<Check class="size-3" />
						{/if}
					</span>
					{label}
				{/snippet}
			</CheckboxItem>
		{/each}
	</Group>
	{#if hasCustomVisibility}
		<Separator class="mx-1 my-1 h-px bg-border" />
		<Item
			class="flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-highlighted:bg-accent data-highlighted:text-accent-foreground"
			onSelect={resetColumnVisibility}
		>
			<RotateCcw class="size-3.5" />
			Reset to default
		</Item>
	{/if}
{/snippet}

<!-- Toolbar: View columns button -->
<div class="flex items-center justify-end pb-2">
	<DropdownMenu.Root>
		<DropdownMenu.Trigger
			class="inline-flex items-center gap-1.5 rounded-md border border-border bg-background px-2.5 py-1.5 text-xs font-medium text-muted-foreground hover:bg-accent hover:text-accent-foreground transition-colors cursor-pointer"
		>
			<Columns3 class="size-3.5" />
			View
		</DropdownMenu.Trigger>
		<DropdownMenu.Portal>
			<DropdownMenu.Content
				class="z-50 min-w-40 rounded-md border border-border bg-card p-1 text-card-foreground shadow-lg"
				align="end"
				sideOffset={4}
				forceMount
			>
				{#snippet child({ wrapperProps, props, open })}
					{#if open}
						<div {...wrapperProps}>
							<div {...props} transition:fly={{ duration: 150, y: -10 }}>
								{@render columnVisibilityGroup(
									DropdownMenu.Group,
									DropdownMenu.GroupHeading,
									DropdownMenu.CheckboxItem,
									DropdownMenu.Separator,
									DropdownMenu.Item,
								)}
							</div>
						</div>
					{/if}
				{/snippet}
			</DropdownMenu.Content>
		</DropdownMenu.Portal>
	</DropdownMenu.Root>
</div>

<!-- Table with context menu on header -->
<div bind:this={tableWrapper} class="overflow-x-auto">
	<ContextMenu.Root>
		<ContextMenu.Trigger class="contents">
			<table class="w-full min-w-160 border-collapse text-sm">
				<thead>
					{#each table.getHeaderGroups() as headerGroup}
						<tr class="border-b border-border text-left text-muted-foreground">
							{#each headerGroup.headers as header}
								{#if header.column.getIsVisible()}
									<th
										class="py-2 px-2 font-medium {header.id === 'seats' ? 'text-right' : ''}"
										class:cursor-pointer={header.column.getCanSort()}
										class:select-none={header.column.getCanSort()}
										onclick={header.column.getToggleSortingHandler()}
									>
										{#if header.column.getCanSort()}
											<span class="inline-flex items-center gap-1">
												{#if typeof header.column.columnDef.header === "string"}
													{header.column.columnDef.header}
												{:else}
													<FlexRender
														content={header.column.columnDef.header}
														context={header.getContext()}
													/>
												{/if}
												{#if header.column.getIsSorted() === "asc"}
													<ArrowUp class="size-3.5" />
												{:else if header.column.getIsSorted() === "desc"}
													<ArrowDown class="size-3.5" />
												{:else}
													<ArrowUpDown class="size-3.5 text-muted-foreground/40" />
												{/if}
											</span>
										{:else if typeof header.column.columnDef.header === "string"}
											{header.column.columnDef.header}
										{:else}
											<FlexRender
												content={header.column.columnDef.header}
												context={header.getContext()}
											/>
										{/if}
									</th>
								{/if}
							{/each}
						</tr>
					{/each}
				</thead>
				{#if loading && courses.length === 0}
					<tbody>
						{#each Array(5) as _}
							<tr class="border-b border-border">
								{#each table.getVisibleLeafColumns() as col}
									<td class="py-2.5 px-2">
										<div
											class="h-4 bg-muted rounded animate-pulse {col.id === 'seats'
												? 'w-14 ml-auto'
												: col.id === 'title'
													? 'w-40'
													: col.id === 'crn'
														? 'w-10'
														: 'w-20'}"
										></div>
									</td>
								{/each}
							</tr>
						{/each}
					</tbody>
				{:else if courses.length === 0}
					<tbody>
						<tr>
							<td
								colspan={visibleColumnIds.length}
								class="py-12 text-center text-muted-foreground"
							>
								No courses found. Try adjusting your filters.
							</td>
						</tr>
					</tbody>
				{:else}
					{#each table.getRowModel().rows as row, i (row.id)}
						{@const course = row.original}
						<tbody
							animate:flip={{ duration: 300 }}
							in:fade={{ duration: 200, delay: Math.min(i * 20, 400) }}
							out:fade={{ duration: 150 }}
						>
							<tr
								class="border-b border-border cursor-pointer hover:bg-muted/50 transition-colors whitespace-nowrap {expandedCrn === course.crn ? 'bg-muted/30' : ''}"
								onclick={() => toggleRow(course.crn)}
							>
								{#each row.getVisibleCells() as cell (cell.id)}
									{@const colId = cell.column.id}
									{#if colId === "crn"}
										<td class="py-2 px-2 relative">
											<button
												class="relative inline-flex items-center rounded-full px-2 py-0.5 border border-border/50 bg-muted/20 hover:bg-muted/40 hover:border-foreground/30 transition-colors duration-150 cursor-copy focus-visible:outline-2 focus-visible:outline-offset-1 focus-visible:outline-ring font-mono text-xs text-muted-foreground/70"
												onclick={(e) => clipboard.copy(course.crn, e)}
												onkeydown={(e) => {
													if (e.key === "Enter" || e.key === " ") {
														e.preventDefault();
														clipboard.copy(course.crn, e);
													}
												}}
												aria-label="Copy CRN {course.crn} to clipboard"
											>
												{course.crn}
												{#if clipboard.copiedValue === course.crn}
													<span
														class="absolute -top-8 left-1/2 -translate-x-1/2 whitespace-nowrap text-xs px-2 py-1 rounded-md bg-green-500/10 border border-green-500/20 text-green-700 dark:text-green-300 pointer-events-none z-10"
														in:fade={{ duration: 100 }}
														out:fade={{ duration: 200 }}
													>
														Copied!
													</span>
												{/if}
											</button>
										</td>
									{:else if colId === "course_code"}
										{@const subjectDesc = subjectMap[course.subject]}
										<td class="py-2 px-2 whitespace-nowrap">
											<SimpleTooltip
												text={subjectDesc
													? `${subjectDesc} ${course.courseNumber}`
													: `${course.subject} ${course.courseNumber}`}
												delay={200}
												side="bottom"
												passthrough
											>
												<span class="font-semibold"
													>{course.subject}
													{course.courseNumber}</span
												>{#if course.sequenceNumber}<span
														class="text-muted-foreground"
														>-{course.sequenceNumber}</span
													>{/if}
											</SimpleTooltip>
										</td>
									{:else if colId === "title"}
										<td class="py-2 px-2 font-medium max-w-50 truncate">
											<SimpleTooltip
												text={course.title}
												delay={200}
												side="bottom"
												passthrough
											>
												<span class="block truncate">{course.title}</span>
											</SimpleTooltip>
										</td>
									{:else if colId === "instructor"}
										{@const primary = getPrimaryInstructor(course.instructors)}
										{@const display = primaryInstructorDisplay(course)}
										{@const commaIdx = display.indexOf(", ")}
										{@const ratingData = primaryRating(course)}
										<td class="py-2 px-2 whitespace-nowrap">
											{#if display === "Staff"}
												<span class="text-xs text-muted-foreground/60 uppercase">Staff</span>
											{:else}
												<SimpleTooltip
													text={primary?.displayName ?? "Staff"}
													delay={200}
													side="bottom"
													passthrough
												>
													{#if commaIdx !== -1}
														<span>{display.slice(0, commaIdx)},
															<span class="text-muted-foreground">{display.slice(commaIdx + 1)}</span
														></span>
													{:else}
														<span>{display}</span>
													{/if}
												</SimpleTooltip>
											{/if}
											{#if ratingData}
												<SimpleTooltip
													text="{ratingData.rating.toFixed(1)}/5 ({ratingData.count} ratings on RateMyProfessors)"
													delay={150}
													side="bottom"
													passthrough
												>
													<span
														class="ml-1 text-xs font-medium {ratingColor(ratingData.rating)}"
														>{ratingData.rating.toFixed(1)}★</span
													>
												</SimpleTooltip>
											{/if}
										</td>
									{:else if colId === "time"}
										<td class="py-2 px-2 whitespace-nowrap">
											<SimpleTooltip
												text={formatMeetingTimesTooltip(course.meetingTimes)}
												passthrough
											>
												{#if timeIsTBA(course)}
													<span class="text-xs text-muted-foreground/60">TBA</span>
												{:else}
													{@const mt = course.meetingTimes[0]}
													<span>
														{#if !isMeetingTimeTBA(mt)}
															<span class="font-mono font-medium">{formatMeetingDays(mt)}</span>
															{" "}
														{/if}
														{#if !isTimeTBA(mt)}
															<span class="text-muted-foreground">{formatTimeRange(mt.begin_time, mt.end_time)}</span>
														{:else}
															<span class="text-xs text-muted-foreground/60">TBA</span>
														{/if}
														{#if course.meetingTimes.length > 1}
															<span class="ml-1 text-xs text-muted-foreground/70 font-medium">+{course.meetingTimes.length - 1}</span>
														{/if}
													</span>
												{/if}
											</SimpleTooltip>
										</td>
									{:else if colId === "location"}
										{@const concern = getDeliveryConcern(course)}
										{@const accentColor = concernAccentColor(concern)}
										{@const locTooltip = formatLocationTooltip(course)}
										{@const locDisplay = formatLocationDisplay(course)}
										<td class="py-2 px-2 whitespace-nowrap">
											{#if locTooltip}
												<SimpleTooltip text={locTooltip} delay={200} passthrough>
													<span
														class="text-muted-foreground"
														class:pl-2={accentColor !== null}
														style:border-left={accentColor ? `2px solid ${accentColor}` : undefined}
													>
														{locDisplay ?? "—"}
													</span>
												</SimpleTooltip>
											{:else if locDisplay}
												<span class="text-muted-foreground">
													{locDisplay}
												</span>
											{:else}
												<span class="text-xs text-muted-foreground/50">—</span>
											{/if}
										</td>
									{:else if colId === "seats"}
										<td class="py-2 px-2 text-right whitespace-nowrap">
											<SimpleTooltip
												text="{openSeats(course)} of {course.maxEnrollment} seats open, {course.enrollment} enrolled{course.waitCount > 0 ? `, ${course.waitCount} waitlisted` : ''}"
												delay={200}
												side="left"
												passthrough
											>
												<span class="inline-flex items-center gap-1.5">
													<span class="size-1.5 rounded-full {seatsDotColor(course)} shrink-0"></span>
													<span class="{seatsColor(course)} font-medium tabular-nums"
														>{#if openSeats(course) === 0}Full{:else}{openSeats(course)} open{/if}</span
													>
													<span class="text-muted-foreground/60 tabular-nums"
														>{course.enrollment}/{course.maxEnrollment}{#if course.waitCount > 0}
															· WL {course.waitCount}/{course.waitCapacity}{/if}</span
													>
												</span>
											</SimpleTooltip>
										</td>
									{/if}
								{/each}
							</tr>
							{#if expandedCrn === course.crn}
								<tr>
									<td colspan={visibleColumnIds.length} class="p-0">
										<div transition:slide={{ duration: 200 }}>
											<CourseDetail {course} />
										</div>
									</td>
								</tr>
							{/if}
						</tbody>
					{/each}
				{/if}
			</table>
		</ContextMenu.Trigger>
		<ContextMenu.Portal>
			<ContextMenu.Content
				class="z-50 min-w-40 rounded-md border border-border bg-card p-1 text-card-foreground shadow-lg"
				forceMount
			>
				{#snippet child({ wrapperProps, props, open })}
					{#if open}
						<div {...wrapperProps}>
							<div {...props} in:fade={{ duration: 100 }} out:fade={{ duration: 100 }}>
								{@render columnVisibilityGroup(
									ContextMenu.Group,
									ContextMenu.GroupHeading,
									ContextMenu.CheckboxItem,
									ContextMenu.Separator,
									ContextMenu.Item,
								)}
							</div>
						</div>
					{/if}
				{/snippet}
			</ContextMenu.Content>
		</ContextMenu.Portal>
	</ContextMenu.Root>
</div>

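The `manualSorting` flag above switches the table between client-side sorting (TanStack's `getSortedRowModel`) and sorting delegated to the caller. One plausible parent-side wiring, sketched under the assumption that sorting is forwarded to the search API (none of this is in the diff):

// Assumed parent-side handler: translate TanStack sorting state into the
// search API's sort_by / sort_dir params. The column ids match SortColumn.
import { client, type SearchResponse, type SortColumn } from "$lib/api";
import type { SortingState } from "@tanstack/table-core";

async function onSortingChange(next: SortingState): Promise<SearchResponse> {
	const sort = next[0]; // single-column sorting
	return client.searchCourses({
		term: "202420", // placeholder term for the sketch
		sort_by: sort?.id as SortColumn | undefined,
		sort_dir: sort ? (sort.desc ? "desc" : "asc") : undefined,
	});
}
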
@@ -0,0 +1,36 @@
<script lang="ts">
	import { cn } from "$lib/utils";

	let {
		commitHash,
		showStatusLink = true,
		class: className,
	}: {
		commitHash?: string | null;
		showStatusLink?: boolean;
		class?: string;
	} = $props();
</script>

<div class={cn("flex justify-center items-center gap-2 mt-auto pt-6 pb-4", className)}>
	{#if __APP_VERSION__}
		<span class="text-xs text-muted-foreground">v{__APP_VERSION__}</span>
		<div class="w-px h-3 bg-muted-foreground opacity-30"></div>
	{/if}
	<a
		href={commitHash
			? `https://github.com/Xevion/banner/commit/${commitHash}`
			: "https://github.com/Xevion/banner"}
		target="_blank"
		rel="noopener noreferrer"
		class="text-xs text-muted-foreground no-underline hover:underline"
	>
		GitHub
	</a>
	{#if showStatusLink}
		<div class="w-px h-3 bg-muted-foreground opacity-30"></div>
		<a href="/health" class="text-xs text-muted-foreground no-underline hover:underline">
			Status
		</a>
	{/if}
</div>

@@ -0,0 +1,162 @@
<script lang="ts">
	import { Select } from "bits-ui";
	import { ChevronUp, ChevronDown } from "@lucide/svelte";
	import { fly } from "svelte/transition";

	let {
		totalCount,
		offset,
		limit,
		onPageChange,
	}: {
		totalCount: number;
		offset: number;
		limit: number;
		onPageChange: (newOffset: number) => void;
	} = $props();

	const currentPage = $derived(Math.floor(offset / limit) + 1);
	const totalPages = $derived(Math.ceil(totalCount / limit));
	const start = $derived(offset + 1);
	const end = $derived(Math.min(offset + limit, totalCount));

	// Track direction for slide animation
	let prevPage = $state(1);
	let direction = $state(0);

	$effect(() => {
		const page = currentPage;
		if (page !== prevPage) {
			direction = page > prevPage ? 1 : -1;
			prevPage = page;
		}
	});

	// 5 page slots: current-2, current-1, current, current+1, current+2
	const pageSlots = $derived([-2, -1, 0, 1, 2].map((delta) => currentPage + delta));

	function isSlotVisible(page: number): boolean {
		return page >= 1 && page <= totalPages;
	}

	function goToPage(page: number) {
		onPageChange((page - 1) * limit);
	}

	// Build items array for the Select dropdown
	const pageItems = $derived(
		Array.from({ length: totalPages }, (_, i) => ({
			value: String(i + 1),
			label: String(i + 1),
		}))
	);

	const selectValue = $derived(String(currentPage));
</script>

{#if totalCount > 0 && totalPages > 1}
	<div class="flex items-center text-sm">
		<!-- Left zone: result count -->
		<div class="flex-1">
			<span class="text-muted-foreground">
				Showing {start}–{end} of {totalCount} courses
			</span>
		</div>

		<!-- Center zone: page buttons -->
		<div class="flex items-center gap-1">
			{#key currentPage}
				{#each pageSlots as page, i (i)}
					{#if i === 2}
						<!-- Center slot: current page with dropdown trigger -->
						<Select.Root
							type="single"
							value={selectValue}
							onValueChange={(v) => {
								if (v) goToPage(Number(v));
							}}
							items={pageItems}
						>
							<Select.Trigger
								class="inline-flex items-center justify-center gap-1 w-auto min-w-9 h-9 px-2.5
									rounded-md text-sm font-medium tabular-nums
									border border-border bg-card text-foreground
									hover:bg-muted/50 active:bg-muted transition-colors
									cursor-pointer select-none outline-none
									focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background"
								aria-label="Page {currentPage} of {totalPages}, click to select page"
							>
								<span in:fly={{ x: direction * 20, duration: 200 }}>{currentPage}</span>
								<ChevronUp class="size-3 text-muted-foreground" />
							</Select.Trigger>
							<Select.Portal>
								<Select.Content
									class="border border-border bg-card shadow-md outline-hidden z-50
										max-h-72 min-w-16 w-auto
										select-none rounded-md p-1
										data-[state=open]:animate-in data-[state=closed]:animate-out
										data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0
										data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95
										data-[side=top]:slide-in-from-bottom-2
										data-[side=bottom]:slide-in-from-top-2"
									side="top"
									sideOffset={6}
								>
									<Select.ScrollUpButton class="flex w-full items-center justify-center py-0.5">
										<ChevronUp class="size-3.5 text-muted-foreground" />
									</Select.ScrollUpButton>
									<Select.Viewport class="p-0.5">
										{#each pageItems as item (item.value)}
											<Select.Item
												class="rounded-sm outline-hidden flex h-8 w-full select-none items-center
													justify-center px-3 text-sm tabular-nums
													data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground
													data-[selected]:font-semibold"
												value={item.value}
												label={item.label}
											>
												{item.label}
											</Select.Item>
										{/each}
									</Select.Viewport>
									<Select.ScrollDownButton class="flex w-full items-center justify-center py-0.5">
										<ChevronDown class="size-3.5 text-muted-foreground" />
									</Select.ScrollDownButton>
								</Select.Content>
							</Select.Portal>
						</Select.Root>
					{:else}
						<!-- Side slot: navigable page button or invisible placeholder -->
						<button
							class="inline-flex items-center justify-center w-9 h-9
								rounded-md text-sm tabular-nums
								text-muted-foreground
								hover:bg-muted/50 hover:text-foreground active:bg-muted transition-colors
								cursor-pointer select-none
								focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background
								{isSlotVisible(page) ? '' : 'invisible pointer-events-none'}"
							onclick={() => goToPage(page)}
							aria-label="Go to page {page}"
							aria-hidden={!isSlotVisible(page)}
							tabindex={isSlotVisible(page) ? 0 : -1}
							disabled={!isSlotVisible(page)}
							in:fly={{ x: direction * 20, duration: 200 }}
						>
							{page}
						</button>
					{/if}
				{/each}
			{/key}
		</div>

		<!-- Right zone: spacer for centering -->
		<div class="flex-1"></div>
	</div>
{:else if totalCount > 0}
	<!-- Single page: just show the count, no pagination controls -->
	<div class="flex items-center text-sm">
		<span class="text-muted-foreground">
			Showing {start}–{end} of {totalCount} courses
		</span>
	</div>
{/if}

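The page arithmetic in the component above is easy to sanity-check with concrete numbers:

// Worked example of the derived values, with offset = 50, limit = 25, totalCount = 120:
// currentPage = Math.floor(50 / 25) + 1 = 3
// totalPages  = Math.ceil(120 / 25)     = 5
// start..end  = 51..75                  -> "Showing 51–75 of 120 courses"
// pageSlots   = [1, 2, 3, 4, 5]         (slot index 2 is the Select trigger)
// goToPage(5) -> onPageChange((5 - 1) * 25) = onPageChange(100)
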
@@ -0,0 +1,45 @@
<script lang="ts">
	import type { Term, Subject } from "$lib/api";
	import SimpleTooltip from "./SimpleTooltip.svelte";
	import TermCombobox from "./TermCombobox.svelte";
	import SubjectCombobox from "./SubjectCombobox.svelte";

	let {
		terms,
		subjects,
		selectedTerm = $bindable(),
		selectedSubjects = $bindable(),
		query = $bindable(),
		openOnly = $bindable(),
	}: {
		terms: Term[];
		subjects: Subject[];
		selectedTerm: string;
		selectedSubjects: string[];
		query: string;
		openOnly: boolean;
	} = $props();
</script>

<div class="flex flex-wrap gap-3 items-start">
	<TermCombobox {terms} bind:value={selectedTerm} />

	<SubjectCombobox {subjects} bind:value={selectedSubjects} />

	<input
		type="text"
		placeholder="Search courses..."
		aria-label="Search courses"
		bind:value={query}
		class="h-9 border border-border bg-card text-foreground rounded-md px-3 text-sm flex-1 min-w-[200px]
			focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background
			transition-colors"
	/>

	<SimpleTooltip text="Show only courses with available seats" delay={200} passthrough>
		<label class="flex items-center gap-1.5 h-9 text-sm text-muted-foreground cursor-pointer">
			<input type="checkbox" bind:checked={openOnly} />
			Open only
		</label>
	</SimpleTooltip>
</div>

Some files were not shown because too many files have changed in this diff.