Mirror of https://github.com/Xevion/banner.git, synced 2025-12-15 08:11:11 -06:00

Compare commits: 9da48b9985...master (73 commits)
Commits:
966732a6d2 3292d35521 71ac0782d0 1c6d2d4b6e 51f8256e61 b1ed2434f8 47c23459f1 8af9b0a1a2
020a00254f 45de5be60d 8384f418c8 3dca896a35 1b7d2d2824 e370008d75 176574343f 91899bb109
08ae54c093 33b8681b19 398a1b9474 a732ff9a15 bfcd868337 99f0d0bc49 8b7729788d 27b0cb877e
8ec2f7d36f 28a8a15b6b 19b3a98f66 b64aa41b14 64449e8976 2e0fefa5ee 97488494fb b3322636a9
878cc5f773 94fb6b4190 e3b638a7d8 404a52e64c a917315967 9d51fde893 79fc931077 f3861a60c4
26b1a88860 27ac9a7302 1d345ed247 6f831f5fa6 ac2638dd9a cfb847f2e5 e7d47f1f96 9a48587479
624247ee14 430e2a255b bbc78131ec 77ab71d4d5 9d720bb0a7 dcc564dee6 4ca55a1fd4 a6e7adcaef
752c855dec 14b02df8f4 00cb209052 dfc05a2789 fe798e1867 39688f800f b2b4bb67f0 e5d8cec2d6
e9a0558535 353c36bcf2 2f853a7de9 dd212c3239 8ff3a18c3e 43647096e9 1bdbd1d6d6 23be6035ed
139e4aa635
.dockerignore | 51 (new file)

@@ -0,0 +1,51 @@
+# Build artifacts
+target/
+**/target/
+
+# Documentation
+README.md
+docs/
+*.md
+
+# Old Go codebase
+go/
+
+# Development configuration
+bacon.toml
+.env
+.env.*
+!.env.example
+
+# CI/CD
+.github/
+.git/
+
+# Development tools
+Justfile
+rust-toolchain.toml
+
+# Frontend build artifacts and cache
+web/node_modules/
+web/dist/
+web/.vite/
+web/.tanstack/
+web/.vscode/
+
+# IDE and editor files
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# OS files
+.DS_Store
+Thumbs.db
+
+# Test coverage
+coverage/
+*.profdata
+*.profraw
+
+# SQLx offline mode (include this in builds)
+!.sqlx/
.github/workflows/ci.yml | 65 (vendored, new file)

@@ -0,0 +1,65 @@
+name: CI
+
+on:
+  push:
+    branches: [master]
+  pull_request:
+    branches: [master]
+
+env:
+  CARGO_TERM_COLOR: always
+  RUST_BACKTRACE: 1
+
+jobs:
+  check:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@stable
+        with:
+          components: rustfmt, clippy
+
+      - name: Setup Bun
+        uses: oven-sh/setup-bun@v1
+        with:
+          bun-version: latest
+
+      - name: Cache Rust dependencies
+        uses: Swatinem/rust-cache@v2
+        with:
+          cache-on-failure: true
+
+      - name: Install frontend dependencies
+        working-directory: web
+        run: bun install --frozen-lockfile
+
+      - name: Check Rust formatting
+        run: cargo fmt --all -- --check
+
+      - name: Check TypeScript formatting
+        working-directory: web
+        run: bun run format:check
+
+      - name: TypeScript type check
+        working-directory: web
+        run: bun run typecheck
+
+      - name: ESLint
+        working-directory: web
+        run: bun run lint
+
+      - name: Clippy
+        run: cargo clippy --all-features -- --deny warnings
+
+      - name: Run tests
+        run: cargo test --all-features
+
+      - name: Build frontend
+        working-directory: web
+        run: bun run build
+
+      - name: Build backend
+        run: cargo build --release --bin banner
.gitignore | 4 (vendored)

@@ -1,3 +1,5 @@
 .env
 /target
 /go/
+.cargo/config.toml
+src/scraper/README.md
.vscode/settings.json | 3 (vendored, new file)

@@ -0,0 +1,3 @@
+{
+    "rust-analyzer.check.command": "clippy"
+}
Cargo.lock | 456 (generated)

@@ -32,12 +32,6 @@ version = "0.2.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
 
-[[package]]
-name = "android-tzdata"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
-
 [[package]]
 name = "android_system_properties"
 version = "0.1.5"
@@ -47,6 +41,56 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "anstream"
+version = "0.6.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is_terminal_polyfill",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2"
+dependencies = [
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "3.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a"
+dependencies = [
+ "anstyle",
+ "once_cell_polyfill",
+ "windows-sys 0.60.2",
+]
+
 [[package]]
 name = "anyhow"
 version = "1.0.99"
@@ -174,34 +218,48 @@ dependencies = [
 
 [[package]]
 name = "banner"
-version = "0.1.0"
+version = "0.3.4"
 dependencies = [
  "anyhow",
  "async-trait",
  "axum",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "chrono",
- "chrono-tz",
+ "clap",
  "compile-time",
+ "cookie",
+ "dashmap 6.1.0",
  "dotenvy",
  "figment",
  "fundu",
+ "futures",
  "governor",
+ "http 1.3.1",
+ "mime_guess",
+ "num-format",
+ "once_cell",
  "poise",
  "rand 0.9.2",
- "redis",
+ "rapidhash",
  "regex",
  "reqwest 0.12.23",
+ "reqwest-middleware",
+ "rust-embed",
  "serde",
  "serde_json",
+ "serde_path_to_error",
  "serenity",
  "sqlx",
  "thiserror 2.0.16",
  "time",
+ "tl",
  "tokio",
+ "tokio-util",
+ "tower-http",
  "tracing",
  "tracing-subscriber",
  "url",
+ "yansi",
 ]
 
 [[package]]
@@ -230,9 +288,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
 [[package]]
 name = "bitflags"
-version = "2.9.3"
+version = "2.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34efbcccd345379ca2868b2b2c9d3782e9cc58ba87bc7d79d5b53d9c9ae6f25d"
+checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394"
 dependencies = [
  "serde",
 ]
@@ -246,6 +304,16 @@ dependencies = [
  "generic-array",
 ]
 
+[[package]]
+name = "bstr"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
+dependencies = [
+ "memchr",
+ "serde",
+]
+
 [[package]]
 name = "bumpalo"
 version = "3.19.0"
@@ -324,43 +392,64 @@ checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9"
 
 [[package]]
 name = "chrono"
-version = "0.4.41"
+version = "0.4.42"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d"
+checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2"
 dependencies = [
- "android-tzdata",
  "iana-time-zone",
  "js-sys",
  "num-traits",
  "serde",
  "wasm-bindgen",
- "windows-link",
+ "windows-link 0.2.0",
 ]
 
 [[package]]
-name = "chrono-tz"
-version = "0.10.4"
+name = "clap"
+version = "4.5.47"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3"
+checksum = "7eac00902d9d136acd712710d71823fb8ac8004ca445a89e73a41d45aa712931"
 dependencies = [
- "chrono",
- "phf",
+ "clap_builder",
+ "clap_derive",
 ]
 
 [[package]]
-name = "combine"
-version = "4.6.7"
+name = "clap_builder"
+version = "4.5.47"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd"
+checksum = "2ad9bbf750e73b5884fb8a211a9424a1906c1e156724260fdae972f31d70e1d6"
 dependencies = [
- "bytes",
- "futures-core",
- "memchr",
- "pin-project-lite",
- "tokio",
- "tokio-util",
+ "anstream",
+ "anstyle",
+ "clap_lex",
+ "strsim",
+]
+
+[[package]]
+name = "clap_derive"
+version = "4.5.47"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.106",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
+
+[[package]]
+name = "colorchoice"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
 
 [[package]]
 name = "command_attr"
 version = "0.5.3"
@@ -595,9 +684,9 @@ dependencies = [
 
 [[package]]
 name = "deranged"
-version = "0.4.0"
+version = "0.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
+checksum = "d630bccd429a5bb5a64b5e94f693bfc48c9f8566418fda4c494cc94f911f87cc"
 dependencies = [
  "powerfmt",
  "serde",
@@ -817,6 +906,7 @@ checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
 dependencies = [
  "futures-channel",
  "futures-core",
+ "futures-executor",
  "futures-io",
  "futures-sink",
  "futures-task",
@@ -970,6 +1060,19 @@ version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
 
+[[package]]
+name = "globset"
+version = "0.4.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5"
+dependencies = [
+ "aho-corasick",
+ "bstr",
+ "log",
+ "regex-automata",
+ "regex-syntax",
+]
+
 [[package]]
 name = "governor"
 version = "0.10.1"
@@ -1152,6 +1255,12 @@ dependencies = [
  "pin-project-lite",
 ]
 
+[[package]]
+name = "http-range-header"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c"
+
 [[package]]
 name = "httparse"
 version = "1.10.1"
@@ -1442,7 +1551,7 @@ version = "0.7.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "cfg-if",
  "libc",
 ]
@@ -1463,6 +1572,12 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "is_terminal_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
+
 [[package]]
 name = "itoa"
 version = "1.0.15"
@@ -1512,7 +1627,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "libc",
  "redox_syscall",
 ]
@@ -1563,11 +1678,11 @@ checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
 
 [[package]]
 name = "matchers"
-version = "0.1.0"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
 dependencies = [
- "regex-automata 0.1.10",
+ "regex-automata",
 ]
 
 [[package]]
@@ -1668,22 +1783,11 @@ checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21"
 
 [[package]]
 name = "nu-ansi-term"
-version = "0.46.0"
+version = "0.50.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399"
 dependencies = [
- "overload",
- "winapi",
-]
-
-[[package]]
-name = "num-bigint"
-version = "0.4.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
-dependencies = [
- "num-integer",
- "num-traits",
+ "windows-sys 0.52.0",
 ]
 
 [[package]]
@@ -1709,6 +1813,16 @@ version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
 
+[[package]]
+name = "num-format"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3"
+dependencies = [
+ "arrayvec",
+ "itoa",
+]
+
 [[package]]
 name = "num-integer"
 version = "0.1.46"
@@ -1754,13 +1868,19 @@ version = "1.21.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
 
+[[package]]
+name = "once_cell_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
+
 [[package]]
 name = "openssl"
 version = "0.10.73"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "cfg-if",
  "foreign-types",
  "libc",
@@ -1798,12 +1918,6 @@ dependencies = [
  "vcpkg",
 ]
 
-[[package]]
-name = "overload"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
-
 [[package]]
 name = "parking"
 version = "2.2.1"
@@ -1871,24 +1985,6 @@ version = "2.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
 
-[[package]]
-name = "phf"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7"
-dependencies = [
- "phf_shared",
-]
-
-[[package]]
-name = "phf_shared"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981"
-dependencies = [
- "siphasher",
-]
-
 [[package]]
 name = "pin-project-lite"
 version = "0.2.16"
@@ -2031,7 +2127,7 @@ version = "0.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "memchr",
  "unicase",
 ]
@@ -2066,17 +2162,6 @@ version = "5.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
 
-[[package]]
-name = "r2d2"
-version = "0.8.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93"
-dependencies = [
- "log",
- "parking_lot",
- "scheduled-thread-pool",
-]
-
 [[package]]
 name = "rand"
 version = "0.8.5"
@@ -2137,35 +2222,21 @@ dependencies = [
 ]
 
 [[package]]
-name = "raw-cpuid"
-version = "11.5.0"
+name = "rapidhash"
+version = "4.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c6df7ab838ed27997ba19a4664507e6f82b41fe6e20be42929332156e5e85146"
+checksum = "164772177ee16e3b074e6019c63cd92cb3cecf38e8c40d097675958b86dd8084"
 dependencies = [
- "bitflags 2.9.3",
+ "rustversion",
 ]
 
 [[package]]
-name = "redis"
-version = "0.32.5"
+name = "raw-cpuid"
+version = "11.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7cd3650deebc68526b304898b192fa4102a4ef0b9ada24da096559cb60e0eef8"
+checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186"
 dependencies = [
- "bytes",
- "cfg-if",
- "combine",
- "futures-util",
- "itoa",
- "num-bigint",
- "percent-encoding",
- "pin-project-lite",
- "r2d2",
- "ryu",
- "sha1_smol",
- "socket2 0.6.0",
- "tokio",
- "tokio-util",
- "url",
+ "bitflags 2.9.4",
 ]
 
 [[package]]
@@ -2174,7 +2245,7 @@ version = "0.5.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
 ]
 
 [[package]]
@@ -2185,17 +2256,8 @@ checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata 0.4.10",
- "regex-syntax 0.8.6",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
-dependencies = [
- "regex-syntax 0.6.29",
+ "regex-automata",
+ "regex-syntax",
 ]
 
 [[package]]
@@ -2206,15 +2268,9 @@ checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax 0.8.6",
+ "regex-syntax",
 ]
 
-[[package]]
-name = "regex-syntax"
-version = "0.6.29"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
-
 [[package]]
 name = "regex-syntax"
 version = "0.8.6"
@@ -2307,6 +2363,21 @@ dependencies = [
  "web-sys",
 ]
 
+[[package]]
+name = "reqwest-middleware"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57f17d28a6e6acfe1733fe24bcd30774d13bffa4b8a22535b4c8c98423088d4e"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "http 1.3.1",
+ "reqwest 0.12.23",
+ "serde",
+ "thiserror 1.0.69",
+ "tower-service",
+]
+
 [[package]]
 name = "ring"
 version = "0.17.14"
@@ -2341,6 +2412,41 @@ dependencies = [
  "zeroize",
 ]
 
+[[package]]
+name = "rust-embed"
+version = "8.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "025908b8682a26ba8d12f6f2d66b987584a4a87bc024abc5bbc12553a8cd178a"
+dependencies = [
+ "rust-embed-impl",
+ "rust-embed-utils",
+ "walkdir",
+]
+
+[[package]]
+name = "rust-embed-impl"
+version = "8.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6065f1a4392b71819ec1ea1df1120673418bf386f50de1d6f54204d836d4349c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "rust-embed-utils",
+ "syn 2.0.106",
+ "walkdir",
+]
+
+[[package]]
+name = "rust-embed-utils"
+version = "8.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6cc0c81648b20b70c491ff8cce00c1c3b223bb8ed2b5d41f0e54c6c4c0a3594"
+dependencies = [
+ "globset",
+ "sha2",
+ "walkdir",
+]
+
 [[package]]
 name = "rustc-demangle"
 version = "0.1.26"
@@ -2362,7 +2468,7 @@ version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "errno",
  "libc",
  "linux-raw-sys",
@@ -2489,15 +2595,6 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
-[[package]]
-name = "scheduled-thread-pool"
-version = "0.2.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19"
-dependencies = [
- "parking_lot",
-]
-
 [[package]]
 name = "scopeguard"
 version = "1.2.0"
@@ -2530,7 +2627,7 @@ version = "2.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "core-foundation",
  "core-foundation-sys",
  "libc",
@@ -2637,7 +2734,7 @@ dependencies = [
  "arrayvec",
  "async-trait",
  "base64 0.22.1",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "bytes",
  "chrono",
  "command_attr",
@@ -2676,12 +2773,6 @@ dependencies = [
  "digest",
 ]
 
-[[package]]
-name = "sha1_smol"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d"
-
 [[package]]
 name = "sha2"
 version = "0.10.9"
@@ -2727,12 +2818,6 @@ dependencies = [
  "rand_core 0.6.4",
 ]
 
-[[package]]
-name = "siphasher"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
-
 [[package]]
 name = "skeptic"
 version = "0.13.7"
@@ -2907,7 +2992,7 @@ checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
 dependencies = [
  "atoi",
  "base64 0.22.1",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "byteorder",
  "bytes",
  "chrono",
@@ -2950,7 +3035,7 @@ checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
 dependencies = [
  "atoi",
  "base64 0.22.1",
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "byteorder",
  "chrono",
  "crc",
@@ -3105,7 +3190,7 @@ version = "0.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "core-foundation",
  "system-configuration-sys 0.6.0",
 ]
@@ -3200,12 +3285,11 @@ dependencies = [
 
 [[package]]
 name = "time"
-version = "0.3.41"
+version = "0.3.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40"
+checksum = "83bde6f1ec10e72d583d91623c939f623002284ef622b87de38cfd546cbf2031"
 dependencies = [
  "deranged",
- "itoa",
  "num-conv",
  "powerfmt",
  "serde",
@@ -3215,15 +3299,15 @@ dependencies = [
 
 [[package]]
 name = "time-core"
-version = "0.1.4"
+version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
+checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"
 
 [[package]]
 name = "time-macros"
-version = "0.2.22"
+version = "0.2.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49"
+checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3"
 dependencies = [
  "num-conv",
  "time-core",
@@ -3254,6 +3338,12 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
+[[package]]
+name = "tl"
+version = "0.7.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b130bd8a58c163224b44e217b4239ca7b927d82bf6cc2fea1fc561d15056e3f7"
+
 [[package]]
 name = "tokio"
 version = "1.47.1"
@@ -3429,16 +3519,26 @@ version = "0.6.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
 dependencies = [
- "bitflags 2.9.3",
+ "bitflags 2.9.4",
  "bytes",
+ "futures-core",
  "futures-util",
  "http 1.3.1",
  "http-body 1.0.1",
+ "http-body-util",
+ "http-range-header",
+ "httpdate",
  "iri-string",
+ "mime",
+ "mime_guess",
+ "percent-encoding",
  "pin-project-lite",
+ "tokio",
+ "tokio-util",
  "tower",
  "tower-layer",
  "tower-service",
+ "tracing",
 ]
 
 [[package]]
@@ -3509,14 +3609,14 @@ dependencies = [
 
 [[package]]
 name = "tracing-subscriber"
-version = "0.3.19"
+version = "0.3.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
+checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
 dependencies = [
  "matchers",
  "nu-ansi-term",
  "once_cell",
- "regex",
+ "regex-automata",
  "serde",
  "serde_json",
  "sharded-slab",
@@ -3674,6 +3774,12 @@ version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
 
+[[package]]
+name = "utf8parse"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
+
 [[package]]
 name = "uwl"
 version = "0.6.0"
@@ -3915,7 +4021,7 @@ checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3"
 dependencies = [
  "windows-implement",
  "windows-interface",
- "windows-link",
+ "windows-link 0.1.3",
  "windows-result",
  "windows-strings",
 ]
@@ -3948,13 +4054,19 @@ version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
 
+[[package]]
+name = "windows-link"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65"
+
 [[package]]
 name = "windows-registry"
 version = "0.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e"
 dependencies = [
- "windows-link",
+ "windows-link 0.1.3",
  "windows-result",
  "windows-strings",
 ]
@@ -3965,7 +4077,7 @@ version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6"
 dependencies = [
- "windows-link",
+ "windows-link 0.1.3",
 ]
 
 [[package]]
@@ -3974,7 4086,7 @@ version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57"
 dependencies = [
- "windows-link",
+ "windows-link 0.1.3",
 ]
 
 [[package]]
@@ -4050,7 +4162,7 @@ version = "0.53.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91"
 dependencies = [
- "windows-link",
+ "windows-link 0.1.3",
  "windows_aarch64_gnullvm 0.53.0",
  "windows_aarch64_msvc 0.53.0",
  "windows_i686_gnu 0.53.0",
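Notable in the lockfile churn above: `redis` (with its `r2d2`, `combine`, and `sha1_smol` subtree) and `chrono-tz`/`phf` drop out, while `clap`, `rust-embed`, `rapidhash`, `reqwest-middleware`, and the `tl` HTML parser arrive. As a rough illustration of what `tl` offers a scraper, a minimal parse; the markup and selector below are invented for illustration, not taken from the repository:

```rust
// Hedged sketch of tl 0.7's HTML parsing; the HTML snippet is invented.
fn main() {
    let html = r#"<div class="course"><span class="crn">26123</span></div>"#;
    let dom = tl::parse(html, tl::ParserOptions::default()).expect("parse failed");
    let parser = dom.parser();

    // query_selector yields an iterator of node handles matching the selector.
    if let Some(node) = dom.query_selector(".crn").and_then(|mut hits| hits.next()) {
        let text = node.get(parser).expect("node resolves").inner_text(parser);
        println!("CRN: {text}");
    }
}
```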
Cargo.toml | 44

@@ -1,34 +1,60 @@
 [package]
 name = "banner"
-version = "0.1.0"
+version = "0.3.4"
 edition = "2024"
+default-run = "banner"
 
 [dependencies]
 anyhow = "1.0.99"
 async-trait = "0.1"
 axum = "0.8.4"
-bitflags = { version = "2.9.3", features = ["serde"] }
-chrono = { version = "0.4", features = ["serde"] }
-chrono-tz = "0.10.4"
+bitflags = { version = "2.9.4", features = ["serde"] }
+chrono = { version = "0.4.42", features = ["serde"] }
 compile-time = "0.2.0"
+cookie = "0.18.1"
+dashmap = "6.1.0"
 dotenvy = "0.15.7"
 figment = { version = "0.10.19", features = ["toml", "env"] }
 fundu = "2.0.1"
-governor = "0.10.1"
+futures = "0.3"
+http = "1.3.1"
 poise = "0.6.1"
 rand = "0.9.2"
-redis = { version = "0.32.5", features = ["tokio-comp", "r2d2"] }
 regex = "1.10"
 reqwest = { version = "0.12.23", features = ["json", "cookies"] }
+reqwest-middleware = { version = "0.4.2", features = ["json"] }
 serde = { version = "1.0.219", features = ["derive"] }
 serde_json = "1.0.143"
 serenity = { version = "0.12.4", features = ["rustls_backend"] }
-sqlx = { version = "0.8.6", features = ["runtime-tokio-rustls", "postgres", "chrono", "json", "macros"] }
+sqlx = { version = "0.8.6", features = [
+    "runtime-tokio-rustls",
+    "postgres",
+    "chrono",
+    "json",
+    "macros",
+] }
 thiserror = "2.0.16"
-time = "0.3.41"
+time = "0.3.43"
 tokio = { version = "1.47.1", features = ["full"] }
+tokio-util = "0.7"
+tl = "0.7.8"
 tracing = "0.1.41"
-tracing-subscriber = { version = "0.3.19", features = ["env-filter", "json"] }
+tracing-subscriber = { version = "0.3.20", features = ["env-filter", "json"] }
 url = "2.5"
+governor = "0.10.1"
+once_cell = "1.21.3"
+serde_path_to_error = "0.1.17"
+num-format = "0.4.4"
+tower-http = { version = "0.6.0", features = ["fs", "cors", "trace", "timeout"] }
+rust-embed = { version = "8.0", features = ["debug-embed", "include-exclude"] }
+mime_guess = "2.0"
+clap = { version = "4.5", features = ["derive"] }
+rapidhash = "4.1.0"
+yansi = "1.0.1"
 
 [dev-dependencies]
+
+# A 'release mode' profile that compiles quickly, but still 'appears' like a release build, useful for debugging
+[profile.dev-release]
+inherits = "dev"
+debug-assertions = false
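The `clap` dependency added here (with the `derive` feature) lines up with the `--services` flag that the Dockerfile's `CMD` and the Justfile recipes pass to the binary. A minimal sketch of how such a flag could be parsed; the struct and field names are assumptions for illustration, since the CLI code itself is not part of this diff:

```rust
// Hypothetical sketch only: the real argument struct is not shown in this diff.
use clap::Parser;

#[derive(Parser)]
struct Args {
    /// Comma-separated list of services to run, e.g. `--services web,bot`
    #[arg(long, value_delimiter = ',', default_value = "bot,web,scraper")]
    services: Vec<String>,
}

fn main() {
    let args = Args::parse();
    for service in &args.services {
        // Each entry would select one service (bot, web, scraper) to start.
        println!("starting: {service}");
    }
}
```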
Dockerfile | 116 (new file)

@@ -0,0 +1,116 @@
+# Build arguments
+ARG RUST_VERSION=1.89.0
+ARG RAILWAY_GIT_COMMIT_SHA
+
+# --- Frontend Build Stage ---
+FROM oven/bun:1 AS frontend-builder
+
+WORKDIR /app
+
+# Copy backend Cargo.toml for build-time version retrieval
+COPY ./Cargo.toml ./
+
+# Copy frontend package files
+COPY ./web/package.json ./web/bun.lock* ./
+
+# Install dependencies
+RUN bun install --frozen-lockfile
+
+# Copy frontend source code
+COPY ./web ./
+
+# Build frontend
+RUN bun run build
+
+# --- Chef Base Stage ---
+FROM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION} AS chef
+WORKDIR /app
+
+# --- Planner Stage ---
+FROM chef AS planner
+COPY Cargo.toml Cargo.lock ./
+COPY build.rs ./
+COPY src ./src
+# Migrations & .sqlx specifically left out to avoid invalidating cache
+RUN cargo chef prepare --recipe-path recipe.json --bin banner
+
+# --- Rust Build Stage ---
+FROM chef AS builder
+
+# Set build-time environment variable for Railway Git commit SHA
+ARG RAILWAY_GIT_COMMIT_SHA
+ENV RAILWAY_GIT_COMMIT_SHA=${RAILWAY_GIT_COMMIT_SHA}
+
+# Copy recipe from planner and build dependencies only
+COPY --from=planner /app/recipe.json recipe.json
+RUN cargo chef cook --release --recipe-path recipe.json --bin banner
+
+# Install build dependencies for final compilation
+RUN apt-get update && apt-get install -y \
+    pkg-config \
+    libssl-dev \
+    git \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy source code and built frontend assets
+COPY Cargo.toml Cargo.lock ./
+COPY build.rs ./
+COPY .git* ./
+COPY src ./src
+COPY migrations ./migrations
+COPY --from=frontend-builder /app/dist ./web/dist
+
+# Build web app with embedded assets
+RUN cargo build --release --bin banner
+
+# Strip the binary to reduce size
+RUN strip target/release/banner
+
+# --- Runtime Stage ---
+FROM debian:12-slim
+
+ARG APP=/usr/src/app
+ARG APP_USER=appuser
+ARG UID=1000
+ARG GID=1000
+
+# Install runtime dependencies
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    ca-certificates \
+    tzdata \
+    wget \
+    && rm -rf /var/lib/apt/lists/*
+
+ARG TZ=Etc/UTC
+ENV TZ=${TZ}
+
+# Create user with specific UID/GID
+RUN addgroup --gid $GID $APP_USER \
+    && adduser --uid $UID --disabled-password --gecos "" --ingroup $APP_USER $APP_USER \
+    && mkdir -p ${APP}
+
+# Copy application binary
+COPY --from=builder --chown=$APP_USER:$APP_USER /app/target/release/banner ${APP}/banner
+
+# Set proper permissions
+RUN chmod +x ${APP}/banner
+
+USER $APP_USER
+WORKDIR ${APP}
+
+# Build-time arg for PORT, default to 8000
+ARG PORT=8000
+# Runtime environment var for PORT, default to build-time arg
+ENV PORT=${PORT}
+EXPOSE ${PORT}
+
+# Add health check
+HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
+    CMD wget --no-verbose --tries=1 --spider http://localhost:${PORT}/health || exit 1
+
+# Can be explicitly overridden with different hosts & ports
+ENV HOSTS=0.0.0.0,[::]
+
+# Implicitly uses PORT environment variable
+# temporary: running without 'scraper' service
+CMD ["sh", "-c", "exec ./banner --services web,bot"]
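The builder stage copies the compiled frontend into `web/dist` before `cargo build`, so the `rust-embed` crate added in Cargo.toml can bake those files into the single binary the runtime stage ships. A minimal sketch of that embedding, assuming a hypothetical `Assets` struct pointed at the same folder (only the folder path and the dependency appear in this diff):

```rust
// Hypothetical sketch: the struct name and lookup path are assumptions.
use rust_embed::RustEmbed;

#[derive(RustEmbed)]
#[folder = "web/dist/"]
struct Assets;

fn main() {
    // Files are read out of the binary itself at runtime; no web/dist on disk.
    if let Some(file) = Assets::get("index.html") {
        let mime = mime_guess::from_path("index.html").first_or_octet_stream();
        println!("index.html: {} bytes ({})", file.data.len(), mime);
    }
}
```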
Justfile | 79 (new file)

@@ -0,0 +1,79 @@
+default_services := "bot,web,scraper"
+
+default:
+    just --list
+
+# Run all checks (format, clippy, tests, lint)
+check:
+    cargo fmt --all -- --check
+    cargo clippy --all-features -- --deny warnings
+    cargo nextest run
+    bun run --cwd web typecheck
+    bun run --cwd web lint
+
+# Format all Rust and TypeScript code
+format:
+    cargo fmt --all
+    bun run --cwd web format
+
+# Check formatting without modifying (CI-friendly)
+format-check:
+    cargo fmt --all -- --check
+    bun run --cwd web format:check
+
+# Start PostgreSQL in Docker and update .env with connection string
+db:
+    #!/usr/bin/env bash
+    set -euo pipefail
+
+    # Find available port
+    PORT=$(shuf -i 49152-65535 -n 1)
+    while ss -tlnp 2>/dev/null | grep -q ":$PORT "; do
+        PORT=$(shuf -i 49152-65535 -n 1)
+    done
+
+    # Start PostgreSQL container
+    docker run -d \
+        --name banner-postgres \
+        -e POSTGRES_PASSWORD=banner \
+        -e POSTGRES_USER=banner \
+        -e POSTGRES_DB=banner \
+        -p "$PORT:5432" \
+        postgres:17-alpine
+
+    # Update .env file
+    DB_URL="postgresql://banner:banner@localhost:$PORT/banner"
+    if [ -f .env ]; then
+        sed -i.bak "s|^DATABASE_URL=.*|DATABASE_URL=$DB_URL|" .env
+    else
+        echo "DATABASE_URL=$DB_URL" > .env
+    fi
+
+    echo "PostgreSQL started on port $PORT"
+    echo "DATABASE_URL=$DB_URL"
+    echo "Run: sqlx migrate run"
+
+# Auto-reloading frontend server
+frontend:
+    bun run --cwd web dev
+
+# Production build of frontend
+build-frontend:
+    bun run --cwd web build
+
+# Auto-reloading backend server
+backend *ARGS:
+    bacon --headless run -- -- {{ARGS}}
+
+# Production build
+build:
+    bun run --cwd web build
+    cargo build --release --bin banner
+
+# Run auto-reloading development build with release characteristics
+dev-build *ARGS='--services web --tracing pretty': build-frontend
+    bacon --headless run -- --profile dev-release -- {{ARGS}}
+
+# Auto-reloading development build for both frontend and backend
+[parallel]
+dev *ARGS='--services web,bot': frontend (backend ARGS)
142 README.md
@@ -1,125 +1,51 @@
 # banner
 
-A discord bot for executing queries & searches on the Ellucian Banner instance hosting all of UTSA's class data.
-
-## Feature Wishlist
-
-- Commands
-  - ICS Download (get a ICS download of your classes with location & timing perfectly - set for every class you're in)
-  - Classes Now (find classes happening)
-  - Autocomplete
-    - Class Title
-    - Course Number
-    - Term/Part of Term
-    - Professor
-    - Attribute
-  - Component Pagination
-- RateMyProfessor Integration (Linked/Embedded)
-- Smart term selection (i.e. Summer 2024 will be selected automatically when opened)
-- Rate Limiting (bursting with global/user limits)
-- DMs Integration (allow usage of the bot in DMs)
-- Class Change Notifications (get notified when details about a class change)
-- Multi-term Querying (currently the backend for searching is kinda weird)
-- Full Autocomplete for Every Search Option
-- Metrics, Log Query, Privileged Error Feedback
-- Search for Classes
-  - Major, Professor, Location, Name, Time of Day
-- Subscribe to Classes
-  - Availability (seat, pre-seat)
-  - Waitlist Movement
-  - Detail Changes (meta, time, location, seats, professor)
-    - `time` Start, End, Days of Week
-    - `seats` Any change in seat/waitlist data
-    - `meta`
-- Lookup via Course Reference Number (CRN)
-- Smart Time of Day Handling
-  - "2 PM" -> Start within 2:00 PM to 2:59 PM
-  - "2-3 PM" -> Start within 2:00 PM to 3:59 PM
-  - "ends by 2 PM" -> Ends within 12:00 AM to 2:00 PM
-  - "after 2 PM" -> Start within 2:01 PM to 11:59 PM
-  - "before 2 PM" -> Ends within 12:00 AM to 1:59 PM
-- Get By Section Command
-  - CS 4393 001 =>
-  - Will require SQL to be able to search for a class by its section number
-
-## Analysis Required
-
-Some of the features and architecture of Ellucian's Banner system are not clear.
-The follow features, JSON, and more require validation & analysis:
-
-- Struct Nullability
-  - Much of the responses provided by Ellucian contain nulls, and most of them are uncertain as to when and why they're null.
-  - Analysis must be conducted to be sure of when to use a string and when it should nillable (pointer).
-- Multiple Professors / Primary Indicator
-- Multiple Meeting Times
-- Meeting Schedule Types
-  - AFF vs AIN vs AHB etc.
-- Do CRNs repeat between years?
-- Check whether partOfTerm is always filled in, and it's meaning for various class results.
-- Check which API calls are affected by change in term/sessionID term select
-- SessionIDs
-  - How long does a session ID work?
-  - Do I really require a separate one per term?
-  - How many can I activate, are there any restrictions?
-  - How should session IDs be checked as 'invalid'?
-  - What action(s) keep a session ID 'active', if any?
-- Are there any courses with multiple meeting times?
-- Google Calendar link generation, as an alternative to ICS file generation
-
-## Change Identification
-
-- Important attributes of a class will be parsed on both the old and new data.
-- These attributes will be compared and given identifiers that can be subscribed to.
-- When a user subscribes to one of these identifiers, any changes identified will be sent to the user.
-
-## Real-time Suggestions
-
-Various commands arguments have the ability to have suggestions appear.
-
-- They must be fast. As ephemeral suggestions that are only relevant for seconds or less, they need to be delivered in less than a second.
-- They need to be easy to acquire. With as many commands & arguments to search as I do, it is paramount that the API be easy to understand & use.
-- It cannot be complicated. I only have so much time to develop this.
-- It does not need to be persistent. Since the data is scraped and rolled periodically from the Banner system, the data used will be deleted and re-requested occasionally.
-
-For these reasons, I believe SQLite to be the ideal place for this data to be stored.
-It is exceptionally fast, works well in-memory, and is less complicated compared to most other solutions.
-
-- Only required data about the class will be stored, along with the JSON-encoded string.
-- For now, this would only be the CRN (and possibly the Term).
-- Potentially, a binary encoding could be used for performance, but it is unlikely to be better.
-- Database dumping into R2 would be good to ensure that over-scraping of the Banner system does not occur.
-  - Upon a safe close requested
-    - Must be done quickly (<8 seconds)
-  - Every 30 minutes, if any scraping ocurred.
-    - May cause locking of commands.
-
-## Scraping
-
-In order to keep the in-memory database of the bot up-to-date with the Banner system, the API must be scraped.
-Scraping will be separated by major to allow for priority majors (namely, Computer Science) to be scraped more often compared to others.
-This will lower the overall load on the Banner system while ensuring that data presented by the app is still relevant.
-
-For now, all majors will be scraped fully every 4 hours with at least 5 minutes between each one.
-
-- On startup, priority majors will be scraped first (if required).
-- Other majors will be scraped in arbitrary order (if required).
-- Scrape timing will be stored in Redis.
-- CRNs will be the Primary Key within SQLite
-  - If CRNs are duplicated between terms, then the primary key will be (CRN, Term)
-
-Considerations
-
-- Change in metadata should decrease the interval
-- The number of courses scraped should change the interval (2 hours per 500 courses involved)
-
-## Rate Limiting, Costs & Bursting
-
-Ideally, this application would implement dynamic rate limiting to ensure overload on the server does not occur.
-Better, it would also ensure that priority requests (commands) are dispatched faster than background processes (scraping), while making sure different requests are weighted differently.
-For example, a recent scrape of 350 classes should be weighted 5x more than a search for 8 classes by a user.
-Still, even if the cap does not normally allow for this request to be processed immediately, the small user search should proceed with a small bursting cap.
-
-The requirements to this hypothetical system would be:
-
-- Conditional Bursting: background processes or other requests deemed "low priority" are not allowed to use bursting.
-- Arbitrary Costs: rate limiting is considered in the form of the request size/speed more or less, such that small simple requests can be made more frequently, unlike large requests.
+A complex multi-service system providing a Discord bot and browser-based interface to UTSA's course data.
+
+## Services
+
+The application consists of three modular services that can be run independently or together:
+
+- Discord Bot ([`bot`][src-bot])
+  - Primary interface for course monitoring and data queries
+  - Built with [Serenity][serenity] and [Poise][poise] frameworks for robust command handling
+  - Uses slash commands with comprehensive error handling and logging
+- Web Server ([`web`][src-web])
+  - [Axum][axum]-based server with Vite/React-based frontend
+  - [Embeds static assets][rust-embed] at compile time with E-Tags & Cache-Control headers
+- Scraper ([`scraper`][src-scraper])
+  - Intelligent data collection system with priority-based queuing inside PostgreSQL via [`sqlx`][sqlx]
+  - Rate-limited scraping with burst handling to respect UTSA's systems
+  - Handles course data updates, availability changes, and metadata synchronization
+
+## Quick Start
+
+```bash
+bun install --cwd web       # Install frontend dependencies
+cargo build                 # Build the backend
+
+just dev                    # Runs auto-reloading dev build
+just dev --services bot,web # Runs auto-reloading dev build, running only the bot and web services
+just dev-build              # Development build with release characteristics (frontend is embedded, non-auto-reloading)
+
+just build                  # Production build that embeds assets
+```
+
+## Documentation
+
+Comprehensive documentation is available in the [`docs/`][documentation] folder.
+
+[documentation]: docs/README.md
+[src-bot]: src/bot
+[src-web]: src/web
+[src-scraper]: src/scraper
+[serenity]: https://github.com/serenity-rs/serenity
+[poise]: https://github.com/serenity-rs/poise
+[axum]: https://github.com/tokio-rs/axum
+[rust-embed]: https://lib.rs/crates/rust-embed
+[sqlx]: https://github.com/launchbadge/sqlx
58 bacon.toml
@@ -9,61 +9,20 @@ default_job = "check"
 env.CARGO_TERM_COLOR = "always"
 
 [jobs.check]
-command = ["cargo", "check"]
-need_stdout = false
-
-[jobs.check-all]
 command = ["cargo", "check", "--all-targets"]
 need_stdout = false
 
-# Run clippy on the default target
 [jobs.clippy]
-command = ["cargo", "clippy"]
-need_stdout = false
-
-# Run clippy on all targets
-# To disable some lints, you may change the job this way:
-#    [jobs.clippy-all]
-#    command = [
-#        "cargo", "clippy",
-#        "--all-targets",
-#        "--",
-#        "-A", "clippy::bool_to_int_with_if",
-#        "-A", "clippy::collapsible_if",
-#        "-A", "clippy::derive_partial_eq_without_eq",
-#    ]
-#    need_stdout = false
-[jobs.clippy-all]
 command = ["cargo", "clippy", "--all-targets"]
 need_stdout = false
 
-# This job lets you run
-# - all tests: bacon test
-# - a specific test: bacon test -- config::test_default_files
-# - the tests of a package: bacon test -- -- -p config
 [jobs.test]
-command = ["cargo", "test"]
-need_stdout = true
-
-[jobs.nextest]
 command = [
     "cargo", "nextest", "run",
-    "--hide-progress-bar", "--failure-output", "final"
 ]
 need_stdout = true
 analyzer = "nextest"
 
-[jobs.doc]
-command = ["cargo", "doc", "--no-deps"]
-need_stdout = false
-
-# If the doc compiles, then it opens in your browser and bacon switches
-# to the previous job
-[jobs.doc-open]
-command = ["cargo", "doc", "--no-deps", "--open"]
-need_stdout = false
-on_success = "back" # so that we don't open the browser at each change
-
 [jobs.run]
 command = [
     "cargo", "run",
@@ -74,19 +33,20 @@ background = false
 on_change_strategy = "kill_then_restart"
 # kill = ["pkill", "-TERM", "-P"]'
 
-# This parameterized job runs the example of your choice, as soon
-# as the code compiles.
-# Call it as
-#    bacon ex -- my-example
-[jobs.ex]
-command = ["cargo", "run", "--example"]
+[jobs.dev]
+command = [
+    "just", "dev"
+]
 need_stdout = true
 allow_warnings = true
+background = false
+on_change_strategy = "kill_then_restart"
 
 # You may define here keybindings that would be specific to
 # a project, for example a shortcut to launch a specific job.
 # Shortcuts to internal functions (scrolling, toggling, etc.)
 # should go in your personal global prefs.toml file instead.
 [keybindings]
-# alt-m = "job:my-job"
-c = "job:clippy-all" # comment this to have 'c' run clippy on only the default target
+c = "job:clippy" # comment this to have 'c' run clippy on only the default target
+shift-c = "job:check"
+d = "job:dev"
36 build.rs (new file)
@@ -0,0 +1,36 @@
use std::process::Command;

fn main() {
    // Try to get Git commit hash from Railway environment variable first
    let git_hash = std::env::var("RAILWAY_GIT_COMMIT_SHA").unwrap_or_else(|_| {
        // Fallback to git command if not on Railway
        let output = Command::new("git").args(["rev-parse", "HEAD"]).output();
        match output {
            Ok(output) => {
                if output.status.success() {
                    String::from_utf8_lossy(&output.stdout).trim().to_string()
                } else {
                    "unknown".to_string()
                }
            }
            Err(_) => "unknown".to_string(),
        }
    });

    // Get the short hash (first 7 characters)
    let short_hash = if git_hash != "unknown" && git_hash.len() >= 7 {
        git_hash[..7].to_string()
    } else {
        git_hash.clone()
    };

    // Set the environment variables that will be available at compile time
    println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_hash);
    println!("cargo:rustc-env=GIT_COMMIT_SHORT={}", short_hash);

    // Rebuild if the Git commit changes (only works when .git directory is available)
    if std::path::Path::new(".git/HEAD").exists() {
        println!("cargo:rerun-if-changed=.git/HEAD");
        println!("cargo:rerun-if-changed=.git/refs/heads");
    }
}
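For reference, a minimal sketch of the consuming side, which this diff does not show: the two `cargo:rustc-env` lines above make the hash available to the crate at compile time via `env!`.

```rust
// Compile-time reads of the variables that build.rs sets above; env!()
// fails the build if a variable is missing, so build.rs must always
// emit both, even when the hash is "unknown".
const GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH");
const GIT_COMMIT_SHORT: &str = env!("GIT_COMMIT_SHORT");

fn main() {
    println!("banner {} ({})", GIT_COMMIT_SHORT, GIT_COMMIT_HASH);
}
```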
@@ -1,9 +0,0 @@
-# For documentation on how to configure this file,
-# see https://diesel.rs/guides/configuring-diesel-cli
-
-[print_schema]
-file = "src/data/schema.rs"
-custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
-
-[migrations_directory]
-dir = "migrations"
94 docs/ARCHITECTURE.md (new file)
@@ -0,0 +1,94 @@
# Architecture

## System Overview

The Banner project is built as a multi-service application with the following components:

- **Discord Bot Service**: Handles Discord interactions and commands
- **Web Service**: Serves the React frontend and provides API endpoints
- **Scraper Service**: Background data collection and synchronization
- **Database Layer**: PostgreSQL for persistent storage

## Technical Analysis

### Banner System Integration

Some of the features and architecture of Ellucian's Banner system are not clear.
The following features, JSON, and more require validation & analysis:

- Struct Nullability
  - Much of the responses provided by Ellucian contain nulls, and most of them are uncertain as to when and why they're null.
  - Analysis must be conducted to be sure of when to use a string and when it should be nillable (a pointer).
- Multiple Professors / Primary Indicator
- Multiple Meeting Times
- Meeting Schedule Types
  - AFF vs AIN vs AHB etc.
- Do CRNs repeat between years?
- Check whether partOfTerm is always filled in, and its meaning for various class results.
- Check which API calls are affected by a change in term/sessionID term select
- SessionIDs
  - How long does a session ID work?
  - Do I really require a separate one per term?
  - How many can I activate, are there any restrictions?
  - How should session IDs be checked as 'invalid'?
  - What action(s) keep a session ID 'active', if any?
- Are there any courses with multiple meeting times?
- Google Calendar link generation, as an alternative to ICS file generation

## Change Identification

- Important attributes of a class will be parsed on both the old and new data.
- These attributes will be compared and given identifiers that can be subscribed to.
- When a user subscribes to one of these identifiers, any changes identified will be sent to the user.

## Real-time Suggestions

Various command arguments can have suggestions appear.

- They must be fast. As ephemeral suggestions that are only relevant for seconds or less, they need to be delivered in less than a second.
- They need to be easy to acquire. With as many commands & arguments to search as I do, it is paramount that the API be easy to understand & use.
- It cannot be complicated. I only have so much time to develop this.
- It does not need to be persistent. Since the data is scraped and rolled periodically from the Banner system, the data used will be deleted and re-requested occasionally.

For these reasons, I believe PostgreSQL to be the ideal place for this data to be stored.
It is exceptionally fast, works well in-memory, and is less complicated compared to most other solutions.

- Only required data about the class will be stored, along with the JSON-encoded string.
- For now, this would only be the CRN (and possibly the Term).
- Potentially, a binary encoding could be used for performance, but it is unlikely to be better.
- Database dumping into R2 would be good to ensure that over-scraping of the Banner system does not occur.
  - Upon a safe close being requested
    - Must be done quickly (<8 seconds)
  - Every 30 minutes, if any scraping occurred.
    - May cause locking of commands.

## Scraping System

In order to keep the bot's database up-to-date with the Banner system, the API must be scraped.
Scraping will be separated by major to allow priority majors (namely, Computer Science) to be scraped more often than others.
This will lower the overall load on the Banner system while ensuring that data presented by the app is still relevant.

For now, all majors will be scraped fully every 4 hours with at least 5 minutes between each one.

- On startup, priority majors will be scraped first (if required).
- Other majors will be scraped in arbitrary order (if required).
- Scrape timing will be stored in the database.
- CRNs will be the primary key within the database.
  - If CRNs are duplicated between terms, then the primary key will be (CRN, Term).

Considerations:

- A change in metadata should decrease the interval.
- The number of courses scraped should change the interval (2 hours per 500 courses involved; see the sketch below).
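A minimal sketch of that interval rule, assuming the 4-hour baseline above stretched by 2 hours per full block of 500 courses; the function shape and names are illustrative, not the project's actual API:

```rust
use std::time::Duration;

// Hypothetical interval policy: 4-hour baseline, plus 2 hours for each
// full block of 500 courses a scrape involves.
fn scrape_interval(courses_involved: u32) -> Duration {
    let base = Duration::from_secs(4 * 60 * 60);
    let per_block = Duration::from_secs(2 * 60 * 60);
    base + per_block * (courses_involved / 500)
}

fn main() {
    assert_eq!(scrape_interval(499), Duration::from_secs(4 * 3600));
    assert_eq!(scrape_interval(1000), Duration::from_secs(8 * 3600));
}
```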

## Rate Limiting, Costs & Bursting

Ideally, this application would implement dynamic rate limiting to ensure the server is not overloaded.
Better yet, it would also ensure that priority requests (commands) are dispatched faster than background processes (scraping), while weighting different requests differently.
For example, a recent scrape of 350 classes should be weighted 5x more than a search for 8 classes by a user.
Still, even if the cap would not normally allow a request to be processed immediately, the small user search should proceed under a small bursting cap.

The requirements for this hypothetical system would be (see the sketch after this list):

- Conditional Bursting: background processes or other requests deemed "low priority" are not allowed to use bursting.
- Arbitrary Costs: rate limiting costs scale with request size/speed, such that small, simple requests can be made more frequently than large ones.
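A minimal token-bucket sketch of those two requirements; the numbers, names, and types are assumptions for illustration, not the crate's actual rate limiter API:

```rust
// Hypothetical two-pool limiter: a steady-state budget plus a burst
// reserve that only high-priority work (commands) may dip into.
struct RateLimiter {
    tokens: f64, // steady-state budget
    burst: f64,  // reserve available only to high-priority requests
}

enum Priority {
    Command,
    Background,
}

impl RateLimiter {
    // `cost` is arbitrary: a 350-class scrape might cost 5x more than
    // an 8-class user search.
    fn try_acquire(&mut self, cost: f64, priority: Priority) -> bool {
        if self.tokens >= cost {
            self.tokens -= cost;
            return true;
        }
        // Conditional bursting: background work never touches the reserve.
        if matches!(priority, Priority::Command) && self.tokens + self.burst >= cost {
            self.burst -= cost - self.tokens;
            self.tokens = 0.0;
            return true;
        }
        false
    }
}

fn main() {
    let mut limiter = RateLimiter { tokens: 1.0, burst: 4.0 };
    assert!(!limiter.try_acquire(3.0, Priority::Background)); // no burst for scraping
    assert!(limiter.try_acquire(3.0, Priority::Command)); // user search may burst
}
```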
docs/BANNER.md
@@ -1,11 +1,17 @@
-# Sessions
+# Banner
+
+All notes on the internal workings of the Banner system by Ellucian.
+
+## Sessions
 
 All notes on the internal workings of Sessions in the Banner system.
 
 - Sessions are generated on demand with a random string of characters.
+  - The format `{5 random characters}{milliseconds since epoch}`
+  - Example: ``
 - Sessions are invalidated after 30 minutes, but may change.
   - This delay can be found in the original HTML returned, find `meta[name="maxInactiveInterval"]` and read the `content` attribute.
-  - This is read at runtime by the javascript on initialization.
+  - This is read at runtime (in the browser, by javascript) on initialization.
   - Multiple timers exist, one is for the Inactivity Timer.
     - A dialog will appear asking the user to continue their session.
     - If they click the button, the session will be extended via the keepAliveURL (see `meta[name="keepAliveURL"]`).
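A small sketch of that ID format, assuming lowercase letters for the random prefix; the note above doesn't document the real character set, and a real client would use a proper RNG:

```rust
use std::time::{SystemTime, UNIX_EPOCH};

/// Hypothetical generator for the documented session ID format:
/// `{5 random characters}{milliseconds since epoch}`.
fn generate_session_id() -> String {
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock before Unix epoch");

    // Derive five pseudo-random lowercase letters from clock noise.
    let alphabet = b"abcdefghijklmnopqrstuvwxyz";
    let mut seed = now.as_nanos();
    let prefix: String = (0..5)
        .map(|_| {
            seed = seed
                .wrapping_mul(6364136223846793005)
                .wrapping_add(1442695040888963407);
            alphabet[(seed % 26) as usize] as char
        })
        .collect();

    format!("{}{}", prefix, now.as_millis())
}

fn main() {
    // e.g. "kqzrt1733190000000"
    println!("{}", generate_session_id());
}
```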
58 docs/FEATURES.md (new file)
@@ -0,0 +1,58 @@
# Features

## Current Features

### Discord Bot Commands

- **search** - Search for courses with various filters (title, course code, keywords)
- **terms** - List available terms or search for a specific term
- **time** - Get meeting times for a specific course (CRN)
- **ics** - Generate an ICS calendar file for a course with holiday exclusions
- **gcal** - Generate a Google Calendar link for a course

### Data Pipeline

- Intelligent scraping system with priority queues
- Rate limiting and burst handling
- Background data synchronization

## Feature Wishlist

### Commands

- ICS Download (get an ICS download of your classes with location & timing perfectly set for every class you're in)
- Classes Now (find classes happening)
- Autocomplete
  - Class Title
  - Course Number
  - Term/Part of Term
  - Professor
  - Attribute
- Component Pagination
- RateMyProfessor Integration (Linked/Embedded)
- Smart term selection (i.e. Summer 2024 will be selected automatically when opened)
- Rate Limiting (bursting with global/user limits)
- DMs Integration (allow usage of the bot in DMs)
- Class Change Notifications (get notified when details about a class change)
- Multi-term Querying (currently the backend for searching is kinda weird)
- Full Autocomplete for Every Search Option
- Metrics, Log Query, Privileged Error Feedback
- Search for Classes
  - Major, Professor, Location, Name, Time of Day
- Subscribe to Classes
  - Availability (seat, pre-seat)
  - Waitlist Movement
  - Detail Changes (meta, time, location, seats, professor)
    - `time` Start, End, Days of Week
    - `seats` Any change in seat/waitlist data
    - `meta`
- Lookup via Course Reference Number (CRN)
- Smart Time of Day Handling (see the sketch after this list)
  - "2 PM" -> Start within 2:00 PM to 2:59 PM
  - "2-3 PM" -> Start within 2:00 PM to 3:59 PM
  - "ends by 2 PM" -> Ends within 12:00 AM to 2:00 PM
  - "after 2 PM" -> Start within 2:01 PM to 11:59 PM
  - "before 2 PM" -> Ends within 12:00 AM to 1:59 PM
- Get By Section Command
  - CS 4393 001 =>
  - Will require SQL to be able to search for a class by its section number
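A minimal sketch of those time-of-day rules, representing times as minutes since midnight; the type and function names are illustrative only, not the project's actual API:

```rust
/// Hypothetical encoding of the "Smart Time of Day Handling" rules above.
#[derive(Debug, PartialEq)]
enum TimeFilter {
    /// Course must start within [from, to] (minutes since midnight).
    StartsWithin { from: u16, to: u16 },
    /// Course must end within [from, to] (minutes since midnight).
    EndsWithin { from: u16, to: u16 },
}

fn interpret(phrase: &str) -> Option<TimeFilter> {
    // 2 PM == 14:00 == 840 minutes; hard-coded to mirror the examples.
    match phrase {
        "2 PM" => Some(TimeFilter::StartsWithin { from: 840, to: 899 }),
        "2-3 PM" => Some(TimeFilter::StartsWithin { from: 840, to: 959 }),
        "ends by 2 PM" => Some(TimeFilter::EndsWithin { from: 0, to: 840 }),
        "after 2 PM" => Some(TimeFilter::StartsWithin { from: 841, to: 1439 }),
        "before 2 PM" => Some(TimeFilter::EndsWithin { from: 0, to: 839 }),
        _ => None,
    }
}

fn main() {
    assert_eq!(
        interpret("2 PM"),
        Some(TimeFilter::StartsWithin { from: 840, to: 899 })
    );
    assert_eq!(interpret("noon"), None); // unrecognized phrases pass through
}
```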
42 docs/README.md (new file)
@@ -0,0 +1,42 @@
# Documentation

This folder contains detailed documentation for the Banner project. This file acts as the index.

## Files

- [`FEATURES.md`](FEATURES.md) - Current features, implemented functionality, and future roadmap
- [`BANNER.md`](BANNER.md) - General API documentation on the Banner system
- [`ARCHITECTURE.md`](ARCHITECTURE.md) - Technical implementation details, system design, and analysis

## Samples

The `samples/` folder contains real Banner API response examples:

- `search/` - Course search API responses with various filters
  - [`searchResults.json`](samples/search/searchResults.json)
  - [`searchResults_500.json`](samples/search/searchResults_500.json)
  - [`searchResults_CS500.json`](samples/search/searchResults_CS500.json)
  - [`searchResults_malware.json`](samples/search/searchResults_malware.json)
- `meta/` - Metadata API responses (terms, subjects, instructors, etc.)
  - [`get_attribute.json`](samples/meta/get_attribute.json)
  - [`get_campus.json`](samples/meta/get_campus.json)
  - [`get_instructionalMethod.json`](samples/meta/get_instructionalMethod.json)
  - [`get_instructor.json`](samples/meta/get_instructor.json)
  - [`get_partOfTerm.json`](samples/meta/get_partOfTerm.json)
  - [`get_subject.json`](samples/meta/get_subject.json)
  - [`getTerms.json`](samples/meta/getTerms.json)
- `course/` - Course detail API responses (HTML and JSON)
  - [`getFacultyMeetingTimes.json`](samples/course/getFacultyMeetingTimes.json)
  - [`getClassDetails.html`](samples/course/getClassDetails.html)
  - [`getCorequisites.html`](samples/course/getCorequisites.html)
  - [`getCourseDescription.html`](samples/course/getCourseDescription.html)
  - [`getEnrollmentInfo.html`](samples/course/getEnrollmentInfo.html)
  - [`getFees.html`](samples/course/getFees.html)
  - [`getLinkedSections.html`](samples/course/getLinkedSections.html)
  - [`getRestrictions.html`](samples/course/getRestrictions.html)
  - [`getSectionAttributes.html`](samples/course/getSectionAttributes.html)
  - [`getSectionBookstoreDetails.html`](samples/course/getSectionBookstoreDetails.html)
  - [`getSectionPrerequisites.html`](samples/course/getSectionPrerequisites.html)
  - [`getXlistSections.html`](samples/course/getXlistSections.html)

These samples are used for development, testing, and understanding the Banner API structure.
3 migrations/20251103093649_add_retry_tracking.sql (new file)
@@ -0,0 +1,3 @@
-- Add retry tracking columns to scrape_jobs table
ALTER TABLE scrape_jobs ADD COLUMN retry_count INTEGER NOT NULL DEFAULT 0 CHECK (retry_count >= 0);
ALTER TABLE scrape_jobs ADD COLUMN max_retries INTEGER NOT NULL DEFAULT 5 CHECK (max_retries >= 0);
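These columns only track state; the backoff policy itself lives in the scraper and is not shown in this diff. A hedged sketch of what a retry schedule over `retry_count`/`max_retries` might look like, with an assumed exponential base and cap:

```rust
use std::time::Duration;

/// Hypothetical retry policy for the columns above; the 30-second base,
/// doubling, and one-hour cap are assumptions, not the scraper's code.
fn next_retry_delay(retry_count: u32) -> Option<Duration> {
    const MAX_RETRIES: u32 = 5; // mirrors the column default
    if retry_count >= MAX_RETRIES {
        return None; // give up; the job should be marked failed
    }
    // 30s, 60s, 120s, ... capped at one hour.
    let secs = 30u64.saturating_mul(1 << retry_count).min(3600);
    Some(Duration::from_secs(secs))
}

fn main() {
    assert_eq!(next_retry_delay(0), Some(Duration::from_secs(30)));
    assert_eq!(next_retry_delay(5), None);
}
```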
45 migrations/20251103104300_add_performance_indexes.sql (new file)
@@ -0,0 +1,45 @@
-- Performance optimization indexes

-- Index for term-based queries (most common access pattern)
CREATE INDEX IF NOT EXISTS idx_courses_term_code ON courses(term_code);

-- Index for subject-based filtering
CREATE INDEX IF NOT EXISTS idx_courses_subject ON courses(subject);

-- Composite index for subject + term queries
CREATE INDEX IF NOT EXISTS idx_courses_subject_term ON courses(subject, term_code);

-- Index for course number lookups
CREATE INDEX IF NOT EXISTS idx_courses_course_number ON courses(course_number);

-- Index for last scraped timestamp (useful for finding stale data)
CREATE INDEX IF NOT EXISTS idx_courses_last_scraped ON courses(last_scraped_at);

-- Index for course metrics time-series queries
-- BRIN index is optimal for time-series data
CREATE INDEX IF NOT EXISTS idx_course_metrics_timestamp ON course_metrics USING BRIN(timestamp);

-- B-tree index for specific course metric lookups
CREATE INDEX IF NOT EXISTS idx_course_metrics_course_timestamp
    ON course_metrics(course_id, timestamp DESC);

-- Partial index for pending scrape jobs (only unlocked jobs)
CREATE INDEX IF NOT EXISTS idx_scrape_jobs_pending
    ON scrape_jobs(execute_at ASC)
    WHERE locked_at IS NULL;

-- Index for high-priority job processing
CREATE INDEX IF NOT EXISTS idx_scrape_jobs_priority_pending
    ON scrape_jobs(priority DESC, execute_at ASC)
    WHERE locked_at IS NULL;

-- Index for retry tracking
CREATE INDEX IF NOT EXISTS idx_scrape_jobs_retry_count
    ON scrape_jobs(retry_count)
    WHERE retry_count > 0 AND locked_at IS NULL;

-- Analyze tables to update statistics
ANALYZE courses;
ANALYZE course_metrics;
ANALYZE course_audits;
ANALYZE scrape_jobs;
53 migrations/20251103104400_optimize_indexes.sql (new file)
@@ -0,0 +1,53 @@
-- Index Optimization Follow-up Migration

-- Reason: Redundant with composite index idx_courses_subject_term
DROP INDEX IF EXISTS idx_courses_subject;

-- Remove: idx_scrape_jobs_retry_count
DROP INDEX IF EXISTS idx_scrape_jobs_retry_count;

-- Purpose: Optimize the scheduler's frequent query (runs every 60 seconds)
CREATE INDEX IF NOT EXISTS idx_scrape_jobs_scheduler_lookup
    ON scrape_jobs(target_type, target_payload)
    WHERE locked_at IS NULL;

-- Note: We use (target_type, target_payload) instead of including locked_at
-- in the index columns because:
-- 1. The WHERE clause filters locked_at IS NULL (partial index optimization)
-- 2. target_payload is JSONB and already large; keeping it as an indexed column
--    allows PostgreSQL to use index-only scans for the SELECT target_payload query
-- 3. This design minimizes index size while maximizing query performance

-- Purpose: Enable efficient audit trail queries by course
CREATE INDEX IF NOT EXISTS idx_course_audits_course_timestamp
    ON course_audits(course_id, timestamp DESC);

-- Purpose: Enable queries like "Show all changes in the last 24 hours"
CREATE INDEX IF NOT EXISTS idx_course_audits_timestamp
    ON course_audits(timestamp DESC);

-- The BRIN index on course_metrics(timestamp) assumes data is inserted in
-- chronological order. BRIN indexes are only effective when data is physically
-- ordered on disk. If you perform:
--   - Backfills of historical data
--   - Out-of-order inserts
--   - Frequent UPDATEs that move rows
--
-- Then the BRIN index effectiveness will degrade. Monitor with:
--   SELECT * FROM brin_page_items(get_raw_page('idx_course_metrics_timestamp', 1));
--
-- If you see poor selectivity, consider:
--   1. REINDEX to rebuild after bulk loads
--   2. Switch to B-tree if inserts are not time-ordered
--   3. Use CLUSTER to physically reorder the table (requires downtime)

COMMENT ON INDEX idx_course_metrics_timestamp IS
    'BRIN index - requires chronologically ordered inserts for efficiency. Monitor selectivity.';

-- Update statistics for query planner
ANALYZE courses;
ANALYZE course_metrics;
ANALYZE course_audits;
ANALYZE scrape_jobs;
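As a hedged illustration of the query shape these partial indexes serve: the table and column names come from the migrations above, but the `id` column, the Rust wrapper, sqlx's `json` feature, and the `FOR UPDATE SKIP LOCKED` claiming strategy are assumptions, not necessarily the scraper's actual code.

```rust
use sqlx::PgPool;

// Hypothetical scheduler lookup matching idx_scrape_jobs_priority_pending:
// only unlocked, due jobs, highest priority first. FOR UPDATE SKIP LOCKED
// lets concurrent workers claim different rows without blocking each other.
async fn next_pending_job(pool: &PgPool) -> sqlx::Result<Option<(i64, serde_json::Value)>> {
    sqlx::query_as(
        r#"
        SELECT id, target_payload
        FROM scrape_jobs
        WHERE locked_at IS NULL AND execute_at <= now()
        ORDER BY priority DESC, execute_at ASC
        LIMIT 1
        FOR UPDATE SKIP LOCKED
        "#,
    )
    .fetch_optional(pool)
    .await
}
```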
168 src/app.rs (new file)
@@ -0,0 +1,168 @@
use crate::banner::BannerApi;
use crate::cli::ServiceName;
use crate::config::Config;
use crate::scraper::ScraperService;
use crate::services::bot::BotService;
use crate::services::manager::ServiceManager;
use crate::services::web::WebService;
use crate::state::AppState;
use crate::web::routes::BannerState;
use figment::value::UncasedStr;
use figment::{Figment, providers::Env};
use sqlx::postgres::PgPoolOptions;
use std::process::ExitCode;
use std::sync::Arc;
use std::time::Duration;
use tracing::{error, info};

/// Main application struct containing all necessary components
pub struct App {
    config: Config,
    db_pool: sqlx::PgPool,
    banner_api: Arc<BannerApi>,
    app_state: AppState,
    banner_state: BannerState,
    service_manager: ServiceManager,
}

impl App {
    /// Create a new App instance with all necessary components initialized
    pub async fn new() -> Result<Self, anyhow::Error> {
        // Load configuration
        let config: Config = Figment::new()
            .merge(Env::raw().map(|k| {
                if k == UncasedStr::new("RAILWAY_DEPLOYMENT_DRAINING_SECONDS") {
                    "SHUTDOWN_TIMEOUT".into()
                } else {
                    k.into()
                }
            }))
            .extract()
            .expect("Failed to load config");

        // Check if the database URL is via private networking
        let is_private = config.database_url.contains("railway.internal");
        let slow_threshold = Duration::from_millis(if is_private { 200 } else { 500 });

        // Create database connection pool
        let db_pool = PgPoolOptions::new()
            .min_connections(0)
            .max_connections(4)
            .acquire_slow_threshold(slow_threshold)
            .acquire_timeout(Duration::from_secs(4))
            .idle_timeout(Duration::from_secs(60 * 2))
            .max_lifetime(Duration::from_secs(60 * 30))
            .connect(&config.database_url)
            .await
            .expect("Failed to create database pool");

        info!(
            is_private = is_private,
            slow_threshold = format!("{:.2?}", slow_threshold),
            "database pool established"
        );

        // Run database migrations
        info!("Running database migrations...");
        sqlx::migrate!("./migrations")
            .run(&db_pool)
            .await
            .expect("Failed to run database migrations");
        info!("Database migrations completed successfully");

        // Create BannerApi and AppState
        let banner_api = BannerApi::new_with_config(
            config.banner_base_url.clone(),
            config.rate_limiting.clone().into(),
        )
        .expect("Failed to create BannerApi");

        let banner_api_arc = Arc::new(banner_api);
        let app_state = AppState::new(banner_api_arc.clone(), db_pool.clone());

        // Create BannerState for web service
        let banner_state = BannerState {};

        Ok(App {
            config,
            db_pool,
            banner_api: banner_api_arc,
            app_state,
            banner_state,
            service_manager: ServiceManager::new(),
        })
    }

    /// Setup and register services based on enabled service list
    pub fn setup_services(&mut self, services: &[ServiceName]) -> Result<(), anyhow::Error> {
        // Register enabled services with the manager
        if services.contains(&ServiceName::Web) {
            let web_service =
                Box::new(WebService::new(self.config.port, self.banner_state.clone()));
            self.service_manager
                .register_service(ServiceName::Web.as_str(), web_service);
        }

        if services.contains(&ServiceName::Scraper) {
            let scraper_service = Box::new(ScraperService::new(
                self.db_pool.clone(),
                self.banner_api.clone(),
            ));
            self.service_manager
                .register_service(ServiceName::Scraper.as_str(), scraper_service);
        }

        // Check if any services are enabled
        if !self.service_manager.has_services() && !services.contains(&ServiceName::Bot) {
            error!("No services enabled. Cannot start application.");
            return Err(anyhow::anyhow!("No services enabled"));
        }

        Ok(())
    }

    /// Setup bot service if enabled
    pub async fn setup_bot_service(&mut self) -> Result<(), anyhow::Error> {
        use std::sync::Arc;
        use tokio::sync::{Mutex, broadcast};

        // Create shutdown channel for status update task
        let (status_shutdown_tx, status_shutdown_rx) = broadcast::channel(1);
        let status_task_handle = Arc::new(Mutex::new(None));

        let client = BotService::create_client(
            &self.config,
            self.app_state.clone(),
            status_task_handle.clone(),
            status_shutdown_rx,
        )
        .await
        .expect("Failed to create Discord client");

        let bot_service = Box::new(BotService::new(
            client,
            status_task_handle,
            status_shutdown_tx,
        ));

        self.service_manager
            .register_service(ServiceName::Bot.as_str(), bot_service);
        Ok(())
    }

    /// Start all registered services
    pub fn start_services(&mut self) {
        self.service_manager.spawn_all();
    }

    /// Run the application and handle shutdown signals
    pub async fn run(self) -> ExitCode {
        use crate::signals::handle_shutdown_signals;
        handle_shutdown_signals(self.service_manager, self.config.shutdown_timeout).await
    }

    /// Get a reference to the configuration
    pub fn config(&self) -> &Config {
        &self.config
    }
}
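A hedged sketch of how this struct is presumably driven from `main`; the crate/module paths and the hard-coded service list are assumptions, and only the method names come from the file above:

```rust
use banner::app::App; // assumed module path
use banner::cli::ServiceName; // in practice this list comes from the CLI

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let services = [ServiceName::Bot, ServiceName::Web, ServiceName::Scraper];

    let mut app = App::new().await?; // config, pool, migrations, BannerApi
    app.setup_services(&services)?; // register web + scraper
    app.setup_bot_service().await?; // register the Discord bot
    app.start_services(); // spawn everything

    let _exit = app.run().await; // blocks until a shutdown signal, then drains
    Ok(())
}
```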
@@ -1,48 +0,0 @@
-//! Application state shared across components (bot, web, scheduler).
-
-use crate::banner::BannerApi;
-use crate::banner::Course;
-use anyhow::Result;
-use redis::AsyncCommands;
-use redis::Client;
-use std::sync::Arc;
-
-#[derive(Clone, Debug)]
-pub struct AppState {
-    pub banner_api: Arc<BannerApi>,
-    pub redis: Arc<Client>,
-}
-
-impl AppState {
-    pub fn new(
-        banner_api: Arc<BannerApi>,
-        redis_url: &str,
-    ) -> Result<Self, Box<dyn std::error::Error + Send + Sync>> {
-        let redis_client = Client::open(redis_url)?;
-
-        Ok(Self {
-            banner_api,
-            redis: Arc::new(redis_client),
-        })
-    }
-
-    /// Get a course by CRN with Redis cache fallback to Banner API
-    pub async fn get_course_or_fetch(&self, term: &str, crn: &str) -> Result<Course> {
-        let mut conn = self.redis.get_multiplexed_async_connection().await?;
-
-        let key = format!("class:{crn}");
-        if let Some(serialized) = conn.get::<_, Option<String>>(&key).await? {
-            let course: Course = serde_json::from_str(&serialized)?;
-            return Ok(course);
-        }
-
-        // Fallback: fetch from Banner API
-        if let Some(course) = self.banner_api.get_course_by_crn(term, crn).await? {
-            let serialized = serde_json::to_string(&course)?;
-            let _: () = conn.set(&key, serialized).await?;
-            return Ok(course);
-        }
-
-        Err(anyhow::anyhow!("Course not found for CRN {crn}"))
-    }
-}
@@ -1,72 +1,117 @@
|
|||||||
//! Main Banner API client implementation.
|
//! Main Banner API client implementation.
|
||||||
|
|
||||||
use crate::banner::{models::*, query::SearchQuery, session::SessionManager, util::user_agent};
|
use std::{
|
||||||
use anyhow::{Context, Result};
|
collections::{HashMap, VecDeque},
|
||||||
use axum::http::HeaderValue;
|
sync::{Arc, Mutex},
|
||||||
use reqwest::Client;
|
time::Instant,
|
||||||
use serde_json;
|
};
|
||||||
|
|
||||||
use tracing::{error, info};
|
use crate::banner::{
|
||||||
|
BannerSession, SessionPool, create_shared_rate_limiter,
|
||||||
|
errors::BannerApiError,
|
||||||
|
json::parse_json_with_context,
|
||||||
|
middleware::TransparentMiddleware,
|
||||||
|
models::*,
|
||||||
|
nonce,
|
||||||
|
query::SearchQuery,
|
||||||
|
rate_limit_middleware::RateLimitMiddleware,
|
||||||
|
rate_limiter::{RateLimitConfig, SharedRateLimiter},
|
||||||
|
util::user_agent,
|
||||||
|
};
|
||||||
|
use anyhow::{Context, Result, anyhow};
|
||||||
|
use cookie::Cookie;
|
||||||
|
use dashmap::DashMap;
|
||||||
|
use http::HeaderValue;
|
||||||
|
use reqwest::{Client, Request, Response};
|
||||||
|
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
|
||||||
|
use serde_json;
|
||||||
|
use tl;
|
||||||
|
use tracing::{Level, Metadata, Span, debug, error, field::ValueSet, info, span, trace, warn};
|
||||||
|
|
||||||
/// Main Banner API client.
|
/// Main Banner API client.
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct BannerApi {
|
pub struct BannerApi {
|
||||||
sessions: SessionManager,
|
pub sessions: SessionPool,
|
||||||
http: Client,
|
http: ClientWithMiddleware,
|
||||||
base_url: String,
|
base_url: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
impl BannerApi {
|
impl BannerApi {
|
||||||
/// Creates a new Banner API client.
|
/// Creates a new Banner API client.
|
||||||
pub fn new(base_url: String) -> Result<Self> {
|
pub fn new(base_url: String) -> Result<Self> {
|
||||||
let http = Client::builder()
|
Self::new_with_config(base_url, RateLimitConfig::default())
|
||||||
.cookie_store(true)
|
}
|
||||||
.user_agent(user_agent())
|
|
||||||
.tcp_keepalive(Some(std::time::Duration::from_secs(60 * 5)))
|
|
||||||
.read_timeout(std::time::Duration::from_secs(10))
|
|
||||||
.connect_timeout(std::time::Duration::from_secs(10))
|
|
||||||
.timeout(std::time::Duration::from_secs(30))
|
|
||||||
.build()
|
|
||||||
.context("Failed to create HTTP client")?;
|
|
||||||
|
|
||||||
let session_manager = SessionManager::new(base_url.clone(), http.clone());
|
/// Creates a new Banner API client with custom rate limiting configuration.
|
||||||
|
pub fn new_with_config(base_url: String, rate_limit_config: RateLimitConfig) -> Result<Self> {
|
||||||
|
let rate_limiter = create_shared_rate_limiter(Some(rate_limit_config));
|
||||||
|
|
||||||
|
let http = ClientBuilder::new(
|
||||||
|
Client::builder()
|
||||||
|
.cookie_store(false)
|
||||||
|
.user_agent(user_agent())
|
||||||
|
.tcp_keepalive(Some(std::time::Duration::from_secs(60 * 5)))
|
||||||
|
.read_timeout(std::time::Duration::from_secs(10))
|
||||||
|
.connect_timeout(std::time::Duration::from_secs(10))
|
||||||
|
.timeout(std::time::Duration::from_secs(30))
|
||||||
|
.build()
|
||||||
|
.context("Failed to create HTTP client")?,
|
||||||
|
)
|
||||||
|
.with(TransparentMiddleware)
|
||||||
|
.with(RateLimitMiddleware::new(rate_limiter.clone()))
|
||||||
|
.build();
|
||||||
|
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
sessions: session_manager,
|
sessions: SessionPool::new(http.clone(), base_url.clone()),
|
||||||
http,
|
http,
|
||||||
base_url,
|
base_url,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
/// Validates offset parameter for search methods.
|
||||||
/// Sets up the API client by initializing session cookies.
|
fn validate_offset(offset: i32) -> Result<()> {
|
||||||
pub async fn setup(&self) -> Result<()> {
|
if offset <= 0 {
|
||||||
info!(base_url = self.base_url, "setting up banner api client");
|
Err(anyhow::anyhow!("Offset must be greater than 0"))
|
||||||
let result = self.sessions.setup().await;
|
} else {
|
||||||
match &result {
|
Ok(())
|
||||||
Ok(()) => info!("banner api client setup completed successfully"),
|
|
||||||
Err(e) => error!(error = ?e, "banner api client setup failed"),
|
|
||||||
}
|
}
|
||||||
result
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Retrieves a list of terms from the Banner API.
|
/// Builds common search parameters for list endpoints.
|
||||||
pub async fn get_terms(
|
fn build_list_params(
|
||||||
&self,
|
&self,
|
||||||
search: &str,
|
search: &str,
|
||||||
page: i32,
|
term: &str,
|
||||||
|
offset: i32,
|
||||||
max_results: i32,
|
max_results: i32,
|
||||||
) -> Result<Vec<BannerTerm>> {
|
session_id: &str,
|
||||||
if page <= 0 {
|
) -> Vec<(&str, String)> {
|
||||||
return Err(anyhow::anyhow!("Page must be greater than 0"));
|
vec![
|
||||||
}
|
("searchTerm", search.to_string()),
|
||||||
|
("term", term.to_string()),
|
||||||
|
("offset", offset.to_string()),
|
||||||
|
("max", max_results.to_string()),
|
||||||
|
("uniqueSessionId", session_id.to_string()),
|
||||||
|
("_", nonce()),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
let url = format!("{}/classSearch/getTerms", self.base_url);
|
/// Makes a GET request to a list endpoint and parses JSON response.
|
||||||
let params = [
|
async fn get_list_endpoint<T>(
|
||||||
("searchTerm", search),
|
&self,
|
||||||
("offset", &page.to_string()),
|
endpoint: &str,
|
||||||
("max", &max_results.to_string()),
|
search: &str,
|
||||||
("_", &SessionManager::nonce()),
|
term: &str,
|
||||||
];
|
offset: i32,
|
||||||
|
max_results: i32,
|
||||||
|
) -> Result<Vec<T>>
|
||||||
|
where
|
||||||
|
T: for<'de> serde::Deserialize<'de>,
|
||||||
|
{
|
||||||
|
Self::validate_offset(offset)?;
|
||||||
|
|
||||||
|
let session = self.sessions.acquire(term.parse()?).await?;
|
||||||
|
let url = format!("{}/classSearch/{}", self.base_url, endpoint);
|
||||||
|
let params = self.build_list_params(search, term, offset, max_results, &session.id());
|
||||||
|
|
||||||
let response = self
|
let response = self
|
||||||
.http
|
.http
|
||||||
@@ -74,14 +119,115 @@ impl BannerApi {
|
|||||||
.query(¶ms)
|
.query(¶ms)
|
||||||
.send()
|
.send()
|
||||||
.await
|
.await
|
||||||
.context("Failed to get terms")?;
|
.with_context(|| format!("Failed to get {}", endpoint))?;
|
||||||
|
|
||||||
let terms: Vec<BannerTerm> = response
|
let data: Vec<T> = response
|
||||||
.json()
|
.json()
|
||||||
.await
|
.await
|
||||||
.context("Failed to parse terms response")?;
|
.with_context(|| format!("Failed to parse {} response", endpoint))?;
|
||||||
|
|
||||||
Ok(terms)
|
Ok(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Builds search parameters for course search methods.
|
||||||
|
fn build_search_params(
|
||||||
|
&self,
|
||||||
|
query: &SearchQuery,
|
||||||
|
term: &str,
|
||||||
|
session_id: &str,
|
||||||
|
sort: &str,
|
||||||
|
sort_descending: bool,
|
||||||
|
) -> HashMap<String, String> {
|
||||||
|
let mut params = query.to_params();
|
||||||
|
params.insert("txt_term".to_string(), term.to_string());
|
||||||
|
params.insert("uniqueSessionId".to_string(), session_id.to_string());
|
||||||
|
params.insert("sortColumn".to_string(), sort.to_string());
|
||||||
|
params.insert(
|
||||||
|
"sortDirection".to_string(),
|
||||||
|
if sort_descending { "desc" } else { "asc" }.to_string(),
|
||||||
|
);
|
||||||
|
params.insert("startDatepicker".to_string(), String::new());
|
||||||
|
params.insert("endDatepicker".to_string(), String::new());
|
||||||
|
params
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Performs a course search and handles common response processing.
|
||||||
|
#[tracing::instrument(
|
||||||
|
skip(self, query),
|
||||||
|
fields(
|
||||||
|
term = %term,
|
||||||
|
subject = %query.get_subject().unwrap_or(&"all".to_string())
|
||||||
|
)
|
||||||
|
)]
|
||||||
|
async fn perform_search(
|
||||||
|
&self,
|
||||||
|
term: &str,
|
||||||
|
query: &SearchQuery,
|
||||||
|
sort: &str,
|
||||||
|
sort_descending: bool,
|
||||||
|
) -> Result<SearchResult, BannerApiError> {
|
||||||
|
let mut session = self.sessions.acquire(term.parse()?).await?;
|
||||||
|
|
||||||
|
if session.been_used() {
|
||||||
|
self.http
|
||||||
|
.post(format!("{}/classSearch/resetDataForm", self.base_url))
|
||||||
|
.header("Cookie", session.cookie())
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|e| BannerApiError::RequestFailed(e.into()))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
session.touch();
|
||||||
|
|
||||||
|
let params = self.build_search_params(query, term, &session.id(), sort, sort_descending);
|
||||||
|
|
||||||
|
debug!(
|
||||||
|
term = term,
|
||||||
|
subject = query.get_subject().map(|s| s.as_str()).unwrap_or("all"),
|
||||||
|
max_results = query.get_max_results(),
|
||||||
|
"Searching for courses"
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = self
|
||||||
|
.http
|
||||||
|
.get(format!("{}/searchResults/searchResults", self.base_url))
|
||||||
|
.header("Cookie", session.cookie())
|
||||||
|
.query(¶ms)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("Failed to search courses")?;
|
||||||
|
|
||||||
|
let status = response.status();
|
||||||
|
let url = response.url().clone();
|
||||||
|
let body = response
|
||||||
|
.text()
|
||||||
|
.await
|
||||||
|
.with_context(|| format!("Failed to read body (status={status})"))?;
|
||||||
|
|
||||||
|
let search_result: SearchResult = parse_json_with_context(&body).map_err(|e| {
|
||||||
|
BannerApiError::RequestFailed(anyhow!(
|
||||||
|
"Failed to parse search response (status={status}, url={url}): {e}"
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Check for signs of an invalid session
|
||||||
|
if search_result.path_mode.is_none() {
|
||||||
|
return Err(BannerApiError::InvalidSession(
|
||||||
|
"Search result path mode is none".to_string(),
|
||||||
|
));
|
||||||
|
} else if search_result.data.is_none() {
|
||||||
|
return Err(BannerApiError::InvalidSession(
|
||||||
|
"Search result data is none".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
if !search_result.success {
|
||||||
|
return Err(BannerApiError::RequestFailed(anyhow!(
|
||||||
|
"Search marked as unsuccessful by Banner API"
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(search_result)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Retrieves a list of subjects from the Banner API.
|
/// Retrieves a list of subjects from the Banner API.
|
||||||
@@ -92,35 +238,8 @@ impl BannerApi {
|
|||||||
offset: i32,
|
offset: i32,
|
||||||
max_results: i32,
|
max_results: i32,
|
||||||
) -> Result<Vec<Pair>> {
|
) -> Result<Vec<Pair>> {
|
||||||
if offset <= 0 {
|
self.get_list_endpoint("get_subject", search, term, offset, max_results)
|
||||||
return Err(anyhow::anyhow!("Offset must be greater than 0"));
|
|
||||||
}
|
|
||||||
|
|
||||||
let session_id = self.sessions.ensure_session()?;
|
|
||||||
let url = format!("{}/classSearch/get_subject", self.base_url);
|
|
||||||
let params = [
|
|
||||||
("searchTerm", search),
|
|
||||||
("term", term),
|
|
||||||
("offset", &offset.to_string()),
|
|
||||||
("max", &max_results.to_string()),
|
|
||||||
("uniqueSessionId", &session_id),
|
|
||||||
("_", &SessionManager::nonce()),
|
|
||||||
];
|
|
||||||
|
|
||||||
let response = self
|
|
||||||
.http
|
|
||||||
.get(&url)
|
|
||||||
.query(¶ms)
|
|
||||||
.send()
|
|
||||||
.await
|
.await
|
||||||
.context("Failed to get subjects")?;
|
|
||||||
|
|
||||||
let subjects: Vec<Pair> = response
|
|
||||||
.json()
|
|
||||||
.await
|
|
||||||
.context("Failed to parse subjects response")?;
|
|
||||||
|
|
||||||
Ok(subjects)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Retrieves a list of instructors from the Banner API.
|
/// Retrieves a list of instructors from the Banner API.
|
||||||
@@ -131,35 +250,8 @@ impl BannerApi {
|
|||||||
offset: i32,
|
offset: i32,
|
||||||
max_results: i32,
|
max_results: i32,
|
||||||
) -> Result<Vec<Instructor>> {
|
) -> Result<Vec<Instructor>> {
|
||||||
if offset <= 0 {
|
self.get_list_endpoint("get_instructor", search, term, offset, max_results)
|
||||||
return Err(anyhow::anyhow!("Offset must be greater than 0"));
|
|
||||||
}
|
|
||||||
|
|
||||||
let session_id = self.sessions.ensure_session()?;
|
|
||||||
let url = format!("{}/classSearch/get_instructor", self.base_url);
|
|
||||||
let params = [
|
|
||||||
("searchTerm", search),
|
|
||||||
("term", term),
|
|
||||||
("offset", &offset.to_string()),
|
|
||||||
("max", &max_results.to_string()),
|
|
||||||
("uniqueSessionId", &session_id),
|
|
||||||
("_", &SessionManager::nonce()),
|
|
||||||
];
|
|
||||||
|
|
||||||
let response = self
|
|
||||||
.http
|
|
||||||
.get(&url)
|
|
||||||
.query(¶ms)
|
|
||||||
.send()
|
|
||||||
.await
|
.await
|
||||||
.context("Failed to get instructors")?;
|
|
||||||
|
|
||||||
let instructors: Vec<Instructor> = response
|
|
||||||
.json()
|
|
||||||
.await
|
|
||||||
.context("Failed to parse instructors response")?;
|
|
||||||
|
|
||||||
Ok(instructors)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Retrieves a list of campuses from the Banner API.
|
/// Retrieves a list of campuses from the Banner API.
|
||||||
@@ -170,35 +262,8 @@ impl BannerApi {
|
|||||||
offset: i32,
|
offset: i32,
|
||||||
max_results: i32,
|
max_results: i32,
|
||||||
) -> Result<Vec<Pair>> {
|
) -> Result<Vec<Pair>> {
|
||||||
if offset <= 0 {
|
self.get_list_endpoint("get_campus", search, term, offset, max_results)
|
||||||
return Err(anyhow::anyhow!("Offset must be greater than 0"));
|
|
||||||
}
|
|
||||||
|
|
||||||
let session_id = self.sessions.ensure_session()?;
|
|
||||||
let url = format!("{}/classSearch/get_campus", self.base_url);
|
|
||||||
let params = [
|
|
||||||
("searchTerm", search),
|
|
||||||
("term", term),
|
|
||||||
("offset", &offset.to_string()),
|
|
||||||
("max", &max_results.to_string()),
|
|
||||||
("uniqueSessionId", &session_id),
|
|
||||||
("_", &SessionManager::nonce()),
|
|
||||||
];
|
|
||||||
|
|
||||||
let response = self
|
|
||||||
.http
|
|
||||||
.get(&url)
|
|
||||||
.query(¶ms)
|
|
||||||
.send()
|
|
||||||
.await
|
.await
|
||||||
.context("Failed to get campuses")?;
|
|
||||||
|
|
||||||
let campuses: Vec<Pair> = response
|
|
||||||
.json()
|
|
||||||
.await
|
|
||||||
.context("Failed to parse campuses response")?;
|
|
||||||
|
|
||||||
Ok(campuses)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Retrieves meeting time information for a course.
|
/// Retrieves meeting time information for a course.
|
||||||
@@ -259,95 +324,33 @@ impl BannerApi {
         query: &SearchQuery,
         sort: &str,
         sort_descending: bool,
-    ) -> Result<SearchResult> {
-        self.sessions.reset_data_form().await?;
-
-        let session_id = self.sessions.ensure_session()?;
-        let mut params = query.to_params();
-
-        // Add additional parameters
-        params.insert("txt_term".to_string(), term.to_string());
-        params.insert("uniqueSessionId".to_string(), session_id);
-        params.insert("sortColumn".to_string(), sort.to_string());
-        params.insert(
-            "sortDirection".to_string(),
-            if sort_descending { "desc" } else { "asc" }.to_string(),
-        );
-        params.insert("startDatepicker".to_string(), String::new());
-        params.insert("endDatepicker".to_string(), String::new());
-
-        let url = format!("{}/searchResults/searchResults", self.base_url);
-        let response = self
-            .http
-            .get(&url)
-            .query(&params)
-            .send()
-            .await
-            .context("Failed to search courses")?;
-
-        let search_result: SearchResult = response
-            .json()
-            .await
-            .context("Failed to parse search response")?;
-
-        if !search_result.success {
-            return Err(anyhow::anyhow!(
-                "Search marked as unsuccessful by Banner API"
-            ));
-        }
-
-        Ok(search_result)
-    }
-
-    /// Selects a term for the current session.
-    pub async fn select_term(&self, term: &str) -> Result<()> {
-        self.sessions.select_term(term).await
+    ) -> Result<SearchResult, BannerApiError> {
+        self.perform_search(term, query, sort, sort_descending)
+            .await
     }
 
     /// Retrieves a single course by CRN by issuing a minimal search
-    pub async fn get_course_by_crn(&self, term: &str, crn: &str) -> Result<Option<Course>> {
-        self.sessions.reset_data_form().await?;
-        // Ensure session is configured for this term
-        self.select_term(term).await?;
-
-        let session_id = self.sessions.ensure_session()?;
+    pub async fn get_course_by_crn(
+        &self,
+        term: &str,
+        crn: &str,
+    ) -> Result<Option<Course>, BannerApiError> {
+        debug!(term = term, crn = crn, "Looking up course by CRN");
 
         let query = SearchQuery::new()
             .course_reference_number(crn)
             .max_results(1);
 
-        let mut params = query.to_params();
-        params.insert("txt_term".to_string(), term.to_string());
-        params.insert("uniqueSessionId".to_string(), session_id);
-        params.insert("sortColumn".to_string(), "subjectDescription".to_string());
-        params.insert("sortDirection".to_string(), "asc".to_string());
-        params.insert("startDatepicker".to_string(), String::new());
-        params.insert("endDatepicker".to_string(), String::new());
-
-        let url = format!("{}/searchResults/searchResults", self.base_url);
-        let response = self
-            .http
-            .get(&url)
-            .query(&params)
-            .send()
-            .await
-            .context("Failed to search course by CRN")?;
-
-        let status = response.status();
-        let body = response
-            .text()
-            .await
-            .with_context(|| format!("Failed to read body (status={status})"))?;
-
-        let search_result: SearchResult = parse_json_with_context(&body).map_err(|e| {
-            anyhow::anyhow!(
-                "Failed to parse search response for CRN (status={status}, url={url}): {e}",
-            )
-        })?;
-
-        if !search_result.success {
-            return Err(anyhow::anyhow!(
-                "Search marked as unsuccessful by Banner API"
+        let search_result = self
+            .perform_search(term, &query, "subjectDescription", false)
+            .await?;
+
+        // Additional validation for CRN search
+        if search_result.path_mode == Some("registration".to_string())
+            && search_result.data.is_none()
+        {
+            return Err(BannerApiError::InvalidSession(
+                "Search result path mode is registration and data is none".to_string(),
             ));
         }
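For reference, a minimal sketch of the new call path at a call site; the `api` binding and the term/CRN literals are illustrative, not part of this diff:

async fn lookup(api: &BannerApi) -> Result<(), BannerApiError> {
    // A CRN search returns at most one course (max_results(1) above).
    if let Some(course) = api.get_course_by_crn("202510", "27294").await? {
        // Field access depends on the Course model; Debug-print to stay generic.
        println!("{course:?}");
    }
    Ok(())
}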
@@ -381,36 +384,3 @@ impl BannerApi {
         Ok(details)
     }
 }
-
-/// Attempt to parse JSON and, on failure, include a contextual snippet around the error location
-fn parse_json_with_context<T: serde::de::DeserializeOwned>(body: &str) -> Result<T> {
-    match serde_json::from_str::<T>(body) {
-        Ok(value) => Ok(value),
-        Err(err) => {
-            let (line, column) = (err.line(), err.column());
-            let snippet = build_error_snippet(body, line, column, 120);
-            Err(anyhow::anyhow!(
-                "{err} at line {line}, column {column}\nSnippet:\n{snippet}",
-            ))
-        }
-    }
-}
-
-fn build_error_snippet(body: &str, line: usize, column: usize, max_len: usize) -> String {
-    let target_line = body.lines().nth(line.saturating_sub(1)).unwrap_or("");
-    if target_line.is_empty() {
-        return String::new();
-    }
-
-    let start = column.saturating_sub(max_len.min(column));
-    let end = (column + max_len).min(target_line.len());
-    let slice = &target_line[start..end];
-
-    let mut indicator = String::new();
-    if column > start {
-        indicator.push_str(&" ".repeat(column - start - 1));
-        indicator.push('^');
-    }
-
-    format!("{slice}\n{indicator}")
-}
src/banner/errors.rs (new file, 11 lines)
@@ -0,0 +1,11 @@
//! Error types for the Banner API client.

use thiserror::Error;

#[derive(Debug, thiserror::Error)]
pub enum BannerApiError {
    #[error("Banner session is invalid or expired: {0}")]
    InvalidSession(String),
    #[error(transparent)]
    RequestFailed(#[from] anyhow::Error),
}
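The two-variant enum lets callers separate a dead Banner session from ordinary transport or parse failures. A minimal sketch of matching on it at a call site (the `api` and `query` bindings and the argument values are assumptions):

match api.search("202510", &query, "subjectDescription", false).await {
    Ok(result) => println!("{} courses", result.total_count),
    Err(BannerApiError::InvalidSession(msg)) => {
        // The session went stale server-side; discard it and re-acquire.
        eprintln!("invalid session: {msg}");
    }
    Err(BannerApiError::RequestFailed(err)) => {
        // Everything else (transport, JSON shape) arrives via the transparent anyhow variant.
        eprintln!("request failed: {err:#}");
    }
}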
src/banner/json.rs (new file, 415 lines)
@@ -0,0 +1,415 @@
//! JSON parsing utilities for the Banner API client.

use anyhow::Result;
use serde_json::{self, Value};

/// Attempt to parse JSON and, on failure, include a contextual snippet of the
/// line where the error occurred.
///
/// In debug builds, this provides detailed context including the full JSON object
/// containing the error and type mismatch information. In release builds, it shows
/// a minimal snippet to prevent dumping huge JSON bodies to production logs.
pub fn parse_json_with_context<T: serde::de::DeserializeOwned>(body: &str) -> Result<T> {
    let jd = &mut serde_json::Deserializer::from_str(body);
    match serde_path_to_error::deserialize(jd) {
        Ok(value) => Ok(value),
        Err(err) => {
            let inner_err = err.inner();
            let (line, column) = (inner_err.line(), inner_err.column());
            let path = err.path().to_string();

            let msg = inner_err.to_string();
            let loc = format!(" at line {line} column {column}");
            let msg_without_loc = msg.strip_suffix(&loc).unwrap_or(&msg).to_string();

            // Build error message differently for debug vs release builds
            let final_err = if cfg!(debug_assertions) {
                // Debug mode: provide detailed context
                let type_info = parse_type_mismatch(&msg_without_loc);
                let context = extract_json_object_at_path(body, err.path(), line, column);

                let mut err_msg = String::new();
                if !path.is_empty() && path != "." {
                    err_msg.push_str(&format!("for path '{}'\n", path));
                }
                err_msg.push_str(&format!(
                    "({}) at line {} column {}\n\n",
                    type_info, line, column
                ));
                err_msg.push_str(&context);

                err_msg
            } else {
                // Release mode: minimal snippet to keep logs concise
                let snippet = build_error_snippet(body, line, column, 20);

                let mut err_msg = String::new();
                if !path.is_empty() && path != "." {
                    err_msg.push_str(&format!("for path '{}' ", path));
                }
                err_msg.push_str(&format!(
                    "({}) at line {} column {}",
                    msg_without_loc, line, column
                ));
                err_msg.push_str(&format!("\n{}", snippet));

                err_msg
            };

            Err(anyhow::anyhow!(final_err))
        }
    }
}

/// Extract type mismatch information from a serde error message.
///
/// Parses error messages like "invalid type: null, expected a string" to extract
/// the expected and actual types for clearer error reporting.
///
/// Returns a formatted string like "(expected a string, got null)" or the original
/// message if parsing fails.
fn parse_type_mismatch(error_msg: &str) -> String {
    // Try to parse "invalid type: X, expected Y" format
    if let Some(invalid_start) = error_msg.find("invalid type: ") {
        let after_prefix = &error_msg[invalid_start + "invalid type: ".len()..];

        if let Some(comma_pos) = after_prefix.find(", expected ") {
            let actual_type = &after_prefix[..comma_pos];
            let expected_part = &after_prefix[comma_pos + ", expected ".len()..];

            // Clean up expected part (remove " at line X column Y" if present)
            let expected_type = expected_part
                .split(" at line ")
                .next()
                .unwrap_or(expected_part)
                .trim();

            return format!("expected {}, got {}", expected_type, actual_type);
        }
    }

    // Try to parse "expected X at line Y" format
    if error_msg.starts_with("expected ")
        && let Some(expected_part) = error_msg.split(" at line ").next()
    {
        return expected_part.to_string();
    }

    // Fallback: return original message without location info
    error_msg.to_string()
}

/// Extract and pretty-print the JSON object/array containing the parse error.
///
/// This function navigates to the error location using the serde path and extracts
/// the parent object or array to provide better context for debugging.
///
/// # Arguments
/// * `body` - The raw JSON string
/// * `path` - The serde path to the error (e.g., "data[0].faculty[0].displayName")
/// * `line` - Line number of the error (for fallback)
/// * `column` - Column number of the error (for fallback)
///
/// # Returns
/// A formatted string containing the JSON object with the error, or a fallback snippet
fn extract_json_object_at_path(
    body: &str,
    path: &serde_path_to_error::Path,
    line: usize,
    column: usize,
) -> String {
    // Try to parse the entire JSON structure
    let root_value: Value = match serde_json::from_str(body) {
        Ok(v) => v,
        Err(_) => {
            // If we can't parse the JSON at all, fall back to line snippet
            return build_error_snippet(body, line, column, 20);
        }
    };

    // Navigate to the error location using the path
    let path_str = path.to_string();
    let segments = parse_path_segments(&path_str);

    let (context_value, context_name) = navigate_to_context(&root_value, &segments);

    // Pretty-print the context value with limited depth to avoid huge output
    match serde_json::to_string_pretty(&context_value) {
        Ok(pretty) => {
            // Limit output to ~50 lines to prevent log spam
            let lines: Vec<&str> = pretty.lines().collect();
            let truncated = if lines.len() > 50 {
                let mut result = lines[..47].join("\n");
                result.push_str("\n    ... (truncated, ");
                result.push_str(&(lines.len() - 47).to_string());
                result.push_str(" more lines)");
                result
            } else {
                pretty
            };

            format!("{} at '{}':\n{}", context_name, path_str, truncated)
        }
        Err(_) => {
            // Fallback to simple snippet if pretty-print fails
            build_error_snippet(body, line, column, 20)
        }
    }
}

/// Parse a JSON path string into segments for navigation.
///
/// Converts paths like "data[0].faculty[1].displayName" into a sequence of
/// object keys and array indices.
fn parse_path_segments(path: &str) -> Vec<PathSegment> {
    let mut segments = Vec::new();
    let mut current = String::new();
    let mut in_bracket = false;

    for ch in path.chars() {
        match ch {
            '.' if !in_bracket => {
                if !current.is_empty() {
                    segments.push(PathSegment::Key(current.clone()));
                    current.clear();
                }
            }
            '[' => {
                if !current.is_empty() {
                    segments.push(PathSegment::Key(current.clone()));
                    current.clear();
                }
                in_bracket = true;
            }
            ']' => {
                if in_bracket && !current.is_empty() {
                    if let Ok(index) = current.parse::<usize>() {
                        segments.push(PathSegment::Index(index));
                    }
                    current.clear();
                }
                in_bracket = false;
            }
            _ => current.push(ch),
        }
    }

    if !current.is_empty() {
        segments.push(PathSegment::Key(current));
    }

    segments
}

/// Represents a segment in a JSON path (either an object key or array index).
#[derive(Debug)]
enum PathSegment {
    Key(String),
    Index(usize),
}

/// Navigate through a JSON value using path segments and return the appropriate context.
///
/// This function walks the JSON structure and returns the parent object/array that
/// contains the error, providing meaningful context for debugging.
///
/// # Returns
/// A tuple of (context_value, description) where context_value is the JSON to display
/// and description is a human-readable name for what we're showing.
fn navigate_to_context<'a>(
    mut current: &'a Value,
    segments: &[PathSegment],
) -> (&'a Value, &'static str) {
    // If path is empty or just root, return the whole value
    if segments.is_empty() {
        return (current, "Root object");
    }

    // Try to navigate to the parent of the error location
    // We want to show the containing object/array, not just the failing field
    let parent_depth = segments.len().saturating_sub(1);

    for (i, segment) in segments.iter().enumerate() {
        // Stop one level before the end to show the parent context
        if i >= parent_depth {
            break;
        }

        match segment {
            PathSegment::Key(key) => {
                if let Some(next) = current.get(key) {
                    current = next;
                } else {
                    // Can't navigate further, return what we have
                    return (current, "Partial context (navigation stopped)");
                }
            }
            PathSegment::Index(idx) => {
                if let Some(next) = current.get(idx) {
                    current = next;
                } else {
                    return (current, "Partial context (index out of bounds)");
                }
            }
        }
    }

    (current, "Object containing error")
}

fn build_error_snippet(body: &str, line: usize, column: usize, context_len: usize) -> String {
    let target_line = body.lines().nth(line.saturating_sub(1)).unwrap_or("");
    if target_line.is_empty() {
        return "(empty line)".to_string();
    }

    // column is 1-based, convert to 0-based for slicing
    let error_idx = column.saturating_sub(1);

    let half_len = context_len / 2;
    let start = error_idx.saturating_sub(half_len);
    let end = (error_idx + half_len).min(target_line.len());

    let slice = &target_line[start..end];
    let indicator_pos = error_idx - start;

    let indicator = " ".repeat(indicator_pos) + "^";

    format!("...{slice}...\n   {indicator}")
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde::Deserialize;

    #[test]
    fn test_parse_type_mismatch_invalid_type() {
        let msg = "invalid type: null, expected a string at line 45 column 29";
        let result = parse_type_mismatch(msg);
        assert_eq!(result, "expected a string, got null");
    }

    #[test]
    fn test_parse_type_mismatch_expected() {
        let msg = "expected value at line 1 column 1";
        let result = parse_type_mismatch(msg);
        assert_eq!(result, "expected value");
    }

    #[test]
    fn test_parse_path_segments_simple() {
        let segments = parse_path_segments("data.name");
        assert_eq!(segments.len(), 2);
        match &segments[0] {
            PathSegment::Key(k) => assert_eq!(k, "data"),
            _ => panic!("Expected Key segment"),
        }
    }

    #[test]
    fn test_parse_path_segments_with_array() {
        let segments = parse_path_segments("data[0].faculty[1].displayName");
        assert_eq!(segments.len(), 5);
        match &segments[0] {
            PathSegment::Key(k) => assert_eq!(k, "data"),
            _ => panic!("Expected Key segment"),
        }
        match &segments[1] {
            PathSegment::Index(i) => assert_eq!(*i, 0),
            _ => panic!("Expected Index segment"),
        }
    }

    #[test]
    fn test_parse_json_with_context_null_value() {
        #[derive(Debug, Deserialize)]
        struct TestStruct {
            name: String,
        }

        let json = r#"{"name": null}"#;
        let result: Result<TestStruct> = parse_json_with_context(json);

        assert!(result.is_err());
        let err_msg = result.unwrap_err().to_string();

        // Should contain path info
        assert!(err_msg.contains("name"));

        // In debug mode, should contain detailed context
        if cfg!(debug_assertions) {
            assert!(err_msg.contains("expected"));
        }
    }

    #[test]
    fn test_navigate_to_context() {
        let json = r#"{"data": [{"faculty": [{"name": "John"}]}]}"#;
        let value: Value = serde_json::from_str(json).unwrap();

        let segments = parse_path_segments("data[0].faculty[0].name");
        let (context, _) = navigate_to_context(&value, &segments);

        // Should return the faculty[0] object (parent of 'name')
        assert!(context.is_object());
        assert!(context.get("name").is_some());
    }

    #[test]
    fn test_realistic_banner_error() {
        #[derive(Debug, Deserialize)]
        struct Course {
            #[allow(dead_code)]
            #[serde(rename = "courseTitle")]
            course_title: String,
            faculty: Vec<Faculty>,
        }

        #[derive(Debug, Deserialize)]
        struct Faculty {
            #[serde(rename = "displayName")]
            display_name: String,
            #[allow(dead_code)]
            email: String,
        }

        #[derive(Debug, Deserialize)]
        struct SearchResult {
            data: Vec<Course>,
        }

        // Simulate Banner API response with null faculty displayName
        // This mimics the actual error from SPN subject scrape
        let json = r#"{
            "data": [
                {
                    "courseTitle": "Spanish Conversation",
                    "faculty": [
                        {
                            "displayName": null,
                            "email": "instructor@utsa.edu"
                        }
                    ]
                }
            ]
        }"#;

        let result: Result<SearchResult> = parse_json_with_context(json);
        assert!(result.is_err());

        let err_msg = result.unwrap_err().to_string();
        println!("\n=== Error output in debug mode ===\n{}\n", err_msg);

        // Verify error contains key information
        assert!(err_msg.contains("data[0].faculty[0].displayName"));

        // In debug mode, should show detailed context
        if cfg!(debug_assertions) {
            // Should show type mismatch info
            assert!(err_msg.contains("expected") && err_msg.contains("got"));
            // Should show surrounding JSON context with the faculty object
            assert!(err_msg.contains("email"));
        }
    }
}
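A minimal sketch of the function at a call site; the `Entry` struct and the inline JSON are illustrative:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Entry {
    code: String,
    description: String,
}

fn demo() {
    // A displayName-style null: the error names the path ("[0].description")
    // and, in debug builds, pretty-prints the object containing it.
    let body = r#"[{"code": "11", "description": null}]"#;
    let parsed: anyhow::Result<Vec<Entry>> = parse_json_with_context(body);
    assert!(parsed.is_err());
    println!("{:#}", parsed.unwrap_err());
}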
src/banner/middleware.rs (new file, 75 lines)
@@ -0,0 +1,75 @@
//! HTTP middleware for the Banner API client.

use http::Extensions;
use reqwest::{Request, Response};
use reqwest_middleware::{Middleware, Next};
use tracing::{debug, trace, warn};

pub struct TransparentMiddleware;

/// Threshold for logging slow requests at DEBUG level (in milliseconds)
const SLOW_REQUEST_THRESHOLD_MS: u128 = 1000;

#[async_trait::async_trait]
impl Middleware for TransparentMiddleware {
    async fn handle(
        &self,
        req: Request,
        extensions: &mut Extensions,
        next: Next<'_>,
    ) -> std::result::Result<Response, reqwest_middleware::Error> {
        let method = req.method().to_string();
        let path = req.url().path().to_string();

        let start = std::time::Instant::now();
        let response_result = next.run(req, extensions).await;
        let duration = start.elapsed();

        match response_result {
            Ok(response) => {
                if response.status().is_success() {
                    let duration_ms = duration.as_millis();
                    if duration_ms >= SLOW_REQUEST_THRESHOLD_MS {
                        debug!(
                            method = method,
                            path = path,
                            status = response.status().as_u16(),
                            duration_ms = duration_ms,
                            "Request completed (slow)"
                        );
                    } else {
                        trace!(
                            method = method,
                            path = path,
                            status = response.status().as_u16(),
                            duration_ms = duration_ms,
                            "Request completed"
                        );
                    }
                    Ok(response)
                } else {
                    let e = response.error_for_status_ref().unwrap_err();
                    warn!(
                        method = method,
                        path = path,
                        error = ?e,
                        status = response.status().as_u16(),
                        duration_ms = duration.as_millis(),
                        "Request failed"
                    );
                    Ok(response)
                }
            }
            Err(error) => {
                warn!(
                    method = method,
                    path = path,
                    error = ?error,
                    duration_ms = duration.as_millis(),
                    "Request failed"
                );
                Err(error)
            }
        }
    }
}
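A minimal sketch of how the middleware stack might be assembled; the `build_client` function is an assumption, while `RateLimitMiddleware` and `SharedRateLimiter` come from the sibling modules added in this commit:

use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};

fn build_client(rate_limiter: SharedRateLimiter) -> ClientWithMiddleware {
    ClientBuilder::new(reqwest::Client::new())
        // Outermost middleware runs first: rate limiting happens before the
        // timing below, so logged durations exclude time spent queued.
        .with(RateLimitMiddleware::new(rate_limiter))
        .with(TransparentMiddleware)
        .build()
}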
@@ -5,16 +5,22 @@
 //! This module provides functionality to:
 //! - Search for courses and retrieve course information
 //! - Manage Banner API sessions and authentication
-//! - Scrape course data and cache it in Redis
 //! - Generate ICS files and calendar links
 
 pub mod api;
+pub mod errors;
+pub mod json;
+pub mod middleware;
 pub mod models;
 pub mod query;
+pub mod rate_limit_middleware;
+pub mod rate_limiter;
 pub mod session;
 pub mod util;
 
 pub use api::*;
+pub use errors::*;
 pub use models::*;
 pub use query::*;
+pub use rate_limiter::*;
 pub use session::*;
@@ -33,7 +33,7 @@ pub struct FacultyItem {
     #[serde(deserialize_with = "deserialize_string_to_u32")]
     pub course_reference_number: u32, // CRN, e.g 27294
     pub display_name: String, // "LastName, FirstName"
-    pub email_address: String, // e.g. FirstName.LastName@utsa.edu
+    pub email_address: Option<String>, // e.g. FirstName.LastName@utsa.edu
     pub primary_indicator: bool,
     pub term: String, // e.g "202420"
 }
@@ -42,11 +42,11 @@ pub struct FacultyItem {
 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct MeetingTime {
     pub start_date: String, // MM/DD/YYYY, e.g 08/26/2025
     pub end_date: String, // MM/DD/YYYY, e.g 08/26/2025
-    pub begin_time: String, // HHMM, e.g 1000
-    pub end_time: String, // HHMM, e.g 1100
+    pub begin_time: Option<String>, // HHMM, e.g 1000
+    pub end_time: Option<String>, // HHMM, e.g 1100
     pub category: String, // unknown meaning, e.g. 01, 02, etc
     pub class: String, // internal class name, e.g. net.hedtech.banner.general.overallMeetingTimeDecorator
     pub monday: bool, // true if the meeting time occurs on Monday
     pub tuesday: bool, // true if the meeting time occurs on Tuesday
@@ -55,15 +55,15 @@ pub struct MeetingTime {
     pub friday: bool, // true if the meeting time occurs on Friday
     pub saturday: bool, // true if the meeting time occurs on Saturday
     pub sunday: bool, // true if the meeting time occurs on Sunday
-    pub room: String, // e.g. 1238
+    pub room: Option<String>, // e.g. 1.238
     #[serde(deserialize_with = "deserialize_string_to_term")]
     pub term: Term, // e.g 202510
-    pub building: String, // e.g NPB
-    pub building_description: String, // e.g North Paseo Building
-    pub campus: String, // campus code, e.g 11
-    pub campus_description: String, // name of campus, e.g Main Campus
+    pub building: Option<String>, // e.g NPB
+    pub building_description: Option<String>, // e.g North Paseo Building
+    pub campus: Option<String>, // campus code, e.g 11
+    pub campus_description: Option<String>, // name of campus, e.g Main Campus
     pub course_reference_number: String, // CRN, e.g 27294
-    pub credit_hour_session: f64, // e.g. 30
+    pub credit_hour_session: Option<f64>, // e.g. 30
     pub hours_week: f64, // e.g. 30
     pub meeting_schedule_type: String, // e.g AFF
     pub meeting_type: String, // e.g HB, H2, H1, OS, OA, OH, ID, FF
@@ -148,6 +148,8 @@ pub enum DayOfWeek {
 
 impl DayOfWeek {
     /// Convert to short string representation
+    ///
+    /// Do not change these, these are used for ICS generation. Casing does not matter though.
     pub fn to_short_string(self) -> &'static str {
         match self {
             DayOfWeek::Monday => "Mo",
@@ -256,6 +258,7 @@ impl TimeRange {
     }
 
     /// Get duration in minutes
+    #[allow(dead_code)]
     pub fn duration_minutes(&self) -> i64 {
         let start_minutes = self.start.hour() as i64 * 60 + self.start.minute() as i64;
         let end_minutes = self.end.hour() as i64 * 60 + self.end.minute() as i64;
@@ -300,6 +303,7 @@ impl DateRange {
     }
 
     /// Check if a specific date falls within this range
+    #[allow(dead_code)]
     pub fn contains_date(&self, date: NaiveDate) -> bool {
         date >= self.start && date <= self.end
     }
@@ -347,42 +351,58 @@ impl MeetingType {
 
 /// Meeting location information
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct MeetingLocation {
-    pub campus: String,
-    pub building: String,
-    pub building_description: String,
-    pub room: String,
-    pub is_online: bool,
+pub enum MeetingLocation {
+    Online,
+    InPerson {
+        campus: String,
+        campus_description: String,
+        building: String,
+        building_description: String,
+        room: String,
+    },
 }
 
 impl MeetingLocation {
     /// Create from raw MeetingTime data
     pub fn from_meeting_time(meeting_time: &MeetingTime) -> Self {
-        let is_online = meeting_time.room.is_empty();
+        if meeting_time.campus.is_none()
+            || meeting_time.building.is_none()
+            || meeting_time.building_description.is_none()
+            || meeting_time.room.is_none()
+            || meeting_time.campus_description.is_none()
+            || meeting_time
+                .campus_description
+                .eq(&Some("Internet".to_string()))
+        {
+            return MeetingLocation::Online;
+        }
 
-        MeetingLocation {
-            campus: meeting_time.campus_description.clone(),
-            building: meeting_time.building.clone(),
-            building_description: meeting_time.building_description.clone(),
-            room: meeting_time.room.clone(),
-            is_online,
+        MeetingLocation::InPerson {
+            campus: meeting_time.campus.as_ref().unwrap().clone(),
+            campus_description: meeting_time.campus_description.as_ref().unwrap().clone(),
+            building: meeting_time.building.as_ref().unwrap().clone(),
+            building_description: meeting_time.building_description.as_ref().unwrap().clone(),
+            room: meeting_time.room.as_ref().unwrap().clone(),
         }
     }
 }
 
 impl Display for MeetingLocation {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        if self.is_online {
-            write!(f, "Online")
-        } else {
-            write!(
+        match self {
+            MeetingLocation::Online => write!(f, "Online"),
+            MeetingLocation::InPerson {
+                campus,
+                building,
+                building_description,
+                room,
+                ..
+            } => write!(
                 f,
                 "{campus} | {building_name} | {building_code} {room}",
-                campus = self.campus,
-                building_name = self.building_description,
-                building_code = self.building,
-                room = self.room
-            )
+                building_name = building_description,
+                building_code = building,
+            ),
         }
     }
 }
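A minimal sketch of how the new enum reads at a call site; the field values are illustrative, taken from the comments in the struct above:

let loc = MeetingLocation::InPerson {
    campus: "11".to_string(),
    campus_description: "Main Campus".to_string(),
    building: "NPB".to_string(),
    building_description: "North Paseo Building".to_string(),
    room: "1.238".to_string(),
};
// Note: with the enum, Display now prints the campus *code*, where the old
// struct stored the campus description in its `campus` field.
assert_eq!(loc.to_string(), "11 | North Paseo Building | NPB 1.238");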
@@ -402,7 +422,11 @@ impl MeetingScheduleInfo {
     /// Create from raw MeetingTime data
     pub fn from_meeting_time(meeting_time: &MeetingTime) -> Self {
         let days = MeetingDays::from_meeting_time(meeting_time);
-        let time_range = TimeRange::from_hhmm(&meeting_time.begin_time, &meeting_time.end_time);
+        let time_range = match (&meeting_time.begin_time, &meeting_time.end_time) {
+            (Some(begin), Some(end)) => TimeRange::from_hhmm(begin, end),
+            _ => None,
+        };
+
         let date_range =
             DateRange::from_mm_dd_yyyy(&meeting_time.start_date, &meeting_time.end_date)
                 .unwrap_or_else(|| {
@@ -470,16 +494,18 @@ impl MeetingScheduleInfo {
 
     /// Returns a formatted string representing the location of the meeting
     pub fn place_string(&self) -> String {
-        if self.location.room.is_empty() {
-            "Online".to_string()
-        } else {
-            format!(
+        match &self.location {
+            MeetingLocation::Online => "Online".to_string(),
+            MeetingLocation::InPerson {
+                campus,
+                building,
+                building_description,
+                room,
+                ..
+            } => format!(
                 "{} | {} | {} {}",
-                self.location.campus,
-                self.location.building_description,
-                self.location.building,
-                self.location.room
-            )
+                campus, building_description, building, room
+            ),
         }
     }
@@ -10,8 +10,8 @@ pub struct SearchResult {
     pub total_count: i32,
     pub page_offset: i32,
     pub page_max_size: i32,
-    pub path_mode: String,
-    pub search_results_config: Vec<SearchResultConfig>,
+    pub path_mode: Option<String>,
+    pub search_results_config: Option<Vec<SearchResultConfig>>,
     pub data: Option<Vec<Course>>,
 }
@@ -13,7 +13,7 @@ const CURRENT_YEAR: u32 = compile_time::date!().year() as u32;
 const VALID_YEARS: RangeInclusive<u32> = 2007..=(CURRENT_YEAR + 10);
 
 /// Represents a term in the Banner system
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
 pub struct Term {
     pub year: u32, // 2024, 2025, etc
     pub season: Season,
@@ -29,7 +29,7 @@ pub enum TermPoint {
 }
 
 /// Represents a season within a term
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
 pub enum Season {
     Fall,
     Spring,
@@ -147,11 +147,6 @@ impl Term {
             },
         }
     }
-
-    /// Returns a long string representation of the term (e.g., "Fall 2025")
-    pub fn to_long_string(&self) -> String {
-        format!("{} {}", self.season, self.year)
-    }
 }
 
 impl TermPoint {
@@ -193,7 +188,7 @@ impl std::fmt::Display for Term {
 
 impl Season {
     /// Returns the season code as a string
-    fn to_str(&self) -> &'static str {
+    fn to_str(self) -> &'static str {
         match self {
             Season::Fall => "10",
             Season::Spring => "20",
@@ -32,6 +32,7 @@ pub struct SearchQuery {
     course_number_range: Option<Range>,
 }
 
+#[allow(dead_code)]
 impl SearchQuery {
     /// Creates a new SearchQuery with default values
     pub fn new() -> Self {
@@ -160,6 +161,16 @@ impl SearchQuery {
         self
     }
 
+    /// Gets the subject field
+    pub fn get_subject(&self) -> Option<&String> {
+        self.subject.as_ref()
+    }
+
+    /// Gets the max_results field
+    pub fn get_max_results(&self) -> i32 {
+        self.max_results
+    }
+
     /// Converts the query into URL parameters for the Banner API
     pub fn to_params(&self) -> HashMap<String, String> {
         let mut params = HashMap::new();
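A minimal sketch of the builder together with the new getters; the `.subject("CS")` builder method is assumed to exist (only the getters are new in this hunk), and the values are illustrative:

let query = SearchQuery::new()
    .subject("CS") // assumed builder method, mirrored by get_subject below
    .max_results(25);

assert_eq!(query.get_subject().map(String::as_str), Some("CS"));
assert_eq!(query.get_max_results(), 25);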
src/banner/rate_limit_middleware.rs (new file, 84 lines)
@@ -0,0 +1,84 @@
//! HTTP middleware that enforces rate limiting for Banner API requests.

use crate::banner::rate_limiter::{RequestType, SharedRateLimiter};
use http::Extensions;
use reqwest::{Request, Response};
use reqwest_middleware::{Middleware, Next};
use tracing::debug;
use url::Url;

/// Middleware that enforces rate limiting based on request URL patterns
pub struct RateLimitMiddleware {
    rate_limiter: SharedRateLimiter,
}

impl RateLimitMiddleware {
    /// Creates a new rate limiting middleware
    pub fn new(rate_limiter: SharedRateLimiter) -> Self {
        Self { rate_limiter }
    }

    /// Returns a human-readable description of the rate limit for a request type
    fn get_rate_limit_description(request_type: RequestType) -> &'static str {
        match request_type {
            RequestType::Session => "6 rpm (~10s interval)",
            RequestType::Search => "30 rpm (~2s interval)",
            RequestType::Metadata => "20 rpm (~3s interval)",
            RequestType::Reset => "10 rpm (~6s interval)",
        }
    }

    /// Determines the request type based on the URL path
    fn get_request_type(url: &Url) -> RequestType {
        let path = url.path();

        if path.contains("/registration")
            || path.contains("/selfServiceMenu")
            || path.contains("/term/termSelection")
        {
            RequestType::Session
        } else if path.contains("/searchResults") || path.contains("/classSearch") {
            RequestType::Search
        } else if path.contains("/getTerms")
            || path.contains("/getSubjects")
            || path.contains("/getCampuses")
        {
            RequestType::Metadata
        } else if path.contains("/resetDataForm") {
            RequestType::Reset
        } else {
            // Default to search for unknown endpoints
            RequestType::Search
        }
    }
}

#[async_trait::async_trait]
impl Middleware for RateLimitMiddleware {
    async fn handle(
        &self,
        req: Request,
        extensions: &mut Extensions,
        next: Next<'_>,
    ) -> std::result::Result<Response, reqwest_middleware::Error> {
        let request_type = Self::get_request_type(req.url());

        let start = std::time::Instant::now();
        self.rate_limiter.wait_for_permission(request_type).await;
        let wait_duration = start.elapsed();

        // Only log if rate limiting caused significant delay (>= 500ms)
        if wait_duration.as_millis() >= 500 {
            let limit_desc = Self::get_rate_limit_description(request_type);
            debug!(
                request_type = ?request_type,
                wait_ms = wait_duration.as_millis(),
                rate_limit = limit_desc,
                "Rate limit caused delay"
            );
        }

        // Make the actual request
        next.run(req, extensions).await
    }
}
src/banner/rate_limiter.rs (new file, 131 lines)
@@ -0,0 +1,131 @@
//! Rate limiting for Banner API requests to prevent overwhelming the server.

use governor::{
    Quota, RateLimiter,
    clock::DefaultClock,
    state::{InMemoryState, NotKeyed},
};
use std::num::NonZeroU32;
use std::sync::Arc;
use std::time::Duration;

/// Different types of Banner API requests with different rate limits
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum RequestType {
    /// Session creation and management (very conservative)
    Session,
    /// Course search requests (moderate)
    Search,
    /// Term and metadata requests (moderate)
    Metadata,
    /// Data form resets (low priority)
    Reset,
}

/// Rate limiter configuration for different request types
#[derive(Debug, Clone)]
pub struct RateLimitConfig {
    /// Requests per minute for session operations
    pub session_rpm: u32,
    /// Requests per minute for search operations
    pub search_rpm: u32,
    /// Requests per minute for metadata operations
    pub metadata_rpm: u32,
    /// Requests per minute for reset operations
    pub reset_rpm: u32,
    /// Burst allowance (extra requests allowed in short bursts)
    pub burst_allowance: u32,
}

impl Default for RateLimitConfig {
    fn default() -> Self {
        Self {
            // Very conservative for session creation
            session_rpm: 6, // 1 every 10 seconds
            // Moderate for search operations
            search_rpm: 30, // 1 every 2 seconds
            // Moderate for metadata
            metadata_rpm: 20, // 1 every 3 seconds
            // Low for resets
            reset_rpm: 10, // 1 every 6 seconds
            // Allow small bursts
            burst_allowance: 3,
        }
    }
}

/// A rate limiter that manages different request types with different limits
pub struct BannerRateLimiter {
    session_limiter: RateLimiter<NotKeyed, InMemoryState, DefaultClock>,
    search_limiter: RateLimiter<NotKeyed, InMemoryState, DefaultClock>,
    metadata_limiter: RateLimiter<NotKeyed, InMemoryState, DefaultClock>,
    reset_limiter: RateLimiter<NotKeyed, InMemoryState, DefaultClock>,
}

impl BannerRateLimiter {
    /// Creates a new rate limiter with the given configuration
    pub fn new(config: RateLimitConfig) -> Self {
        let session_quota = Quota::with_period(Duration::from_secs(60) / config.session_rpm)
            .unwrap()
            .allow_burst(NonZeroU32::new(config.burst_allowance).unwrap());

        let search_quota = Quota::with_period(Duration::from_secs(60) / config.search_rpm)
            .unwrap()
            .allow_burst(NonZeroU32::new(config.burst_allowance).unwrap());

        let metadata_quota = Quota::with_period(Duration::from_secs(60) / config.metadata_rpm)
            .unwrap()
            .allow_burst(NonZeroU32::new(config.burst_allowance).unwrap());

        let reset_quota = Quota::with_period(Duration::from_secs(60) / config.reset_rpm)
            .unwrap()
            .allow_burst(NonZeroU32::new(config.burst_allowance).unwrap());

        Self {
            session_limiter: RateLimiter::direct(session_quota),
            search_limiter: RateLimiter::direct(search_quota),
            metadata_limiter: RateLimiter::direct(metadata_quota),
            reset_limiter: RateLimiter::direct(reset_quota),
        }
    }

    /// Waits for permission to make a request of the given type
    pub async fn wait_for_permission(&self, request_type: RequestType) {
        let limiter = match request_type {
            RequestType::Session => &self.session_limiter,
            RequestType::Search => &self.search_limiter,
            RequestType::Metadata => &self.metadata_limiter,
            RequestType::Reset => &self.reset_limiter,
        };

        // Wait until we can make the request (logging handled by middleware)
        limiter.until_ready().await;
    }
}

impl Default for BannerRateLimiter {
    fn default() -> Self {
        Self::new(RateLimitConfig::default())
    }
}

/// A shared rate limiter instance
pub type SharedRateLimiter = Arc<BannerRateLimiter>;

/// Creates a new shared rate limiter with custom configuration
pub fn create_shared_rate_limiter(config: Option<RateLimitConfig>) -> SharedRateLimiter {
    Arc::new(BannerRateLimiter::new(config.unwrap_or_default()))
}

/// Conversion from config module's RateLimitingConfig to this module's RateLimitConfig
impl From<crate::config::RateLimitingConfig> for RateLimitConfig {
    fn from(config: crate::config::RateLimitingConfig) -> Self {
        Self {
            session_rpm: config.session_rpm,
            search_rpm: config.search_rpm,
            metadata_rpm: config.metadata_rpm,
            reset_rpm: config.reset_rpm,
            burst_allowance: config.burst_allowance,
        }
    }
}
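A minimal sketch of the limiter on its own, outside the middleware; the `limiter_demo` function is illustrative:

use std::time::Instant;

async fn limiter_demo() {
    let limiter = BannerRateLimiter::default();
    let start = Instant::now();
    // With search_rpm = 30 and burst_allowance = 3, the first three permits
    // arrive immediately; the fourth waits roughly 2 seconds (60s / 30).
    for _ in 0..4 {
        limiter.wait_for_permission(RequestType::Search).await;
    }
    println!("4 permits took {:?}", start.elapsed());
}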
@@ -1,133 +1,425 @@
|
|||||||
//! Session management for Banner API.
|
//! Session management for Banner API.
|
||||||
|
|
||||||
use crate::banner::util::user_agent;
|
use crate::banner::BannerTerm;
|
||||||
use anyhow::Result;
|
use crate::banner::models::Term;
|
||||||
use rand::distributions::{Alphanumeric, DistString};
|
use anyhow::{Context, Result};
|
||||||
use reqwest::Client;
|
use cookie::Cookie;
|
||||||
use std::sync::Mutex;
|
use dashmap::DashMap;
|
||||||
|
use governor::state::InMemoryState;
|
||||||
|
use governor::{Quota, RateLimiter};
|
||||||
|
use once_cell::sync::Lazy;
|
||||||
|
use rand::distr::{Alphanumeric, SampleString};
|
||||||
|
use reqwest_middleware::ClientWithMiddleware;
|
||||||
|
use std::collections::{HashMap, VecDeque};
|
||||||
|
use std::num::NonZeroU32;
|
||||||
|
use std::ops::{Deref, DerefMut};
|
||||||
|
use std::sync::Arc;
|
||||||
use std::time::{Duration, Instant};
|
use std::time::{Duration, Instant};
|
||||||
use tracing::{debug, info};
|
use tokio::sync::{Mutex, Notify};
|
||||||
|
use tracing::{debug, info, trace};
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
/// Session manager for Banner API interactions
|
const SESSION_EXPIRY: Duration = Duration::from_secs(25 * 60); // 25 minutes
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct SessionManager {
|
|
||||||
current_session: Mutex<Option<SessionData>>,
|
|
||||||
base_url: String,
|
|
||||||
client: Client,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
// A global rate limiter to ensure we only try to create one new session every 10 seconds,
|
||||||
|
// preventing us from overwhelming the server with session creation requests.
|
||||||
|
static SESSION_CREATION_RATE_LIMITER: Lazy<
|
||||||
|
RateLimiter<governor::state::direct::NotKeyed, InMemoryState, governor::clock::DefaultClock>,
|
||||||
|
> = Lazy::new(|| RateLimiter::direct(Quota::with_period(Duration::from_secs(10)).unwrap()));
|
||||||
|
|
||||||
|
/// Represents an active anonymous session within the Banner API.
|
||||||
|
/// Identified by multiple persistent cookies, as well as a client-generated "unique session ID".
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
struct SessionData {
|
pub struct BannerSession {
|
||||||
session_id: String,
|
// Randomly generated
|
||||||
|
pub unique_session_id: String,
|
||||||
|
// Timestamp of creation
|
||||||
created_at: Instant,
|
created_at: Instant,
|
||||||
|
// Timestamp of last activity
|
||||||
|
last_activity: Option<Instant>,
|
||||||
|
// Cookie values from initial registration page
|
||||||
|
jsessionid: String,
|
||||||
|
ssb_cookie: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SessionManager {
|
/// Generates a new session ID mimicking Banner's format
|
||||||
const SESSION_EXPIRY: Duration = Duration::from_secs(25 * 60); // 25 minutes
|
fn generate_session_id() -> String {
|
||||||
|
let random_part = Alphanumeric.sample_string(&mut rand::rng(), 5);
|
||||||
|
let timestamp = std::time::SystemTime::now()
|
||||||
|
.duration_since(std::time::UNIX_EPOCH)
|
||||||
|
.unwrap()
|
||||||
|
.as_millis();
|
||||||
|
format!("{}{}", random_part, timestamp)
|
||||||
|
}
|
||||||
|
|
||||||
/// Creates a new session manager
|
/// Generates a timestamp-based nonce
|
||||||
pub fn new(base_url: String, client: Client) -> Self {
|
pub fn nonce() -> String {
|
||||||
|
std::time::SystemTime::now()
|
||||||
|
.duration_since(std::time::UNIX_EPOCH)
|
||||||
|
.unwrap()
|
||||||
|
.as_millis()
|
||||||
|
.to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BannerSession {
|
||||||
|
/// Creates a new session
|
||||||
|
pub async fn new(unique_session_id: &str, jsessionid: &str, ssb_cookie: &str) -> Result<Self> {
|
||||||
|
let now = Instant::now();
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
created_at: now,
|
||||||
|
last_activity: None,
|
||||||
|
unique_session_id: unique_session_id.to_string(),
|
||||||
|
jsessionid: jsessionid.to_string(),
|
||||||
|
ssb_cookie: ssb_cookie.to_string(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the unique session ID
|
||||||
|
pub fn id(&self) -> String {
|
||||||
|
self.unique_session_id.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Updates the last activity timestamp
|
||||||
|
pub fn touch(&mut self) {
|
||||||
|
self.last_activity = Some(Instant::now());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the session is expired
|
||||||
|
pub fn is_expired(&self) -> bool {
|
||||||
|
self.last_activity.unwrap_or(self.created_at).elapsed() > SESSION_EXPIRY
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a string used to for the "Cookie" header
|
||||||
|
pub fn cookie(&self) -> String {
|
||||||
|
format!(
|
||||||
|
"JSESSIONID={}; SSB_COOKIE={}",
|
||||||
|
self.jsessionid, self.ssb_cookie
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn been_used(&self) -> bool {
|
||||||
|
self.last_activity.is_some()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A smart pointer that returns a BannerSession to the pool when dropped.
|
||||||
|
pub struct PooledSession {
|
||||||
|
session: Option<BannerSession>,
|
||||||
|
// This Arc points directly to the term-specific pool.
|
||||||
|
pool: Arc<TermPool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PooledSession {
|
||||||
|
pub fn been_used(&self) -> bool {
|
||||||
|
self.session.as_ref().unwrap().been_used()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Deref for PooledSession {
|
||||||
|
type Target = BannerSession;
|
||||||
|
fn deref(&self) -> &Self::Target {
|
||||||
|
// The option is only ever None after drop is called, so this is safe.
|
||||||
|
self.session.as_ref().unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DerefMut for PooledSession {
|
||||||
|
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||||
|
self.session.as_mut().unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The magic happens here: when the guard goes out of scope, this is called.
|
||||||
|
impl Drop for PooledSession {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
if let Some(session) = self.session.take() {
|
||||||
|
let pool = self.pool.clone();
|
||||||
|
// Since drop() cannot be async, we spawn a task to return the session.
|
||||||
|
tokio::spawn(async move {
|
||||||
|
pool.release(session).await;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct TermPool {
|
||||||
|
sessions: Mutex<VecDeque<BannerSession>>,
|
||||||
|
notifier: Notify,
|
||||||
|
is_creating: Mutex<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TermPool {
|
||||||
|
fn new() -> Self {
|
||||||
Self {
|
Self {
|
||||||
current_session: Mutex::new(None),
|
sessions: Mutex::new(VecDeque::new()),
|
||||||
|
notifier: Notify::new(),
|
||||||
|
is_creating: Mutex::new(false),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn release(&self, session: BannerSession) {
|
||||||
|
let id = session.unique_session_id.clone();
|
||||||
|
if session.is_expired() {
|
||||||
|
debug!(id = id, "Session expired, dropping");
|
||||||
|
// Wake up a waiter, as it might need to create a new session
|
||||||
|
// if this was the last one.
|
||||||
|
self.notifier.notify_one();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut queue = self.sessions.lock().await;
|
||||||
|
queue.push_back(session);
|
||||||
|
drop(queue); // Release lock before notifying
|
||||||
|
|
||||||
|
self.notifier.notify_one();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
+pub struct SessionPool {
+    sessions: DashMap<Term, Arc<TermPool>>,
+    http: ClientWithMiddleware,
+    base_url: String,
+}
+
+impl SessionPool {
+    pub fn new(http: ClientWithMiddleware, base_url: String) -> Self {
+        Self {
+            sessions: DashMap::new(),
+            http,
             base_url,
-            client,
         }
     }

-    /// Ensures a valid session is available, creating one if necessary
-    pub fn ensure_session(&self) -> Result<String> {
-        let start_time = std::time::Instant::now();
-        let mut session_guard = self.current_session.lock().unwrap();
-
-        if let Some(ref session) = *session_guard
-            && session.created_at.elapsed() < Self::SESSION_EXPIRY
-        {
-            let elapsed = start_time.elapsed();
-            debug!(
-                session_id = session.session_id,
-                elapsed = format!("{:.2?}", elapsed),
-                "reusing existing banner session"
-            );
-            return Ok(session.session_id.clone());
-        }
-
-        // Generate new session
-        let session_id = self.generate_session_id();
-        *session_guard = Some(SessionData {
-            session_id: session_id.clone(),
-            created_at: Instant::now(),
-        });
-
-        let elapsed = start_time.elapsed();
-        debug!(
-            session_id = session_id,
-            elapsed = format!("{:.2?}", elapsed),
-            "generated new banner session"
-        );
-        Ok(session_id)
-    }
-
-    /// Generates a new session ID mimicking Banner's format
-    fn generate_session_id(&self) -> String {
-        let random_part = Alphanumeric.sample_string(&mut rand::thread_rng(), 5);
-        let timestamp = std::time::SystemTime::now()
-            .duration_since(std::time::UNIX_EPOCH)
-            .unwrap()
-            .as_millis();
-        format!("{}{}", random_part, timestamp)
-    }
+    /// Acquires a session from the pool.
+    /// If no sessions are available, a new one is created on demand,
+    /// respecting the global rate limit.
+    pub async fn acquire(&self, term: Term) -> Result<PooledSession> {
+        let term_pool = self
+            .sessions
+            .entry(term)
+            .or_insert_with(|| Arc::new(TermPool::new()))
+            .clone();
+
+        let start = Instant::now();
+        let mut waited_for_creation = false;
+
+        loop {
+            // Fast path: Try to get an existing, non-expired session.
+            {
+                let mut queue = term_pool.sessions.lock().await;
+                if let Some(session) = queue.pop_front() {
+                    if !session.is_expired() {
+                        return Ok(PooledSession {
+                            session: Some(session),
+                            pool: Arc::clone(&term_pool),
+                        });
+                    } else {
+                        debug!(id = session.unique_session_id, "Discarded expired session");
+                    }
+                }
+            } // MutexGuard is dropped, lock is released.
+
+            // Slow path: No sessions available. We must either wait or become the creator.
+            let mut is_creating_guard = term_pool.is_creating.lock().await;
+            if *is_creating_guard {
+                // Another task is already creating a session. Release the lock and wait.
+                drop(is_creating_guard);
+                if !waited_for_creation {
+                    trace!("Waiting for another task to create session");
+                    waited_for_creation = true;
+                }
+                term_pool.notifier.notified().await;
+                // Loop back to the top to try the fast path again.
+                continue;
+            }
+
+            // This task is now the designated creator.
+            *is_creating_guard = true;
+            drop(is_creating_guard);
+
+            // Race: wait for a session to be returned OR for the rate limiter to allow a new one.
+            trace!("Pool empty, creating new session");
+            tokio::select! {
+                _ = term_pool.notifier.notified() => {
+                    // A session was returned while we were waiting!
+                    // We are no longer the creator. Reset the flag and loop to race for the new session.
+                    let mut guard = term_pool.is_creating.lock().await;
+                    *guard = false;
+                    drop(guard);
+                    continue;
+                }
+                _ = SESSION_CREATION_RATE_LIMITER.until_ready() => {
+                    // The rate limit has elapsed. It's our job to create the session.
+                    let new_session_result = self.create_session(&term).await;
+
+                    // After creation, we are no longer the creator. Reset the flag
+                    // and notify all other waiting tasks.
+                    let mut guard = term_pool.is_creating.lock().await;
+                    *guard = false;
+                    drop(guard);
+                    term_pool.notifier.notify_waiters();
+
+                    match new_session_result {
+                        Ok(new_session) => {
+                            let elapsed = start.elapsed();
+                            debug!(
+                                id = new_session.unique_session_id,
+                                elapsed_ms = elapsed.as_millis(),
+                                "Created new session"
+                            );
+                            return Ok(PooledSession {
+                                session: Some(new_session),
+                                pool: term_pool,
+                            });
+                        }
+                        Err(e) => {
+                            // Propagate the error if session creation failed.
+                            return Err(e.context("Failed to create new session in pool"));
+                        }
+                    }
+                }
+            }
+        }
+    }
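The `PooledSession { session: Option<_>, pool: Arc<TermPool> }` shape pairs with `TermPool::release` above, which suggests an RAII guard that hands its session back to the queue (or drops it if expired) when it goes out of scope. A minimal usage sketch under that assumption; the caller below is hypothetical, and only `SessionPool::acquire` comes from this diff:

    // Hypothetical caller; assumes PooledSession re-queues its session via Drop.
    async fn with_session(pool: &SessionPool, term: Term) -> anyhow::Result<()> {
        // Suspends on the fast path (queued session) or the slow path
        // (creator election plus SESSION_CREATION_RATE_LIMITER).
        let session = pool.acquire(term).await?;
        // ... make authenticated Banner requests with `session` ...
        Ok(())
        // `session` dropped here: assumed to return it to the pool and wake one waiter.
    }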
 /// Sets up initial session cookies by making required Banner API requests
-    pub async fn setup(&self) -> Result<()> {
-        info!("setting up banner session...");
-
-        let request_paths = ["/registration/registration", "/selfServiceMenu/data"];
-        for path in &request_paths {
-            let url = format!("{}{}", self.base_url, path);
-            let response = self
-                .client
-                .get(&url)
-                .query(&[("_", Self::nonce())])
-                .header("User-Agent", user_agent())
-                .send()
-                .await?;
-
-            if !response.status().is_success() {
-                return Err(anyhow::anyhow!(
-                    "Failed to setup session, request to {} returned {}",
-                    path,
-                    response.status()
-                ));
-            }
-        }
-
-        // Note: Cookie validation would require additional setup in a real implementation
-        debug!("session setup complete");
-        Ok(())
-    }
+    pub async fn create_session(&self, term: &Term) -> Result<BannerSession> {
+        info!(term = %term, "setting up banner session");
+
+        // The 'register' or 'search' registration page
+        let initial_registration = self
+            .http
+            .get(format!("{}/registration", self.base_url))
+            .send()
+            .await?;
+        // TODO: Validate success
+
+        let cookies = initial_registration
+            .headers()
+            .get_all("Set-Cookie")
+            .iter()
+            .filter_map(|header_value| {
+                if let Ok(cookie_str) = header_value.to_str() {
+                    if let Ok(cookie) = Cookie::parse(cookie_str) {
+                        Some((cookie.name().to_string(), cookie.value().to_string()))
+                    } else {
+                        None
+                    }
+                } else {
+                    None
+                }
+            })
+            .collect::<HashMap<String, String>>();
+
+        if !cookies.contains_key("JSESSIONID") || !cookies.contains_key("SSB_COOKIE") {
+            return Err(anyhow::anyhow!("Failed to get cookies"));
+        }
+
+        let jsessionid = cookies
+            .get("JSESSIONID")
+            .ok_or_else(|| anyhow::anyhow!("JSESSIONID cookie missing after validation"))?;
+        let ssb_cookie = cookies
+            .get("SSB_COOKIE")
+            .ok_or_else(|| anyhow::anyhow!("SSB_COOKIE cookie missing after validation"))?;
+        let cookie_header = format!("JSESSIONID={}; SSB_COOKIE={}", jsessionid, ssb_cookie);
+
+        self.http
+            .get(format!("{}/selfServiceMenu/data", self.base_url))
+            .header("Cookie", &cookie_header)
+            .send()
+            .await?
+            .error_for_status()
+            .context("Failed to get data page")?;
+
+        self.http
+            .get(format!("{}/term/termSelection", self.base_url))
+            .header("Cookie", &cookie_header)
+            .query(&[("mode", "search")])
+            .send()
+            .await?
+            .error_for_status()
+            .context("Failed to get term selection page")?;
+        // TODO: Validate success
+
+        let terms = self.get_terms("", 1, 10).await?;
+        if !terms.iter().any(|t| t.code == term.to_string()) {
+            return Err(anyhow::anyhow!("Failed to get term search response"));
+        }
+
+        let specific_term_search_response = self.get_terms(&term.to_string(), 1, 10).await?;
+        if !specific_term_search_response
+            .iter()
+            .any(|t| t.code == term.to_string())
+        {
+            return Err(anyhow::anyhow!("Failed to get term search response"));
+        }
+
+        let unique_session_id = generate_session_id();
+        self.select_term(&term.to_string(), &unique_session_id, &cookie_header)
+            .await?;
+
+        BannerSession::new(&unique_session_id, jsessionid, ssb_cookie).await
+    }
+
+    /// Retrieves a list of terms from the Banner API.
+    pub async fn get_terms(
+        &self,
+        search: &str,
+        page: i32,
+        max_results: i32,
+    ) -> Result<Vec<BannerTerm>> {
+        if page <= 0 {
+            return Err(anyhow::anyhow!("Page must be greater than 0"));
+        }
+
+        let url = format!("{}/classSearch/getTerms", self.base_url);
+        let params = [
+            ("searchTerm", search),
+            ("offset", &page.to_string()),
+            ("max", &max_results.to_string()),
+            ("_", &nonce()),
+        ];
+
+        let response = self
+            .http
+            .get(&url)
+            .query(&params)
+            .send()
+            .await
+            .with_context(|| "Failed to get terms".to_string())?;
+
+        let terms: Vec<BannerTerm> = response
+            .json()
+            .await
+            .context("Failed to parse terms response")?;
+
+        Ok(terms)
+    }
 /// Selects a term for the current session
-    pub async fn select_term(&self, term: &str) -> Result<()> {
-        let session_id = self.ensure_session()?;
+    pub async fn select_term(
+        &self,
+        term: &str,
+        unique_session_id: &str,
+        cookie_header: &str,
+    ) -> Result<()> {
         let form_data = [
             ("term", term),
             ("studyPath", ""),
             ("studyPathText", ""),
             ("startDatepicker", ""),
             ("endDatepicker", ""),
-            ("uniqueSessionId", &session_id),
+            ("uniqueSessionId", unique_session_id),
         ];

         let url = format!("{}/term/search", self.base_url);
         let response = self
-            .client
+            .http
             .post(&url)
+            .header("Cookie", cookie_header)
             .query(&[("mode", "search")])
             .form(&form_data)
-            .header("User-Agent", user_agent())
-            .header("Content-Type", "application/x-www-form-urlencoded")
             .send()
             .await?;
@@ -141,18 +433,36 @@ impl SessionManager {

         #[derive(serde::Deserialize)]
         struct RedirectResponse {
-            #[serde(rename = "fwdUrl")]
+            #[serde(rename = "fwdURL")]
             fwd_url: String,
         }

         let redirect: RedirectResponse = response.json().await?;

+        let base_url_path = self
+            .base_url
+            .parse::<Url>()
+            .context("Failed to parse base URL")?
+            .path()
+            .to_string();
+        let non_overlap_redirect =
+            redirect
+                .fwd_url
+                .strip_prefix(&base_url_path)
+                .ok_or_else(|| {
+                    anyhow::anyhow!(
+                        "Redirect URL '{}' does not start with expected prefix '{}'",
+                        redirect.fwd_url,
+                        base_url_path
+                    )
+                })?;

         // Follow the redirect
-        let redirect_url = format!("{}{}", self.base_url, redirect.fwd_url);
+        let redirect_url = format!("{}{}", self.base_url, non_overlap_redirect);
         let redirect_response = self
-            .client
+            .http
             .get(&redirect_url)
-            .header("User-Agent", user_agent())
+            .header("Cookie", cookie_header)
             .send()
             .await?;
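The prefix stripping above avoids doubling the servlet path when fwdURL already begins with it. Concretely, with illustrative values (the base URL matches the default configured later in this diff):

    // base_url        = "https://ssbprod.utsa.edu/StudentRegistrationSsb/ssb"
    // Url::path()     = "/StudentRegistrationSsb/ssb"
    // fwdURL          = "/StudentRegistrationSsb/ssb/term/termSelection?mode=search"
    // after stripping = "/term/termSelection?mode=search"
    // redirect_url    = base_url + stripped, with no duplicated path segment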
@@ -163,36 +473,6 @@ impl SessionManager {
             ));
         }

-        debug!("successfully selected term: {}", term);
         Ok(())
     }

-    /// Resets the data form (required before new searches)
-    pub async fn reset_data_form(&self) -> Result<()> {
-        let url = format!("{}/classSearch/resetDataForm", self.base_url);
-        let response = self
-            .client
-            .post(&url)
-            .header("User-Agent", user_agent())
-            .send()
-            .await?;
-
-        if !response.status().is_success() {
-            return Err(anyhow::anyhow!(
-                "Failed to reset data form: {}",
-                response.status()
-            ));
-        }
-
-        Ok(())
-    }
-
-    /// Generates a timestamp-based nonce
-    pub fn nonce() -> String {
-        std::time::SystemTime::now()
-            .duration_since(std::time::UNIX_EPOCH)
-            .unwrap()
-            .as_millis()
-            .to_string()
-    }
 }
132 src/bin/search.rs Normal file
@@ -0,0 +1,132 @@
use banner::banner::{BannerApi, SearchQuery, Term};
use banner::config::Config;
use banner::error::Result;
use figment::{Figment, providers::Env};
use futures::future;
use tracing::{error, info};
use tracing_subscriber::{EnvFilter, FmtSubscriber};

#[tokio::main]
async fn main() -> Result<()> {
    // Configure logging
    let filter = EnvFilter::try_from_default_env()
        .unwrap_or_else(|_| EnvFilter::new("info,banner=trace,reqwest=debug,hyper=info"));
    let subscriber = FmtSubscriber::builder()
        .with_env_filter(filter)
        .with_target(true)
        .finish();
    tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed");

    info!("Starting Banner search test");

    dotenvy::dotenv().ok();

    // Load configuration
    let config: Config = Figment::new()
        .merge(Env::raw())
        .extract()
        .expect("Failed to load config");

    info!(
        banner_base_url = config.banner_base_url,
        "Configuration loaded"
    );

    // Create Banner API client
    let banner_api =
        BannerApi::new_with_config(config.banner_base_url, config.rate_limiting.into())
            .expect("Failed to create BannerApi");

    // Get current term
    let term = Term::get_current().inner().to_string();
    info!(term = term, "Using current term");

    // Define multiple search queries
    let queries = vec![
        (
            "CS Courses",
            SearchQuery::new().subject("CS").max_results(10),
        ),
        (
            "Math Courses",
            SearchQuery::new().subject("MAT").max_results(10),
        ),
        (
            "3000-level CS",
            SearchQuery::new()
                .subject("CS")
                .course_numbers(3000, 3999)
                .max_results(8),
        ),
        (
            "High Credit Courses",
            SearchQuery::new().credits(4, 6).max_results(8),
        ),
        (
            "Programming Courses",
            SearchQuery::new().keyword("programming").max_results(6),
        ),
    ];

    info!(query_count = queries.len(), "Executing concurrent searches");

    // Execute all searches concurrently
    let search_futures = queries.into_iter().map(|(label, query)| {
        info!(label = %label, "Starting search");
        let banner_api = &banner_api;
        let term = &term;
        async move {
            let result = banner_api
                .search(term, &query, "subjectDescription", false)
                .await;
            (label, result)
        }
    });

    // Wait for all searches to complete
    let search_results = future::join_all(search_futures)
        .await
        .into_iter()
        .filter_map(|(label, result)| match result {
            Ok(search_result) => {
                info!(
                    label = label,
                    success = search_result.success,
                    total_count = search_result.total_count,
                    "Search completed successfully"
                );
                Some((label, search_result))
            }
            Err(e) => {
                error!(label = label, error = ?e, "Search failed");
                None
            }
        })
        .collect::<Vec<_>>();

    // Process and display results
    for (label, search_result) in search_results {
        println!("\n=== {} ===", label);
        if let Some(courses) = &search_result.data {
            if courses.is_empty() {
                println!("  No courses found");
            } else {
                println!("  Found {} courses:", courses.len());
                for course in courses {
                    println!(
                        "    {} {} - {} (CRN: {})",
                        course.subject,
                        course.course_number,
                        course.course_title,
                        course.course_reference_number
                    );
                }
            }
        } else {
            println!("  No courses found");
        }
    }

    info!("Search test completed");
    Ok(())
}
@@ -77,7 +77,7 @@ pub async fn gcal(
     )
     .await?;

-    info!("gcal command completed for CRN: {}", crn);
+    info!(crn = %crn, "gcal command completed");
     Ok(())
 }
@@ -1,8 +1,111 @@
 //! ICS command implementation for generating calendar files.

+use crate::banner::{Course, MeetingScheduleInfo};
 use crate::bot::{Context, Error, utils};
+use chrono::{Datelike, NaiveDate, Utc};
+use serenity::all::CreateAttachment;
 use tracing::info;

+/// Represents a holiday or special day that should be excluded from class schedules
+#[derive(Debug, Clone)]
+enum Holiday {
+    /// A single-day holiday
+    Single { month: u32, day: u32 },
+    /// A multi-day holiday range
+    Range {
+        month: u32,
+        start_day: u32,
+        end_day: u32,
+    },
+}
+
+impl Holiday {
+    /// Check if a specific date falls within this holiday
+    fn contains_date(&self, date: NaiveDate) -> bool {
+        match self {
+            Holiday::Single { month, day, .. } => date.month() == *month && date.day() == *day,
+            Holiday::Range {
+                month,
+                start_day,
+                end_day,
+                ..
+            } => date.month() == *month && date.day() >= *start_day && date.day() <= *end_day,
+        }
+    }
+
+    /// Get all dates in this holiday for a given year
+    fn get_dates_for_year(&self, year: i32) -> Vec<NaiveDate> {
+        match self {
+            Holiday::Single { month, day, .. } => {
+                if let Some(date) = NaiveDate::from_ymd_opt(year, *month, *day) {
+                    vec![date]
+                } else {
+                    Vec::new()
+                }
+            }
+            Holiday::Range {
+                month,
+                start_day,
+                end_day,
+                ..
+            } => {
+                let mut dates = Vec::new();
+                for day in *start_day..=*end_day {
+                    if let Some(date) = NaiveDate::from_ymd_opt(year, *month, day) {
+                        dates.push(date);
+                    }
+                }
+                dates
+            }
+        }
+    }
+}
+
+/// University holidays that should be excluded from class schedules
+const UNIVERSITY_HOLIDAYS: &[(&str, Holiday)] = &[
+    ("Labor Day", Holiday::Single { month: 9, day: 1 }),
+    (
+        "Fall Break",
+        Holiday::Range {
+            month: 10,
+            start_day: 13,
+            end_day: 14,
+        },
+    ),
+    (
+        "Unspecified Holiday",
+        Holiday::Single { month: 11, day: 26 },
+    ),
+    (
+        "Thanksgiving",
+        Holiday::Range {
+            month: 11,
+            start_day: 28,
+            end_day: 29,
+        },
+    ),
+    ("Student Study Day", Holiday::Single { month: 12, day: 5 }),
+    (
+        "Winter Holiday",
+        Holiday::Range {
+            month: 12,
+            start_day: 23,
+            end_day: 31,
+        },
+    ),
+    ("New Year's Day", Holiday::Single { month: 1, day: 1 }),
+    ("MLK Day", Holiday::Single { month: 1, day: 20 }),
+    (
+        "Spring Break",
+        Holiday::Range {
+            month: 3,
+            start_day: 10,
+            end_day: 15,
+        },
+    ),
+    ("Student Study Day", Holiday::Single { month: 5, day: 9 }),
+];

 /// Generate an ICS file for a course
 #[poise::command(slash_command, prefix_command)]
 pub async fn ics(
@@ -12,14 +115,322 @@ pub async fn ics(
     ctx.defer().await?;

     let course = utils::get_course_by_crn(&ctx, crn).await?;
+    let term = course.term.clone();

-    // TODO: Implement actual ICS file generation
-    ctx.say(format!(
-        "ICS generation for '{}' is not yet implemented.",
-        course.display_title()
-    ))
+    // Get meeting times
+    let meeting_times = ctx
+        .data()
+        .app_state
+        .banner_api
+        .get_course_meeting_time(&term, &crn.to_string())
+        .await?;
+
+    if meeting_times.is_empty() {
+        ctx.say("No meeting times found for this course.").await?;
+        return Ok(());
+    }
+
+    // Sort meeting times by start time
+    let mut sorted_meeting_times = meeting_times.to_vec();
+    sorted_meeting_times.sort_unstable_by(|a, b| match (&a.time_range, &b.time_range) {
+        (Some(a_time), Some(b_time)) => a_time.start.cmp(&b_time.start),
+        (Some(_), None) => std::cmp::Ordering::Less,
+        (None, Some(_)) => std::cmp::Ordering::Greater,
+        (None, None) => a.days.bits().cmp(&b.days.bits()),
+    });
+
+    // Generate ICS content
+    let (ics_content, excluded_holidays) =
+        generate_ics_content(&course, &term, &sorted_meeting_times)?;
+
+    // Create file attachment
+    let filename = format!(
+        "{subject}_{number}_{section}.ics",
+        subject = course.subject.replace(" ", "_"),
+        number = course.course_number,
+        section = course.sequence_number,
+    );
+
+    let file = CreateAttachment::bytes(ics_content.into_bytes(), filename.clone());
+
+    // Build response content
+    let mut response_content = format!(
+        "📅 Generated ICS calendar for **{}**\n\n**Meeting Times:**\n{}",
+        course.display_title(),
+        sorted_meeting_times
+            .iter()
+            .enumerate()
+            .map(|(i, m)| {
+                let time_info = match &m.time_range {
+                    Some(range) => format!(
+                        "{} {}",
+                        m.days_string().unwrap_or("TBA".to_string()),
+                        range.format_12hr()
+                    ),
+                    None => m.days_string().unwrap_or("TBA".to_string()),
+                };
+                format!("{}. {}", i + 1, time_info)
+            })
+            .collect::<Vec<_>>()
+            .join("\n")
+    );
+
+    // Add holiday exclusion information
+    if !excluded_holidays.is_empty() {
+        let count = excluded_holidays.len();
+        let count_text = if count == 1 {
+            "1 date was".to_string()
+        } else {
+            format!("{} dates were", count)
+        };
+        response_content.push_str(&format!("\n\n{} excluded from the ICS file:\n", count_text));
+        response_content.push_str(
+            &excluded_holidays
+                .iter()
+                .map(|s| format!("- {}", s))
+                .collect::<Vec<_>>()
+                .join("\n"),
+        );
+    }
+
+    ctx.send(
+        poise::CreateReply::default()
+            .content(response_content)
+            .attachment(file),
+    )
     .await?;

-    info!("ics command completed for CRN: {}", crn);
+    info!(crn = %crn, "ics command completed");
     Ok(())
 }
+
+/// Generate ICS content for a course and its meeting times
+fn generate_ics_content(
+    course: &Course,
+    term: &str,
+    meeting_times: &[MeetingScheduleInfo],
+) -> Result<(String, Vec<String>), anyhow::Error> {
+    let mut ics_content = String::new();
+    let mut excluded_holidays = Vec::new();
+
+    // ICS header
+    ics_content.push_str("BEGIN:VCALENDAR\r\n");
+    ics_content.push_str("VERSION:2.0\r\n");
+    ics_content.push_str("PRODID:-//Banner Bot//Course Calendar//EN\r\n");
+    ics_content.push_str("CALSCALE:GREGORIAN\r\n");
+    ics_content.push_str("METHOD:PUBLISH\r\n");
+
+    // Calendar name
+    ics_content.push_str(&format!(
+        "X-WR-CALNAME:{} - {}\r\n",
+        course.display_title(),
+        term
+    ));
+
+    // Generate events for each meeting time
+    for (index, meeting_time) in meeting_times.iter().enumerate() {
+        let (event_content, holidays) = generate_event_content(course, meeting_time, index)?;
+        ics_content.push_str(&event_content);
+        excluded_holidays.extend(holidays);
+    }
+
+    // ICS footer
+    ics_content.push_str("END:VCALENDAR\r\n");
+
+    Ok((ics_content, excluded_holidays))
+}
+
+/// Generate ICS event content for a single meeting time
+fn generate_event_content(
+    course: &Course,
+    meeting_time: &MeetingScheduleInfo,
+    index: usize,
+) -> Result<(String, Vec<String>), anyhow::Error> {
+    let course_title = course.display_title();
+    let instructor_name = course.primary_instructor_name();
+    let location = meeting_time.place_string();
+
+    // Create event title with meeting index if multiple meetings
+    let event_title = if index > 0 {
+        format!("{} (Meeting {})", course_title, index + 1)
+    } else {
+        course_title
+    };
+
+    // Create event description
+    let description = format!(
+        "CRN: {}\\nInstructor: {}\\nDays: {}\\nMeeting Type: {}",
+        course.course_reference_number,
+        instructor_name,
+        meeting_time.days_string().unwrap_or("TBA".to_string()),
+        meeting_time.meeting_type.description()
+    );
+
+    // Get start and end times
+    let (start_dt, end_dt) = meeting_time.datetime_range();
+
+    // Format datetimes for ICS (UTC format)
+    let start_utc = start_dt.with_timezone(&Utc);
+    let end_utc = end_dt.with_timezone(&Utc);
+
+    let start_str = start_utc.format("%Y%m%dT%H%M%SZ").to_string();
+    let end_str = end_utc.format("%Y%m%dT%H%M%SZ").to_string();
+
+    // Generate unique ID for the event
+    let uid = format!(
+        "{}-{}-{}@banner-bot.local",
+        course.course_reference_number,
+        index,
+        start_utc.timestamp()
+    );
+
+    let mut event_content = String::new();
+
+    // Event header
+    event_content.push_str("BEGIN:VEVENT\r\n");
+    event_content.push_str(&format!("UID:{}\r\n", uid));
+    event_content.push_str(&format!("DTSTART:{}\r\n", start_str));
+    event_content.push_str(&format!("DTEND:{}\r\n", end_str));
+    event_content.push_str(&format!("SUMMARY:{}\r\n", escape_ics_text(&event_title)));
+    event_content.push_str(&format!(
+        "DESCRIPTION:{}\r\n",
+        escape_ics_text(&description)
+    ));
+    event_content.push_str(&format!("LOCATION:{}\r\n", escape_ics_text(&location)));
+
+    // Add recurrence rule if there are specific days and times
+    if !meeting_time.days.is_empty() && meeting_time.time_range.is_some() {
+        let days_of_week = meeting_time.days_of_week();
+        let by_day: Vec<String> = days_of_week
+            .iter()
+            .map(|day| day.to_short_string().to_uppercase())
+            .collect();
+
+        if !by_day.is_empty() {
+            let until_date = meeting_time
+                .date_range
+                .end
+                .format("%Y%m%dT000000Z")
+                .to_string();
+
+            event_content.push_str(&format!(
+                "RRULE:FREQ=WEEKLY;BYDAY={};UNTIL={}\r\n",
+                by_day.join(","),
+                until_date
+            ));
+
+            // Add holiday exceptions (EXDATE) if the class would meet on holiday dates
+            let holiday_exceptions = get_holiday_exceptions(meeting_time);
+            if let Some(exdate_property) = generate_exdate_property(&holiday_exceptions, start_utc)
+            {
+                event_content.push_str(&format!("{}\r\n", exdate_property));
+            }
+
+            // Collect holiday names for reporting
+            let mut holiday_names = Vec::new();
+            for (holiday_name, holiday) in UNIVERSITY_HOLIDAYS {
+                for &exception_date in &holiday_exceptions {
+                    if holiday.contains_date(exception_date) {
+                        holiday_names.push(format!(
+                            "{} ({})",
+                            holiday_name,
+                            exception_date.format("%a, %b %d")
+                        ));
+                    }
+                }
+            }
+            holiday_names.sort();
+            holiday_names.dedup();
+
+            return Ok((event_content, holiday_names));
+        }
+    }
+
+    // Event footer
+    event_content.push_str("END:VEVENT\r\n");
+
+    Ok((event_content, Vec::new()))
+}
+
+/// Convert chrono::Weekday to the custom DayOfWeek enum
+fn chrono_weekday_to_day_of_week(weekday: chrono::Weekday) -> crate::banner::meetings::DayOfWeek {
+    use crate::banner::meetings::DayOfWeek;
+    match weekday {
+        chrono::Weekday::Mon => DayOfWeek::Monday,
+        chrono::Weekday::Tue => DayOfWeek::Tuesday,
+        chrono::Weekday::Wed => DayOfWeek::Wednesday,
+        chrono::Weekday::Thu => DayOfWeek::Thursday,
+        chrono::Weekday::Fri => DayOfWeek::Friday,
+        chrono::Weekday::Sat => DayOfWeek::Saturday,
+        chrono::Weekday::Sun => DayOfWeek::Sunday,
+    }
+}
+
+/// Check if a class meets on a specific date based on its meeting days
+fn class_meets_on_date(meeting_time: &MeetingScheduleInfo, date: NaiveDate) -> bool {
+    let weekday = chrono_weekday_to_day_of_week(date.weekday());
+    let meeting_days = meeting_time.days_of_week();
+
+    meeting_days.contains(&weekday)
+}
+
+/// Get holiday dates that fall within the course date range and would conflict with class meetings
+fn get_holiday_exceptions(meeting_time: &MeetingScheduleInfo) -> Vec<NaiveDate> {
+    let mut exceptions = Vec::new();
+
+    // Get the year range from the course date range
+    let start_year = meeting_time.date_range.start.year();
+    let end_year = meeting_time.date_range.end.year();
+
+    for (_, holiday) in UNIVERSITY_HOLIDAYS {
+        // Check for the holiday in each year of the course
+        for year in start_year..=end_year {
+            let holiday_dates = holiday.get_dates_for_year(year);
+
+            for holiday_date in holiday_dates {
+                // Check if the holiday falls within the course date range
+                if holiday_date >= meeting_time.date_range.start
+                    && holiday_date <= meeting_time.date_range.end
+                {
+                    // Check if the class would actually meet on this day
+                    if class_meets_on_date(meeting_time, holiday_date) {
+                        exceptions.push(holiday_date);
+                    }
+                }
+            }
+        }
+    }
+
+    exceptions
+}
+
+/// Generate EXDATE property for holiday exceptions
+fn generate_exdate_property(
+    exceptions: &[NaiveDate],
+    start_time: chrono::DateTime<Utc>,
+) -> Option<String> {
+    if exceptions.is_empty() {
+        return None;
+    }
+
+    let mut exdate_values = Vec::new();
+
+    for &exception_date in exceptions {
+        // Create a datetime for the exception using the same time as the start time
+        let exception_datetime = exception_date.and_time(start_time.time()).and_utc();
+
+        let exdate_str = exception_datetime.format("%Y%m%dT%H%M%SZ").to_string();
+        exdate_values.push(exdate_str);
+    }
+
+    Some(format!("EXDATE:{}", exdate_values.join(",")))
+}
+
+/// Escape text for ICS format
+fn escape_ics_text(text: &str) -> String {
+    text.replace("\\", "\\\\")
+        .replace(";", "\\;")
+        .replace(",", "\\,")
+        .replace("\n", "\\n")
+        .replace("\r", "")
+}
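For a hypothetical Monday/Wednesday/Friday section, the generator above emits one VEVENT per meeting block along these lines (every value below is made up for illustration; DESCRIPTION and LOCATION lines are elided):

    BEGIN:VEVENT
    UID:12345-0-1756134000@banner-bot.local
    DTSTART:20250825T150000Z
    DTEND:20250825T155000Z
    SUMMARY:Example Course Title
    RRULE:FREQ=WEEKLY;BYDAY=MO,WE,FR;UNTIL=20251205T000000Z
    EXDATE:20250901T150000Z
    END:VEVENT

One caveat worth noting: as written, the recurrence branch in generate_event_content returns before the END:VEVENT footer is appended, so recurring events in the actual output would lack that closing line.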
@@ -1,13 +1,13 @@
 //! Bot commands module.

+pub mod gcal;
+pub mod ics;
 pub mod search;
 pub mod terms;
 pub mod time;
-pub mod ics;
-pub mod gcal;

+pub use gcal::gcal;
+pub use ics::ics;
 pub use search::search;
 pub use terms::terms;
 pub use time::time;
-pub use ics::ics;
-pub use gcal::gcal;
@@ -92,9 +92,7 @@ fn parse_course_code(input: &str) -> Result<(i32, i32), Error> {
     };

     if low > high {
-        return Err(anyhow!(
-            "Invalid range: low value greater than high value"
-        ));
+        return Err(anyhow!("Invalid range: low value greater than high value"));
     }

     if low < 1000 || high > 9999 {
@@ -21,6 +21,7 @@ pub async fn terms(
         .data()
         .app_state
         .banner_api
+        .sessions
         .get_terms(&search_term, page_number, max_results)
         .await?;
@@ -46,7 +47,11 @@ fn format_term(term: &BannerTerm, current_term_code: &str) -> String {
     } else {
         ""
     };
-    let is_archived = if term.is_archived() { " (archived)" } else { "" };
+    let is_archived = if term.is_archived() {
+        " (archived)"
+    } else {
+        ""
+    };
     format!(
         "- `{}`: {}{}{}",
         term.code, term.description, is_current, is_archived
@@ -1,6 +1,6 @@
 //! Time command implementation for course meeting times.

-use crate::bot::{utils, Context, Error};
+use crate::bot::{Context, Error, utils};
 use tracing::info;

 /// Get meeting times for a specific course
@@ -20,6 +20,6 @@ pub async fn time(
     ))
     .await?;

-    info!("time command completed for CRN: {}", crn);
+    info!(crn = %crn, "time command completed");
     Ok(())
 }
@@ -1,10 +1,9 @@
-use crate::app_state::AppState;
 use crate::error::Error;
+use crate::state::AppState;

 pub mod commands;
 pub mod utils;

-#[derive(Debug)]
 pub struct Data {
     pub app_state: AppState,
 } // User data, which is stored and accessible in all command invocations
@@ -13,12 +13,12 @@ pub async fn get_course_by_crn(ctx: &Context<'_>, crn: i32) -> Result<Course> {
     let current_term_status = Term::get_current();
     let term = current_term_status.inner();

-    // Fetch live course data from Redis cache via AppState
+    // Fetch live course data from database via AppState
     app_state
         .get_course_or_fetch(&term.to_string(), &crn.to_string())
         .await
         .map_err(|e| {
-            error!(%e, crn, "failed to fetch course data");
+            error!(error = %e, crn = %crn, "failed to fetch course data");
             e
         })
 }
104 src/cli.rs Normal file
@@ -0,0 +1,104 @@
use clap::Parser;

/// Banner Discord Bot - Course availability monitoring
///
/// This application runs multiple services that can be controlled via CLI arguments:
/// - bot: Discord bot for course monitoring commands
/// - web: HTTP server for web interface and API
/// - scraper: Background service for scraping course data
///
/// Use --services to specify which services to run, or --disable-services to exclude specific services.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Args {
    /// Log formatter to use
    #[arg(long, value_enum, default_value_t = default_tracing_format())]
    pub tracing: TracingFormat,

    /// Services to run (comma-separated). Default: all services
    ///
    /// Examples:
    ///   --services bot,web    # Run only bot and web services
    ///   --services scraper    # Run only the scraper service
    #[arg(long, value_delimiter = ',', conflicts_with = "disable_services")]
    pub services: Option<Vec<ServiceName>>,

    /// Services to disable (comma-separated)
    ///
    /// Examples:
    ///   --disable-services bot      # Run web and scraper only
    ///   --disable-services bot,web  # Run only the scraper service
    #[arg(long, value_delimiter = ',', conflicts_with = "services")]
    pub disable_services: Option<Vec<ServiceName>>,
}

#[derive(clap::ValueEnum, Clone, Debug)]
pub enum TracingFormat {
    /// Use pretty formatter (default in debug mode)
    Pretty,
    /// Use JSON formatter (default in release mode)
    Json,
}

#[derive(clap::ValueEnum, Clone, Debug, PartialEq)]
pub enum ServiceName {
    /// Discord bot for course monitoring commands
    Bot,
    /// HTTP server for web interface and API
    Web,
    /// Background service for scraping course data
    Scraper,
}

impl ServiceName {
    /// Get all available services
    pub fn all() -> Vec<ServiceName> {
        vec![ServiceName::Bot, ServiceName::Web, ServiceName::Scraper]
    }

    /// Convert to string for service registration
    pub fn as_str(&self) -> &'static str {
        match self {
            ServiceName::Bot => "bot",
            ServiceName::Web => "web",
            ServiceName::Scraper => "scraper",
        }
    }
}

/// Determine which services should be enabled based on CLI arguments
pub fn determine_enabled_services(args: &Args) -> Result<Vec<ServiceName>, anyhow::Error> {
    match (&args.services, &args.disable_services) {
        (Some(services), None) => {
            // User specified which services to run
            Ok(services.clone())
        }
        (None, Some(disabled)) => {
            // User specified which services to disable
            let enabled: Vec<ServiceName> = ServiceName::all()
                .into_iter()
                .filter(|s| !disabled.contains(s))
                .collect();
            Ok(enabled)
        }
        (None, None) => {
            // Default: run all services
            Ok(ServiceName::all())
        }
        (Some(_), Some(_)) => {
            // This should be prevented by clap's conflicts_with, but just in case
            Err(anyhow::anyhow!(
                "Cannot specify both --services and --disable-services"
            ))
        }
    }
}

#[cfg(debug_assertions)]
const DEFAULT_TRACING_FORMAT: TracingFormat = TracingFormat::Pretty;
#[cfg(not(debug_assertions))]
const DEFAULT_TRACING_FORMAT: TracingFormat = TracingFormat::Json;

fn default_tracing_format() -> TracingFormat {
    DEFAULT_TRACING_FORMAT
}
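A quick sketch of how this module would compose in main (hypothetical wiring; the diff shows only the CLI module itself):

    use clap::Parser;

    fn main() -> Result<(), anyhow::Error> {
        // e.g. `banner --services bot,web` or `banner --disable-services scraper`
        let args = Args::parse();
        let enabled = determine_enabled_services(&args)?;
        for service in &enabled {
            // as_str() gives the name used for service registration
            println!("enabling service: {}", service.as_str());
        }
        Ok(())
    }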
@@ -8,24 +8,23 @@ use fundu::{DurationParser, TimeUnit};
 use serde::{Deserialize, Deserializer};
 use std::time::Duration;

-/// Application configuration loaded from environment variables
+/// Main application configuration containing all sub-configurations
 #[derive(Deserialize)]
 pub struct Config {
-    /// Discord bot token for authentication
-    pub bot_token: String,
-    /// Port for the web server
+    /// Log level for the application
+    ///
+    /// This value is used to set the log level for this application's target specifically.
+    /// e.g. "debug" would be similar to "warn,banner=debug,..."
+    ///
+    /// Valid values are: "trace", "debug", "info", "warn", "error"
+    /// Defaults to "info" if not specified
+    #[serde(default = "default_log_level")]
+    pub log_level: String,
+    /// Port for the web server (default: 8080)
     #[serde(default = "default_port")]
     pub port: u16,
     /// Database connection URL
     pub database_url: String,
-    /// Redis connection URL
-    pub redis_url: String,
-    /// Base URL for banner generation service
-    pub banner_base_url: String,
-    /// Target Discord guild ID where the bot operates
-    pub bot_target_guild: u64,
-    /// Discord application ID
-    pub bot_app_id: u64,
     /// Graceful shutdown timeout duration
     ///
     /// Accepts both numeric values (seconds) and duration strings
@@ -35,11 +34,29 @@ pub struct Config {
         deserialize_with = "deserialize_duration"
     )]
     pub shutdown_timeout: Duration,
+    /// Discord bot token for authentication
+    pub bot_token: String,
+    /// Target Discord guild ID where the bot operates
+    pub bot_target_guild: u64,
+
+    /// Base URL for banner generation service
+    ///
+    /// Defaults to "https://ssbprod.utsa.edu/StudentRegistrationSsb/ssb" if not specified
+    #[serde(default = "default_banner_base_url")]
+    pub banner_base_url: String,
+    /// Rate limiting configuration for Banner API requests
+    #[serde(default = "default_rate_limiting")]
+    pub rate_limiting: RateLimitingConfig,
 }

-/// Default port of 3000
+/// Default log level of "info"
+fn default_log_level() -> String {
+    "info".to_string()
+}
+
+/// Default port of 8080
 fn default_port() -> u16 {
-    3000
+    8080
 }

 /// Default shutdown timeout of 8 seconds
@@ -47,6 +64,67 @@ fn default_shutdown_timeout() {
     Duration::from_secs(8)
 }

+/// Default banner base URL
+fn default_banner_base_url() -> String {
+    "https://ssbprod.utsa.edu/StudentRegistrationSsb/ssb".to_string()
+}
+
+/// Rate limiting configuration for Banner API requests
+#[derive(Deserialize, Clone, Debug)]
+pub struct RateLimitingConfig {
+    /// Requests per minute for session operations (very conservative)
+    #[serde(default = "default_session_rpm")]
+    pub session_rpm: u32,
+    /// Requests per minute for search operations (moderate)
+    #[serde(default = "default_search_rpm")]
+    pub search_rpm: u32,
+    /// Requests per minute for metadata operations (moderate)
+    #[serde(default = "default_metadata_rpm")]
+    pub metadata_rpm: u32,
+    /// Requests per minute for reset operations (low priority)
+    #[serde(default = "default_reset_rpm")]
+    pub reset_rpm: u32,
+    /// Burst allowance (extra requests allowed in short bursts)
+    #[serde(default = "default_burst_allowance")]
+    pub burst_allowance: u32,
+}
+
+/// Default rate limiting configuration
+fn default_rate_limiting() -> RateLimitingConfig {
+    RateLimitingConfig {
+        session_rpm: default_session_rpm(),
+        search_rpm: default_search_rpm(),
+        metadata_rpm: default_metadata_rpm(),
+        reset_rpm: default_reset_rpm(),
+        burst_allowance: default_burst_allowance(),
+    }
+}
+
+/// Default session requests per minute (6 = 1 every 10 seconds)
+fn default_session_rpm() -> u32 {
+    6
+}
+
+/// Default search requests per minute (30 = 1 every 2 seconds)
+fn default_search_rpm() -> u32 {
+    30
+}
+
+/// Default metadata requests per minute (20 = 1 every 3 seconds)
+fn default_metadata_rpm() -> u32 {
+    20
+}
+
+/// Default reset requests per minute (10 = 1 every 6 seconds)
+fn default_reset_rpm() -> u32 {
+    10
+}
+
+/// Default burst allowance (3 extra requests)
+fn default_burst_allowance() -> u32 {
+    3
+}

 /// Duration parser configured to handle various time units with seconds as default
 ///
 /// Supports:
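`SESSION_CREATION_RATE_LIMITER.until_ready()` in the session pool matches the direct-rate-limiter API of the governor crate, so these RPM values plausibly map to quotas along the following lines. A sketch under that assumption; the actual constructor in the codebase may differ:

    use governor::{Quota, RateLimiter};
    use std::num::NonZeroU32;

    async fn create_session_rate_limited() {
        // session_rpm = 6 (one permit every 10 s), burst_allowance = 3 extra.
        let quota = Quota::per_minute(NonZeroU32::new(6).unwrap())
            .allow_burst(NonZeroU32::new(6 + 3).unwrap());
        let limiter = RateLimiter::direct(quota);
        // Suspends until a permit is available, like until_ready() in the pool.
        limiter.until_ready().await;
        // ... create the session ...
    }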
135 src/data/batch.rs Normal file
@@ -0,0 +1,135 @@
//! Batch database operations for improved performance.

use crate::banner::Course;
use crate::error::Result;
use sqlx::PgPool;
use std::time::Instant;
use tracing::info;

/// Batch upsert courses in a single database query.
///
/// This function performs a bulk INSERT...ON CONFLICT DO UPDATE for all courses
/// in a single round-trip to the database, significantly reducing overhead compared
/// to individual inserts.
///
/// # Performance
/// - Reduces N database round-trips to 1
/// - Typical usage: 50-200 courses per batch
/// - PostgreSQL parameter limit: 65,535 (we use ~10 per course)
///
/// # Arguments
/// * `courses` - Slice of Course structs from the Banner API
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// * `Ok(())` on success
/// * `Err(_)` if the database operation fails
///
/// # Example
/// ```no_run
/// use banner::data::batch::batch_upsert_courses;
/// use banner::banner::Course;
/// use sqlx::PgPool;
///
/// async fn example(courses: &[Course], pool: &PgPool) -> anyhow::Result<()> {
///     batch_upsert_courses(courses, pool).await?;
///     Ok(())
/// }
/// ```
pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<()> {
    // Early return for empty batches
    if courses.is_empty() {
        info!("No courses to upsert, skipping batch operation");
        return Ok(());
    }

    let start = Instant::now();
    let course_count = courses.len();

    // Extract course fields into vectors for UNNEST
    let crns: Vec<&str> = courses
        .iter()
        .map(|c| c.course_reference_number.as_str())
        .collect();
    let subjects: Vec<&str> = courses.iter().map(|c| c.subject.as_str()).collect();
    let course_numbers: Vec<&str> = courses.iter().map(|c| c.course_number.as_str()).collect();
    let titles: Vec<&str> = courses.iter().map(|c| c.course_title.as_str()).collect();
    let term_codes: Vec<&str> = courses.iter().map(|c| c.term.as_str()).collect();
    let enrollments: Vec<i32> = courses.iter().map(|c| c.enrollment).collect();
    let max_enrollments: Vec<i32> = courses.iter().map(|c| c.maximum_enrollment).collect();
    let wait_counts: Vec<i32> = courses.iter().map(|c| c.wait_count).collect();
    let wait_capacities: Vec<i32> = courses.iter().map(|c| c.wait_capacity).collect();

    // Perform batch upsert using UNNEST for efficient bulk insertion
    let result = sqlx::query(
        r#"
        INSERT INTO courses (
            crn, subject, course_number, title, term_code,
            enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at
        )
        SELECT * FROM UNNEST(
            $1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
            $6::int4[], $7::int4[], $8::int4[], $9::int4[],
            array_fill(NOW()::timestamptz, ARRAY[$10])
        ) AS t(
            crn, subject, course_number, title, term_code,
            enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at
        )
        ON CONFLICT (crn, term_code)
        DO UPDATE SET
            subject = EXCLUDED.subject,
            course_number = EXCLUDED.course_number,
            title = EXCLUDED.title,
            enrollment = EXCLUDED.enrollment,
            max_enrollment = EXCLUDED.max_enrollment,
            wait_count = EXCLUDED.wait_count,
            wait_capacity = EXCLUDED.wait_capacity,
            last_scraped_at = EXCLUDED.last_scraped_at
        "#,
    )
    .bind(&crns)
    .bind(&subjects)
    .bind(&course_numbers)
    .bind(&titles)
    .bind(&term_codes)
    .bind(&enrollments)
    .bind(&max_enrollments)
    .bind(&wait_counts)
    .bind(&wait_capacities)
    .bind(course_count as i32)
    .execute(db_pool)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to batch upsert courses: {}", e))?;

    let duration = start.elapsed();

    info!(
        courses_count = course_count,
        rows_affected = result.rows_affected(),
        duration_ms = duration.as_millis(),
        "Batch upserted courses"
    );

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_empty_batch_returns_ok() {
        // This is a basic compile-time test
        // Runtime tests would require sqlx::test macro and a test database
        let courses: Vec<Course> = vec![];
        assert_eq!(courses.len(), 0);
    }
}
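One sizing note on the parameter-limit remark in the doc comment: the UNNEST form binds a constant ten parameters (nine arrays plus the row count) regardless of slice length, so the 65,535 ceiling only constrains a naive per-row VALUES insert, which at ~10 parameters per course tops out near 6,500 rows. If a single array bind ever grows uncomfortably large, chunking the slice is enough (sketch, reusing the function above with a hypothetical chunk size):

    for chunk in courses.chunks(1_000) {
        batch_upsert_courses(chunk, db_pool).await?;
    }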
@@ -1,4 +1,4 @@
 //! Database models and schema.

+pub mod batch;
 pub mod models;
-pub mod schema;
@@ -3,6 +3,7 @@
 use chrono::{DateTime, Utc};
 use serde_json::Value;

+#[allow(dead_code)]
 #[derive(sqlx::FromRow, Debug, Clone)]
 pub struct Course {
     pub id: i32,
@@ -18,6 +19,7 @@ pub struct Course {
     pub last_scraped_at: DateTime<Utc>,
 }

+#[allow(dead_code)]
 #[derive(sqlx::FromRow, Debug, Clone)]
 pub struct CourseMetric {
     pub id: i32,
@@ -28,6 +30,7 @@ pub struct CourseMetric {
     pub seats_available: i32,
 }

+#[allow(dead_code)]
 #[derive(sqlx::FromRow, Debug, Clone)]
 pub struct CourseAudit {
     pub id: i32,
@@ -59,6 +62,7 @@ pub enum TargetType {
 }

 /// Represents a queryable job from the database.
+#[allow(dead_code)]
 #[derive(sqlx::FromRow, Debug, Clone)]
 pub struct ScrapeJob {
     pub id: i32,
@@ -68,4 +72,8 @@ pub struct ScrapeJob {
     pub execute_at: DateTime<Utc>,
     pub created_at: DateTime<Utc>,
     pub locked_at: Option<DateTime<Utc>>,
+    /// Number of retry attempts for this job (non-negative, enforced by CHECK constraint)
+    pub retry_count: i32,
+    /// Maximum number of retry attempts allowed (non-negative, enforced by CHECK constraint)
+    pub max_retries: i32,
 }
@@ -1,69 +0,0 @@
// pub mod sql_types {
//     #[derive(diesel::sql_types::SqlType)]
//     #[diesel(postgres_type(name = "scrape_priority"))]
//     pub struct ScrapePriority;
//
//     #[derive(diesel::sql_types::SqlType)]
//     #[diesel(postgres_type(name = "target_type"))]
//     pub struct TargetType;
// }
//
// use super::models::{ScrapePriorityMapping, TargetTypeMapping};
//
// diesel::table! {
//     use diesel::sql_types::*;
//     use super::{ScrapePriorityMapping, TargetTypeMapping};
//
//     scrape_jobs (id) {
//         id -> Int4,
//         target_type -> TargetTypeMapping,
//         target_payload -> Jsonb,
//         priority -> ScrapePriorityMapping,
//         execute_at -> Timestamptz,
//         created_at -> Timestamptz,
//         locked_at -> Nullable<Timestamptz>,
//     }
// }
//
// diesel::table! {
//     courses (id) {
//         id -> Int4,
//         crn -> Varchar,
//         subject -> Varchar,
//         course_number -> Varchar,
//         title -> Varchar,
//         term_code -> Varchar,
//         enrollment -> Int4,
//         max_enrollment -> Int4,
//         wait_count -> Int4,
//         wait_capacity -> Int4,
//         last_scraped_at -> Timestamptz,
//     }
// }
//
// diesel::table! {
//     course_metrics (id) {
//         id -> Int4,
//         course_id -> Int4,
//         timestamp -> Timestamptz,
//         enrollment -> Int4,
//         wait_count -> Int4,
//         seats_available -> Int4,
//     }
// }
//
// diesel::table! {
//     course_audits (id) {
//         id -> Int4,
//         course_id -> Int4,
//         timestamp -> Timestamptz,
//         field_changed -> Varchar,
//         old_value -> Text,
//         new_value -> Text,
//     }
// }
//
// diesel::joinable!(course_metrics -> courses (course_id));
// diesel::joinable!(course_audits -> courses (course_id));
//
// diesel::allow_tables_to_appear_in_same_query!(courses, course_metrics, course_audits, scrape_jobs,);
275
src/formatter.rs
Normal file
@@ -0,0 +1,275 @@
//! Custom tracing formatter

use serde::Serialize;
use serde_json::{Map, Value};
use std::fmt;
use time::macros::format_description;
use time::{OffsetDateTime, format_description::FormatItem};
use tracing::field::{Field, Visit};
use tracing::{Event, Level, Subscriber};
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields, FormattedFields};
use tracing_subscriber::registry::LookupSpan;
use yansi::Paint;

/// Cached format description for timestamps
/// Uses 3 subsecond digits on Emscripten, 5 otherwise for better performance
#[cfg(target_os = "emscripten")]
const TIMESTAMP_FORMAT: &[FormatItem<'static>] =
    format_description!("[hour]:[minute]:[second].[subsecond digits:3]");

#[cfg(not(target_os = "emscripten"))]
const TIMESTAMP_FORMAT: &[FormatItem<'static>] =
    format_description!("[hour]:[minute]:[second].[subsecond digits:5]");

/// A custom formatter with enhanced timestamp formatting
///
/// Re-implementation of the Full formatter with improved timestamp display.
pub struct CustomPrettyFormatter;

impl<S, N> FormatEvent<S, N> for CustomPrettyFormatter
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        let meta = event.metadata();

        // 1) Timestamp (dimmed when ANSI)
        let now = OffsetDateTime::now_utc();
        let formatted_time = now.format(&TIMESTAMP_FORMAT).map_err(|e| {
            eprintln!("Failed to format timestamp: {}", e);
            fmt::Error
        })?;
        write_dimmed(&mut writer, formatted_time)?;
        writer.write_char(' ')?;

        // 2) Colored 5-char level like Full
        write_colored_level(&mut writer, meta.level())?;
        writer.write_char(' ')?;

        // 3) Span scope chain (bold names, fields in braces, dimmed ':')
        if let Some(scope) = ctx.event_scope() {
            let mut saw_any = false;
            for span in scope.from_root() {
                write_bold(&mut writer, span.metadata().name())?;
                saw_any = true;

                write_dimmed(&mut writer, ":")?;

                let ext = span.extensions();
                if let Some(fields) = &ext.get::<FormattedFields<N>>()
                    && !fields.fields.is_empty()
                {
                    write_bold(&mut writer, "{")?;
                    writer.write_str(fields.fields.as_str())?;
                    write_bold(&mut writer, "}")?;
                }
                write_dimmed(&mut writer, ":")?;
            }

            if saw_any {
                writer.write_char(' ')?;
            }
        }

        // 4) Target (dimmed), then a space
        if writer.has_ansi_escapes() {
            write!(writer, "{}: ", Paint::new(meta.target()).dim())?;
        } else {
            write!(writer, "{}: ", meta.target())?;
        }

        // 5) Event fields
        ctx.format_fields(writer.by_ref(), event)?;

        // 6) Newline
        writeln!(writer)
    }
}

/// A custom JSON formatter that flattens fields to root level
///
/// Outputs logs in the format: { "message": "...", "level": "...", "customAttribute": "..." }
pub struct CustomJsonFormatter;

impl<S, N> FormatEvent<S, N> for CustomJsonFormatter
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        let meta = event.metadata();

        #[derive(Serialize)]
        struct EventFields {
            message: String,
            level: String,
            target: String,
            #[serde(flatten)]
            spans: Map<String, Value>,
            #[serde(flatten)]
            fields: Map<String, Value>,
        }

        let (message, fields, spans) = {
            let mut message: Option<String> = None;
            let mut fields: Map<String, Value> = Map::new();
            let mut spans: Map<String, Value> = Map::new();

            struct FieldVisitor<'a> {
                message: &'a mut Option<String>,
                fields: &'a mut Map<String, Value>,
            }

            impl<'a> Visit for FieldVisitor<'a> {
                fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
                    let key = field.name();
                    if key == "message" {
                        *self.message = Some(format!("{:?}", value));
                    } else {
                        // Use typed methods for better performance
                        self.fields
                            .insert(key.to_string(), Value::String(format!("{:?}", value)));
                    }
                }

                fn record_str(&mut self, field: &Field, value: &str) {
                    let key = field.name();
                    if key == "message" {
                        *self.message = Some(value.to_string());
                    } else {
                        self.fields
                            .insert(key.to_string(), Value::String(value.to_string()));
                    }
                }

                fn record_i64(&mut self, field: &Field, value: i64) {
                    let key = field.name();
                    if key != "message" {
                        self.fields.insert(
                            key.to_string(),
                            Value::Number(serde_json::Number::from(value)),
                        );
                    }
                }

                fn record_u64(&mut self, field: &Field, value: u64) {
                    let key = field.name();
                    if key != "message" {
                        self.fields.insert(
                            key.to_string(),
                            Value::Number(serde_json::Number::from(value)),
                        );
                    }
                }

                fn record_bool(&mut self, field: &Field, value: bool) {
                    let key = field.name();
                    if key != "message" {
                        self.fields.insert(key.to_string(), Value::Bool(value));
                    }
                }
            }

            let mut visitor = FieldVisitor {
                message: &mut message,
                fields: &mut fields,
            };
            event.record(&mut visitor);

            // Collect span information from the span hierarchy
            if let Some(scope) = ctx.event_scope() {
                for span in scope.from_root() {
                    let span_name = span.metadata().name().to_string();
                    let mut span_fields: Map<String, Value> = Map::new();

                    // Try to extract fields from FormattedFields
                    let ext = span.extensions();
                    if let Some(formatted_fields) = ext.get::<FormattedFields<N>>() {
                        // Try to parse as JSON first
                        if let Ok(json_fields) = serde_json::from_str::<Map<String, Value>>(
                            formatted_fields.fields.as_str(),
                        ) {
                            span_fields.extend(json_fields);
                        } else {
                            // If not valid JSON, treat the entire field string as a single field
                            span_fields.insert(
                                "raw".to_string(),
                                Value::String(formatted_fields.fields.as_str().to_string()),
                            );
                        }
                    }

                    // Insert span as a nested object directly into the spans map
                    spans.insert(span_name, Value::Object(span_fields));
                }
            }

            (message, fields, spans)
        };

        let json = EventFields {
            message: message.unwrap_or_default(),
            level: meta.level().to_string(),
            target: meta.target().to_string(),
            spans,
            fields,
        };

        writeln!(
            writer,
            "{}",
            serde_json::to_string(&json).unwrap_or_else(|_| "{}".to_string())
        )
    }
}

/// Write the verbosity level with the same coloring/alignment as the Full formatter.
fn write_colored_level(writer: &mut Writer<'_>, level: &Level) -> fmt::Result {
    if writer.has_ansi_escapes() {
        let paint = match *level {
            Level::TRACE => Paint::new("TRACE").magenta(),
            Level::DEBUG => Paint::new("DEBUG").blue(),
            Level::INFO => Paint::new(" INFO").green(),
            Level::WARN => Paint::new(" WARN").yellow(),
            Level::ERROR => Paint::new("ERROR").red(),
        };
        write!(writer, "{}", paint)
    } else {
        // Right-pad to width 5 like Full's non-ANSI mode
        match *level {
            Level::TRACE => write!(writer, "{:>5}", "TRACE"),
            Level::DEBUG => write!(writer, "{:>5}", "DEBUG"),
            Level::INFO => write!(writer, "{:>5}", " INFO"),
            Level::WARN => write!(writer, "{:>5}", " WARN"),
            Level::ERROR => write!(writer, "{:>5}", "ERROR"),
        }
    }
}

fn write_dimmed(writer: &mut Writer<'_>, s: impl fmt::Display) -> fmt::Result {
    if writer.has_ansi_escapes() {
        write!(writer, "{}", Paint::new(s).dim())
    } else {
        write!(writer, "{}", s)
    }
}

fn write_bold(writer: &mut Writer<'_>, s: impl fmt::Display) -> fmt::Result {
    if writer.has_ansi_escapes() {
        write!(writer, "{}", Paint::new(s).bold())
    } else {
        write!(writer, "{}", s)
    }
}
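To make the field flattening concrete, a usage sketch; the subscriber from src/logging.rs is assumed to be installed, and the printed line is illustrative rather than captured output:

use tracing::{info, info_span};

fn emit_example() {
    let span = info_span!("request", user_id = 42);
    let _guard = span.enter();
    info!(latency_ms = 17u64, "request served");
    // Roughly, with CustomJsonFormatter + JsonFields (field order may vary,
    // and the target here is made up):
    // {"message":"request served","level":"INFO","target":"banner::web","request":{"user_id":42},"latency_ms":17}
}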
@@ -1,8 +1,15 @@
-pub mod app_state;
+pub mod app;
 pub mod banner;
 pub mod bot;
+pub mod cli;
+pub mod config;
 pub mod data;
 pub mod error;
+pub mod formatter;
+pub mod logging;
 pub mod scraper;
 pub mod services;
+pub mod signals;
+pub mod state;
+pub mod utils;
 pub mod web;
47
src/logging.rs
Normal file
@@ -0,0 +1,47 @@
use crate::cli::TracingFormat;
use crate::config::Config;
use crate::formatter;
use tracing_subscriber::fmt::format::JsonFields;
use tracing_subscriber::{EnvFilter, FmtSubscriber};

/// Configure and initialize logging for the application
pub fn setup_logging(config: &Config, tracing_format: TracingFormat) {
    // Configure logging based on config
    // Note: Even when base_level is trace or debug, we suppress trace logs from noisy
    // infrastructure modules to keep output readable. These modules use debug for important
    // events and trace only for very detailed debugging.
    let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| {
        let base_level = &config.log_level;
        EnvFilter::new(format!(
            "warn,banner={},banner::rate_limiter=warn,banner::session=debug,banner::rate_limit_middleware=warn,banner::middleware=debug",
            base_level
        ))
    });

    // Select formatter based on CLI args
    let use_pretty = match tracing_format {
        TracingFormat::Pretty => true,
        TracingFormat::Json => false,
    };

    let subscriber: Box<dyn tracing::Subscriber + Send + Sync> = if use_pretty {
        Box::new(
            FmtSubscriber::builder()
                .with_target(true)
                .event_format(formatter::CustomPrettyFormatter)
                .with_env_filter(filter)
                .finish(),
        )
    } else {
        Box::new(
            FmtSubscriber::builder()
                .with_target(true)
                .event_format(formatter::CustomJsonFormatter)
                .fmt_fields(JsonFields::new())
                .with_env_filter(filter)
                .finish(),
        )
    };

    tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed");
}
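Because the filter chain starts from EnvFilter::try_from_default_env(), RUST_LOG (when set) takes precedence over the directive string built from config. A small wiring sketch; the crate paths assume this code lives in the banner crate, and Config construction is elided because its loading is not shown in this hunk:

use banner::{cli::TracingFormat, config::Config, logging::setup_logging};

// Hypothetical helper: demonstrates the two formatter paths only.
fn init_logging(config: &Config, json: bool) {
    // With RUST_LOG unset and config.log_level = "debug", the effective
    // default filter is the directive string built inside setup_logging.
    let format = if json { TracingFormat::Json } else { TracingFormat::Pretty };
    setup_logging(config, format);
}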
267
src/main.rs
@@ -1,48 +1,46 @@
-use serenity::all::{CacheHttp, ClientBuilder, GatewayIntents};
-use tokio::signal;
-use tracing::{error, info, warn};
-use tracing_subscriber::{EnvFilter, FmtSubscriber};
+use crate::app::App;
+use crate::cli::{Args, ServiceName, determine_enabled_services};
+use crate::logging::setup_logging;
+use clap::Parser;
+use std::process::ExitCode;
+use tracing::info;
 
-use crate::app_state::AppState;
-use crate::banner::BannerApi;
-use crate::bot::{Data, get_commands};
-use crate::config::Config;
-use crate::services::manager::ServiceManager;
-use crate::services::{ServiceResult, bot::BotService, web::WebService};
-use crate::web::routes::BannerState;
-use figment::{Figment, providers::Env};
-use std::sync::Arc;
-
-mod app_state;
+mod app;
 mod banner;
 mod bot;
+mod cli;
 mod config;
 mod data;
 mod error;
+mod formatter;
+mod logging;
+mod scraper;
 mod services;
+mod signals;
+mod state;
 mod web;
 
 #[tokio::main]
-async fn main() {
+async fn main() -> ExitCode {
     dotenvy::dotenv().ok();
 
-    // Configure logging
-    let filter =
-        EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("warn,banner=debug"));
-    let subscriber = {
-        #[cfg(debug_assertions)]
-        {
-            FmtSubscriber::builder()
-        }
-        #[cfg(not(debug_assertions))]
-        {
-            FmtSubscriber::builder().json()
-        }
-    }
-    .with_env_filter(filter)
-    .with_target(true)
-    .finish();
-    tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed");
+    // Parse CLI arguments
+    let args = Args::parse();
+
+    // Determine which services should be enabled
+    let enabled_services: Vec<ServiceName> =
+        determine_enabled_services(&args).expect("Failed to determine enabled services");
+    info!(
+        enabled_services = ?enabled_services,
+        "services configuration loaded"
+    );
+
+    // Create and initialize the application
+    let mut app = App::new().await.expect("Failed to initialize application");
+
+    // Setup logging
+    setup_logging(app.config(), args.tracing);
 
     // Log application startup context
     info!(
@@ -52,202 +50,21 @@ async fn main() {
         } else {
             "production"
         },
-        "starting banner system"
+        "starting banner"
     );
 
-    let config: Config = Figment::new()
-        .merge(Env::prefixed("APP_"))
-        .extract()
-        .expect("Failed to load config");
+    // Setup services (web, scraper)
+    app.setup_services(&enabled_services)
+        .expect("Failed to setup services");
 
-    info!(
-        port = config.port,
-        shutdown_timeout = format!("{:.2?}", config.shutdown_timeout),
-        banner_base_url = config.banner_base_url,
-        "configuration loaded"
-    );
-
-    // Create BannerApi and AppState
-    let banner_api =
-        BannerApi::new(config.banner_base_url.clone()).expect("Failed to create BannerApi");
-    banner_api
-        .setup()
-        .await
-        .expect("Failed to set up BannerApi session");
-
-    let banner_api_arc = Arc::new(banner_api);
-    let app_state = AppState::new(banner_api_arc.clone(), &config.redis_url)
-        .expect("Failed to create AppState");
-
-    // Create BannerState for web service
-    let banner_state = BannerState {
-        api: banner_api_arc,
-    };
-
-    // Configure the client with your Discord bot token in the environment
-    let intents = GatewayIntents::non_privileged();
-
-    let bot_target_guild = config.bot_target_guild;
-
-    let framework = poise::Framework::builder()
-        .options(poise::FrameworkOptions {
-            commands: get_commands(),
-            pre_command: |ctx| {
-                Box::pin(async move {
-                    let content = match ctx {
-                        poise::Context::Application(_) => ctx.invocation_string(),
-                        poise::Context::Prefix(prefix) => prefix.msg.content.to_string(),
-                    };
-                    let channel_name = ctx
-                        .channel_id()
-                        .name(ctx.http())
-                        .await
-                        .unwrap_or("unknown".to_string());
-
-                    let span = tracing::Span::current();
-                    span.record("command_name", ctx.command().qualified_name.as_str());
-                    span.record("invocation", ctx.invocation_string());
-                    span.record("msg.content", content.as_str());
-                    span.record("msg.author", ctx.author().tag().as_str());
-                    span.record("msg.id", ctx.id());
-                    span.record("msg.channel_id", ctx.channel_id().get());
-                    span.record("msg.channel", &channel_name.as_str());
-
-                    tracing::info!(
-                        command_name = ctx.command().qualified_name.as_str(),
-                        invocation = ctx.invocation_string(),
-                        msg.content = %content,
-                        msg.author = %ctx.author().tag(),
-                        msg.author_id = %ctx.author().id,
-                        msg.id = %ctx.id(),
-                        msg.channel = %channel_name.as_str(),
-                        msg.channel_id = %ctx.channel_id(),
-                        "{} invoked by {}",
-                        ctx.command().name,
-                        ctx.author().tag()
-                    );
-                })
-            },
-            on_error: |error| {
-                Box::pin(async move {
-                    if let Err(e) = poise::builtins::on_error(error).await {
-                        tracing::error!("Fatal error while sending error message: {}", e);
-                    }
-                    // error!(error = ?error, "command error");
-                })
-            },
-            ..Default::default()
-        })
-        .setup(move |ctx, _ready, framework| {
-            let app_state = app_state.clone();
-            Box::pin(async move {
-                poise::builtins::register_in_guild(
-                    ctx,
-                    &framework.options().commands,
-                    bot_target_guild.into(),
-                )
-                .await?;
-                poise::builtins::register_globally(ctx, &framework.options().commands).await?;
-                Ok(Data { app_state })
-            })
-        })
-        .build();
-
-    let client = ClientBuilder::new(config.bot_token, intents)
-        .framework(framework)
-        .await
-        .expect("Failed to build client");
-
-    // Extract shutdown timeout before moving config
-    let shutdown_timeout = config.shutdown_timeout;
-    let port = config.port;
-
-    // Create service manager
-    let mut service_manager = ServiceManager::new();
-
-    // Register services with the manager
-    let bot_service = Box::new(BotService::new(client));
-    let web_service = Box::new(WebService::new(port, banner_state));
-
-    service_manager.register_service("bot", bot_service);
-    service_manager.register_service("web", web_service);
-
-    // Spawn all registered services
-    service_manager.spawn_all();
-
-    // Set up CTRL+C signal handling
-    let ctrl_c = async {
-        signal::ctrl_c()
+    // Setup bot service if enabled
+    if enabled_services.contains(&ServiceName::Bot) {
+        app.setup_bot_service()
             .await
-            .expect("Failed to install CTRL+C signal handler");
-        info!("received ctrl+c, gracefully shutting down...");
-    };
-
-    // Main application loop - wait for services or CTRL+C
-    let mut exit_code = 0;
-
-    tokio::select! {
-        (service_name, result) = service_manager.run() => {
-            // A service completed unexpectedly
-            match result {
-                ServiceResult::GracefulShutdown => {
-                    info!(service = service_name, "service completed gracefully");
-                }
-                ServiceResult::NormalCompletion => {
-                    warn!(service = service_name, "service completed unexpectedly");
-                    exit_code = 1;
-                }
-                ServiceResult::Error(e) => {
-                    error!(service = service_name, error = ?e, "service failed");
-                    exit_code = 1;
-                }
-            }
-
-            // Shutdown remaining services
-            match service_manager.shutdown(shutdown_timeout).await {
-                Ok(elapsed) => {
-                    info!(
-                        remaining = format!("{:.2?}", shutdown_timeout - elapsed),
-                        "graceful shutdown complete"
-                    );
-                }
-                Err(pending_services) => {
-                    warn!(
-                        pending_count = pending_services.len(),
-                        pending_services = ?pending_services,
-                        "graceful shutdown elapsed - {} service(s) did not complete",
-                        pending_services.len()
-                    );
-
-                    // Non-zero exit code, default to 2 if not set
-                    exit_code = if exit_code == 0 { 2 } else { exit_code };
-                }
-            }
-        }
-        _ = ctrl_c => {
-            // User requested shutdown
-            info!("user requested shutdown via ctrl+c");
-            match service_manager.shutdown(shutdown_timeout).await {
-                Ok(elapsed) => {
-                    info!(
-                        remaining = format!("{:.2?}", shutdown_timeout - elapsed),
-                        "graceful shutdown complete"
-                    );
-                    info!("graceful shutdown complete");
-                }
-                Err(pending_services) => {
-                    warn!(
-                        pending_count = pending_services.len(),
-                        pending_services = ?pending_services,
-                        "graceful shutdown elapsed - {} service(s) did not complete",
-                        pending_services.len()
-                    );
-                    exit_code = 2;
-                }
-            }
-        }
+            .expect("Failed to setup bot service");
     }
 
-    info!(exit_code, "application shutdown complete");
-    std::process::exit(exit_code);
+    // Start all services and run the application
+    app.start_services();
+    app.run().await
 }
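The rewritten main() delegates everything to an App facade and returns ExitCode instead of calling std::process::exit. A minimal, self-contained sketch of that pattern; App here is a stand-in, not the real src/app.rs type, and the exit-code mapping (0 ok, 1 service failure, 2 shutdown timeout) is carried over from the removed inline logic above:

use std::process::ExitCode;

struct App; // stand-in for the real src/app.rs facade

impl App {
    async fn new() -> Result<Self, String> {
        Ok(App)
    }

    async fn run(self) -> ExitCode {
        // The real App would drive its services here and map their results
        // onto an exit code, as the old inline select! loop did.
        ExitCode::from(0)
    }
}

#[tokio::main]
async fn main() -> ExitCode {
    let app = App::new().await.expect("Failed to initialize application");
    app.run().await
}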
104
src/scraper/jobs/mod.rs
Normal file
@@ -0,0 +1,104 @@
pub mod subject;

use crate::banner::BannerApi;
use crate::data::models::TargetType;
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::fmt;

/// Errors that can occur during job parsing
#[derive(Debug)]
pub enum JobParseError {
    InvalidJson(serde_json::Error),
    UnsupportedTargetType(TargetType),
}

impl fmt::Display for JobParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            JobParseError::InvalidJson(e) => write!(f, "Invalid JSON in job payload: {}", e),
            JobParseError::UnsupportedTargetType(t) => {
                write!(f, "Unsupported target type: {:?}", t)
            }
        }
    }
}

impl std::error::Error for JobParseError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            JobParseError::InvalidJson(e) => Some(e),
            _ => None,
        }
    }
}

/// Errors that can occur during job processing
#[derive(Debug)]
pub enum JobError {
    Recoverable(anyhow::Error),   // API failures, network issues
    Unrecoverable(anyhow::Error), // Parse errors, corrupted data
}

impl fmt::Display for JobError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            JobError::Recoverable(e) => write!(f, "Recoverable error: {}", e),
            JobError::Unrecoverable(e) => write!(f, "Unrecoverable error: {}", e),
        }
    }
}

impl std::error::Error for JobError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            JobError::Recoverable(e) => e.source(),
            JobError::Unrecoverable(e) => e.source(),
        }
    }
}

/// Common trait interface for all job types
#[async_trait::async_trait]
pub trait Job: Send + Sync {
    /// The target type this job handles
    #[allow(dead_code)]
    fn target_type(&self) -> TargetType;

    /// Process the job with the given API client and database pool
    async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<()>;

    /// Get a human-readable description of the job
    fn description(&self) -> String;
}

/// Main job enum that dispatches to specific job implementations
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum JobType {
    Subject(subject::SubjectJob),
}

impl JobType {
    /// Create a job from the target type and payload
    pub fn from_target_type_and_payload(
        target_type: TargetType,
        payload: serde_json::Value,
    ) -> Result<Self, JobParseError> {
        match target_type {
            TargetType::Subject => {
                let subject_job: subject::SubjectJob =
                    serde_json::from_value(payload).map_err(JobParseError::InvalidJson)?;
                Ok(JobType::Subject(subject_job))
            }
            _ => Err(JobParseError::UnsupportedTargetType(target_type)),
        }
    }

    /// Convert to a Job trait object
    pub fn boxed(self) -> Box<dyn Job> {
        match self {
            JobType::Subject(job) => Box::new(job),
        }
    }
}
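For reference, the dispatch path a worker takes through this module, using only the API defined above; the subject code is illustrative:

use crate::data::models::TargetType;
use crate::scraper::jobs::{JobParseError, JobType};
use serde_json::json;

fn parse_queued_job() -> Result<(), JobParseError> {
    // Payload shape matches what the scheduler enqueues for subjects.
    let payload = json!({ "subject": "CS" });
    let job = JobType::from_target_type_and_payload(TargetType::Subject, payload)?.boxed();
    assert_eq!(job.description(), "Scrape subject: CS");
    // job.process(&banner_api, &db_pool).await would then execute it.
    Ok(())
}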
56
src/scraper/jobs/subject.rs
Normal file
@@ -0,0 +1,56 @@
use super::Job;
use crate::banner::{BannerApi, SearchQuery, Term};
use crate::data::batch::batch_upsert_courses;
use crate::data::models::TargetType;
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use tracing::{debug, info};

/// Job implementation for scraping subject data
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SubjectJob {
    pub subject: String,
}

impl SubjectJob {
    pub fn new(subject: String) -> Self {
        Self { subject }
    }
}

#[async_trait::async_trait]
impl Job for SubjectJob {
    fn target_type(&self) -> TargetType {
        TargetType::Subject
    }

    #[tracing::instrument(skip(self, banner_api, db_pool), fields(subject = %self.subject))]
    async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<()> {
        let subject_code = &self.subject;

        // Get the current term
        let term = Term::get_current().inner().to_string();
        let query = SearchQuery::new().subject(subject_code).max_results(500);

        let search_result = banner_api
            .search(&term, &query, "subjectDescription", false)
            .await?;

        if let Some(courses_from_api) = search_result.data {
            info!(
                subject = subject_code,
                count = courses_from_api.len(),
                "Found courses"
            );
            batch_upsert_courses(&courses_from_api, db_pool).await?;
        }

        debug!(subject = subject_code, "Subject job completed");
        Ok(())
    }

    fn description(&self) -> String {
        format!("Scrape subject: {}", self.subject)
    }
}
116
src/scraper/mod.rs
Normal file
@@ -0,0 +1,116 @@
pub mod jobs;
pub mod scheduler;
pub mod worker;

use crate::banner::BannerApi;
use crate::services::Service;
use sqlx::PgPool;
use std::sync::Arc;
use tokio::sync::broadcast;
use tokio::task::JoinHandle;
use tracing::{info, warn};

use self::scheduler::Scheduler;
use self::worker::Worker;

/// The main service that will be managed by the application's `ServiceManager`.
///
/// It holds the shared resources (database pool, API client) and manages the
/// lifecycle of the Scheduler and Worker tasks.
pub struct ScraperService {
    db_pool: PgPool,
    banner_api: Arc<BannerApi>,
    scheduler_handle: Option<JoinHandle<()>>,
    worker_handles: Vec<JoinHandle<()>>,
    shutdown_tx: Option<broadcast::Sender<()>>,
}

impl ScraperService {
    /// Creates a new `ScraperService`.
    pub fn new(db_pool: PgPool, banner_api: Arc<BannerApi>) -> Self {
        Self {
            db_pool,
            banner_api,
            scheduler_handle: None,
            worker_handles: Vec::new(),
            shutdown_tx: None,
        }
    }

    /// Starts the scheduler and a pool of workers.
    pub fn start(&mut self) {
        info!("ScraperService starting");

        // Create shutdown channel
        let (shutdown_tx, _) = broadcast::channel(1);
        self.shutdown_tx = Some(shutdown_tx.clone());

        let scheduler = Scheduler::new(self.db_pool.clone(), self.banner_api.clone());
        let shutdown_rx = shutdown_tx.subscribe();
        let scheduler_handle = tokio::spawn(async move {
            scheduler.run(shutdown_rx).await;
        });
        self.scheduler_handle = Some(scheduler_handle);
        info!("Scheduler task spawned");

        let worker_count = 4; // This could be configurable
        for i in 0..worker_count {
            let worker = Worker::new(i, self.db_pool.clone(), self.banner_api.clone());
            let shutdown_rx = shutdown_tx.subscribe();
            let worker_handle = tokio::spawn(async move {
                worker.run(shutdown_rx).await;
            });
            self.worker_handles.push(worker_handle);
        }
        info!(
            worker_count = self.worker_handles.len(),
            "Spawned worker tasks"
        );
    }
}

#[async_trait::async_trait]
impl Service for ScraperService {
    fn name(&self) -> &'static str {
        "scraper"
    }

    async fn run(&mut self) -> Result<(), anyhow::Error> {
        self.start();
        std::future::pending::<()>().await;
        Ok(())
    }

    async fn shutdown(&mut self) -> Result<(), anyhow::Error> {
        info!("Shutting down scraper service");

        // Send shutdown signal to all tasks
        if let Some(shutdown_tx) = self.shutdown_tx.take() {
            let _ = shutdown_tx.send(());
        } else {
            warn!("No shutdown channel found for scraper service");
            return Err(anyhow::anyhow!("No shutdown channel available"));
        }

        // Collect all handles
        let mut all_handles = Vec::new();
        if let Some(handle) = self.scheduler_handle.take() {
            all_handles.push(handle);
        }
        all_handles.append(&mut self.worker_handles);

        // Wait for all tasks to complete (no internal timeout - let ServiceManager handle it)
        let results = futures::future::join_all(all_handles).await;
        let failed = results.iter().filter(|r| r.is_err()).count();
        if failed > 0 {
            warn!(
                failed_count = failed,
                "Some scraper tasks panicked during shutdown"
            );
            return Err(anyhow::anyhow!("{} task(s) panicked", failed));
        }

        info!("All scraper tasks shutdown gracefully");
        Ok(())
    }
}
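ScraperService fans a single broadcast channel out to the scheduler and every worker, then joins all handles on shutdown. A self-contained illustration of just that pattern, with the job-polling bodies stubbed out:

use tokio::sync::broadcast;

#[tokio::main]
async fn main() {
    let (shutdown_tx, _) = broadcast::channel::<()>(1);

    // One receiver per task, exactly as ScraperService::start does.
    let handles: Vec<_> = (0..4)
        .map(|i| {
            let mut rx = shutdown_tx.subscribe();
            tokio::spawn(async move {
                // A real worker would poll scrape_jobs here.
                let _ = rx.recv().await;
                println!("worker {i} exiting");
            })
        })
        .collect();

    let _ = shutdown_tx.send(()); // every subscriber observes the signal
    futures::future::join_all(handles).await;
}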
188
src/scraper/scheduler.rs
Normal file
@@ -0,0 +1,188 @@
use crate::banner::{BannerApi, Term};
use crate::data::models::{ScrapePriority, TargetType};
use crate::error::Result;
use crate::scraper::jobs::subject::SubjectJob;
use serde_json::json;
use sqlx::PgPool;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::broadcast;
use tokio::time;
use tokio_util::sync::CancellationToken;
use tracing::{debug, error, info, warn};

/// Periodically analyzes data and enqueues prioritized scrape jobs.
pub struct Scheduler {
    db_pool: PgPool,
    banner_api: Arc<BannerApi>,
}

impl Scheduler {
    pub fn new(db_pool: PgPool, banner_api: Arc<BannerApi>) -> Self {
        Self {
            db_pool,
            banner_api,
        }
    }

    /// Runs the scheduler's main loop with graceful shutdown support.
    ///
    /// The scheduler wakes up every 60 seconds to analyze data and enqueue jobs.
    /// When a shutdown signal is received:
    /// 1. Any in-progress scheduling work is gracefully cancelled via CancellationToken
    /// 2. The scheduler waits up to 5 seconds for work to complete
    /// 3. If timeout occurs, the task is abandoned (it will be aborted when dropped)
    ///
    /// This ensures that shutdown is responsive even if scheduling work is blocked.
    pub async fn run(&self, mut shutdown_rx: broadcast::Receiver<()>) {
        info!("Scheduler service started");

        let work_interval = Duration::from_secs(60);
        let mut next_run = time::Instant::now();
        let mut current_work: Option<(tokio::task::JoinHandle<()>, CancellationToken)> = None;

        loop {
            tokio::select! {
                _ = time::sleep_until(next_run) => {
                    let cancel_token = CancellationToken::new();

                    // Spawn work in separate task to allow graceful cancellation during shutdown.
                    // Without this, shutdown would have to wait for the full scheduling cycle.
                    let work_handle = tokio::spawn({
                        let db_pool = self.db_pool.clone();
                        let banner_api = self.banner_api.clone();
                        let cancel_token = cancel_token.clone();

                        async move {
                            tokio::select! {
                                result = Self::schedule_jobs_impl(&db_pool, &banner_api) => {
                                    if let Err(e) = result {
                                        error!(error = ?e, "Failed to schedule jobs");
                                    }
                                }
                                _ = cancel_token.cancelled() => {
                                    debug!("Scheduling work cancelled gracefully");
                                }
                            }
                        }
                    });

                    current_work = Some((work_handle, cancel_token));
                    next_run = time::Instant::now() + work_interval;
                }
                _ = shutdown_rx.recv() => {
                    info!("Scheduler received shutdown signal");

                    if let Some((handle, cancel_token)) = current_work.take() {
                        cancel_token.cancel();

                        // Wait briefly for graceful completion
                        if tokio::time::timeout(Duration::from_secs(5), handle).await.is_err() {
                            warn!("Scheduling work did not complete within 5s, abandoning");
                        } else {
                            debug!("Scheduling work completed gracefully");
                        }
                    }

                    info!("Scheduler exiting gracefully");
                    break;
                }
            }
        }
    }

    /// Core scheduling logic that analyzes data and creates scrape jobs.
    ///
    /// Strategy:
    /// 1. Fetch all subjects for the current term from Banner API
    /// 2. Query existing jobs in a single batch query
    /// 3. Create jobs only for subjects that don't have pending jobs
    ///
    /// This is a static method (not &self) to allow it to be called from spawned tasks.
    #[tracing::instrument(skip_all, fields(term))]
    async fn schedule_jobs_impl(db_pool: &PgPool, banner_api: &BannerApi) -> Result<()> {
        // For now, we will implement a simple baseline scheduling strategy:
        // 1. Get a list of all subjects from the Banner API.
        // 2. Query existing jobs for all subjects in a single query.
        // 3. Create new jobs only for subjects that don't have existing jobs.
        let term = Term::get_current().inner().to_string();

        tracing::Span::current().record("term", term.as_str());
        debug!(term = term, "Enqueuing subject jobs");

        let subjects = banner_api.get_subjects("", &term, 1, 500).await?;
        debug!(
            subject_count = subjects.len(),
            "Retrieved subjects from API"
        );

        // Create payloads for all subjects
        let subject_payloads: Vec<_> = subjects
            .iter()
            .map(|subject| json!({ "subject": subject.code }))
            .collect();

        // Query existing jobs for all subjects in a single query
        let existing_jobs: Vec<(serde_json::Value,)> = sqlx::query_as(
            "SELECT target_payload FROM scrape_jobs
             WHERE target_type = $1 AND target_payload = ANY($2) AND locked_at IS NULL",
        )
        .bind(TargetType::Subject)
        .bind(&subject_payloads)
        .fetch_all(db_pool)
        .await?;

        // Convert to a HashSet for efficient lookup
        let existing_payloads: std::collections::HashSet<String> = existing_jobs
            .into_iter()
            .map(|(payload,)| payload.to_string())
            .collect();

        // Filter out subjects that already have jobs and prepare new jobs
        let mut skipped_count = 0;
        let new_jobs: Vec<_> = subjects
            .into_iter()
            .filter_map(|subject| {
                let job = SubjectJob::new(subject.code.clone());
                let payload = serde_json::to_value(&job).unwrap();
                let payload_str = payload.to_string();

                if existing_payloads.contains(&payload_str) {
                    skipped_count += 1;
                    None
                } else {
                    Some((payload, subject.code))
                }
            })
            .collect();

        if skipped_count > 0 {
            debug!(count = skipped_count, "Skipped subjects with existing jobs");
        }

        // Insert all new jobs in a single batch
        if !new_jobs.is_empty() {
            let now = chrono::Utc::now();
            let mut tx = db_pool.begin().await?;

            for (payload, subject_code) in new_jobs {
                sqlx::query(
                    "INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at) VALUES ($1, $2, $3, $4)"
                )
                .bind(TargetType::Subject)
                .bind(&payload)
                .bind(ScrapePriority::Low)
                .bind(now)
                .execute(&mut *tx)
                .await?;

                debug!(subject = subject_code, "New job enqueued for subject");
            }

            tx.commit().await?;
        }

        debug!("Job scheduling complete");
        Ok(())
    }
}
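schedule_jobs_impl deduplicates by comparing payloads as strings, so the ad-hoc json! shape used for the batch query and SubjectJob's serde output must stay byte-identical. A sketch of that invariant (the subject code is illustrative):

use crate::scraper::jobs::subject::SubjectJob;
use serde_json::json;

fn payload_shapes_agree() {
    let job = SubjectJob::new("CS".to_string());
    let from_struct = serde_json::to_value(&job).unwrap().to_string();
    let ad_hoc = json!({ "subject": "CS" }).to_string();
    // Both must render as {"subject":"CS"}, or the existing-job check
    // silently misses duplicates.
    assert_eq!(from_struct, ad_hoc);
}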
300
src/scraper/worker.rs
Normal file
@@ -0,0 +1,300 @@
use crate::banner::{BannerApi, BannerApiError};
use crate::data::models::ScrapeJob;
use crate::error::Result;
use crate::scraper::jobs::{JobError, JobType};
use sqlx::PgPool;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::broadcast;
use tokio::time;
use tracing::{Instrument, debug, error, info, trace, warn};

/// A single worker instance.
///
/// Each worker runs in its own asynchronous task and continuously polls the
/// database for scrape jobs to execute.
pub struct Worker {
    id: usize, // For logging purposes
    db_pool: PgPool,
    banner_api: Arc<BannerApi>,
}

impl Worker {
    pub fn new(id: usize, db_pool: PgPool, banner_api: Arc<BannerApi>) -> Self {
        Self {
            id,
            db_pool,
            banner_api,
        }
    }

    /// Runs the worker's main loop.
    pub async fn run(&self, mut shutdown_rx: broadcast::Receiver<()>) {
        info!(worker_id = self.id, "Worker started");

        loop {
            // Fetch and lock a job, racing against shutdown signal
            let job = tokio::select! {
                _ = shutdown_rx.recv() => {
                    info!(worker_id = self.id, "Worker received shutdown signal, exiting gracefully");
                    break;
                }
                result = self.fetch_and_lock_job() => {
                    match result {
                        Ok(Some(job)) => job,
                        Ok(None) => {
                            trace!(worker_id = self.id, "No jobs available, waiting");
                            time::sleep(Duration::from_secs(5)).await;
                            continue;
                        }
                        Err(e) => {
                            warn!(worker_id = self.id, error = ?e, "Failed to fetch job, waiting");
                            time::sleep(Duration::from_secs(10)).await;
                            continue;
                        }
                    }
                }
            };

            let job_id = job.id;
            let retry_count = job.retry_count;
            let max_retries = job.max_retries;
            let start = std::time::Instant::now();

            // Process the job, racing against shutdown signal
            let process_result = tokio::select! {
                _ = shutdown_rx.recv() => {
                    self.handle_shutdown_during_processing(job_id).await;
                    break;
                }
                result = self.process_job(job) => result
            };

            let duration = start.elapsed();

            // Handle the job processing result
            self.handle_job_result(job_id, retry_count, max_retries, process_result, duration)
                .await;
        }
    }

    /// Atomically fetches a job from the queue, locking it for processing.
    ///
    /// This uses a `FOR UPDATE SKIP LOCKED` query to ensure that multiple
    /// workers can poll the queue concurrently without conflicts.
    async fn fetch_and_lock_job(&self) -> Result<Option<ScrapeJob>> {
        let mut tx = self.db_pool.begin().await?;

        let job = sqlx::query_as::<_, ScrapeJob>(
            "SELECT * FROM scrape_jobs WHERE locked_at IS NULL AND execute_at <= NOW() ORDER BY priority DESC, execute_at ASC LIMIT 1 FOR UPDATE SKIP LOCKED"
        )
        .fetch_optional(&mut *tx)
        .await?;

        if let Some(ref job) = job {
            sqlx::query("UPDATE scrape_jobs SET locked_at = NOW() WHERE id = $1")
                .bind(job.id)
                .execute(&mut *tx)
                .await?;
        }

        tx.commit().await?;

        Ok(job)
    }

    async fn process_job(&self, job: ScrapeJob) -> Result<(), JobError> {
        // Convert the database job to our job type
        let job_type = JobType::from_target_type_and_payload(job.target_type, job.target_payload)
            .map_err(|e| JobError::Unrecoverable(anyhow::anyhow!(e)))?; // Parse errors are unrecoverable

        // Get the job implementation
        let job_impl = job_type.boxed();

        // Create span with job context
        let span = tracing::debug_span!(
            "process_job",
            job_id = job.id,
            job_type = job_impl.description()
        );

        async move {
            debug!(
                worker_id = self.id,
                job_id = job.id,
                description = job_impl.description(),
                "Processing job"
            );

            // Process the job - API errors are recoverable
            job_impl
                .process(&self.banner_api, &self.db_pool)
                .await
                .map_err(JobError::Recoverable)?;

            Ok(())
        }
        .instrument(span)
        .await
    }

    async fn delete_job(&self, job_id: i32) -> Result<()> {
        sqlx::query("DELETE FROM scrape_jobs WHERE id = $1")
            .bind(job_id)
            .execute(&self.db_pool)
            .await?;
        Ok(())
    }

    async fn unlock_job(&self, job_id: i32) -> Result<()> {
        sqlx::query("UPDATE scrape_jobs SET locked_at = NULL WHERE id = $1")
            .bind(job_id)
            .execute(&self.db_pool)
            .await?;
        Ok(())
    }

    async fn unlock_and_increment_retry(&self, job_id: i32, max_retries: i32) -> Result<bool> {
        let result = sqlx::query_scalar::<_, Option<i32>>(
            "UPDATE scrape_jobs
             SET locked_at = NULL, retry_count = retry_count + 1
             WHERE id = $1
             RETURNING CASE WHEN retry_count + 1 < $2 THEN retry_count + 1 ELSE NULL END",
        )
        .bind(job_id)
        .bind(max_retries)
        .fetch_one(&self.db_pool)
        .await?;

        Ok(result.is_some())
    }

    /// Handle shutdown signal received during job processing
    async fn handle_shutdown_during_processing(&self, job_id: i32) {
        info!(
            worker_id = self.id,
            job_id, "Shutdown received during job processing"
        );

        if let Err(e) = self.unlock_job(job_id).await {
            warn!(
                worker_id = self.id,
                job_id,
                error = ?e,
                "Failed to unlock job during shutdown"
            );
        } else {
            debug!(worker_id = self.id, job_id, "Job unlocked during shutdown");
        }

        info!(worker_id = self.id, "Worker exiting gracefully");
    }

    /// Handle the result of job processing
    async fn handle_job_result(
        &self,
        job_id: i32,
        retry_count: i32,
        max_retries: i32,
        result: Result<(), JobError>,
        duration: std::time::Duration,
    ) {
        match result {
            Ok(()) => {
                debug!(
                    worker_id = self.id,
                    job_id,
                    duration_ms = duration.as_millis(),
                    "Job completed successfully"
                );
                if let Err(e) = self.delete_job(job_id).await {
                    error!(worker_id = self.id, job_id, error = ?e, "Failed to delete completed job");
                }
            }
            Err(JobError::Recoverable(e)) => {
                self.handle_recoverable_error(job_id, retry_count, max_retries, e, duration)
                    .await;
            }
            Err(JobError::Unrecoverable(e)) => {
                error!(
                    worker_id = self.id,
                    job_id,
                    duration_ms = duration.as_millis(),
                    error = ?e,
                    "Job corrupted, deleting"
                );
                if let Err(e) = self.delete_job(job_id).await {
                    error!(worker_id = self.id, job_id, error = ?e, "Failed to delete corrupted job");
                }
            }
        }
    }

    /// Handle recoverable errors by logging appropriately and unlocking the job
    async fn handle_recoverable_error(
        &self,
        job_id: i32,
        retry_count: i32,
        max_retries: i32,
        e: anyhow::Error,
        duration: std::time::Duration,
    ) {
        let next_attempt = retry_count.saturating_add(1);
        let remaining_retries = max_retries.saturating_sub(next_attempt);

        // Log the error appropriately based on type
        if let Some(BannerApiError::InvalidSession(_)) = e.downcast_ref::<BannerApiError>() {
            warn!(
                worker_id = self.id,
                job_id,
                duration_ms = duration.as_millis(),
                retry_attempt = next_attempt,
                max_retries = max_retries,
                remaining_retries = remaining_retries,
                "Invalid session detected, will retry"
            );
        } else {
            error!(
                worker_id = self.id,
                job_id,
                duration_ms = duration.as_millis(),
                retry_attempt = next_attempt,
                max_retries = max_retries,
                remaining_retries = remaining_retries,
                error = ?e,
                "Failed to process job, will retry"
            );
        }

        // Atomically unlock and increment retry count, checking if retry is allowed
        match self.unlock_and_increment_retry(job_id, max_retries).await {
            Ok(can_retry) if can_retry => {
                info!(
                    worker_id = self.id,
                    job_id,
                    retry_attempt = next_attempt,
                    remaining_retries = remaining_retries,
                    "Job unlocked for retry"
                );
            }
            Ok(_) => {
                // Max retries exceeded (detected atomically)
                error!(
                    worker_id = self.id,
                    job_id,
                    duration_ms = duration.as_millis(),
                    retry_count = next_attempt,
                    max_retries = max_retries,
                    error = ?e,
                    "Job failed permanently (max retries exceeded), deleting"
                );
                if let Err(e) = self.delete_job(job_id).await {
                    error!(worker_id = self.id, job_id, error = ?e, "Failed to delete failed job");
                }
            }
            Err(e) => {
                error!(worker_id = self.id, job_id, error = ?e, "Failed to unlock and increment retry count");
            }
        }
    }
}
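unlock_and_increment_retry folds the retry decision into the UPDATE itself via the RETURNING CASE expression, so no second round-trip can race with another worker. A pure-Rust mirror of that gate, useful for sanity-checking the arithmetic without a database:

// Mirrors "CASE WHEN retry_count + 1 < max_retries THEN ... ELSE NULL END":
// a non-NULL result means retry allowed; NULL means the job is out of attempts.
fn may_retry(stored_retry_count: i32, max_retries: i32) -> bool {
    stored_retry_count + 1 < max_retries
}

fn main() {
    assert!(may_retry(0, 3)); // first failure: retry
    assert!(may_retry(1, 3)); // second failure: retry
    assert!(!may_retry(2, 3)); // third failure: permanent, worker deletes the job
}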
@@ -1,20 +1,193 @@
|
|||||||
use super::Service;
|
use super::Service;
|
||||||
|
use crate::bot::{Data, get_commands};
|
||||||
|
use crate::config::Config;
|
||||||
|
use crate::state::AppState;
|
||||||
|
use num_format::{Locale, ToFormattedString};
|
||||||
use serenity::Client;
|
use serenity::Client;
|
||||||
|
use serenity::all::{ActivityData, ClientBuilder, GatewayIntents};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use tracing::{debug, error};
|
use std::time::Duration;
|
||||||
|
use tokio::sync::{Mutex, broadcast};
|
||||||
|
use tokio::task::JoinHandle;
|
||||||
|
use tracing::{debug, error, info, warn};
|
||||||
|
|
||||||
/// Discord bot service implementation
|
/// Discord bot service implementation
|
||||||
pub struct BotService {
|
pub struct BotService {
|
||||||
client: Client,
|
client: Client,
|
||||||
shard_manager: Arc<serenity::gateway::ShardManager>,
|
shard_manager: Arc<serenity::gateway::ShardManager>,
|
||||||
|
status_task_handle: Arc<Mutex<Option<JoinHandle<()>>>>,
|
||||||
|
status_shutdown_tx: Option<broadcast::Sender<()>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BotService {
|
impl BotService {
|
||||||
pub fn new(client: Client) -> Self {
|
/// Create a new Discord bot client with full configuration
|
||||||
|
pub async fn create_client(
|
||||||
|
config: &Config,
|
||||||
|
app_state: AppState,
|
||||||
|
        status_task_handle: Arc<Mutex<Option<JoinHandle<()>>>>,
        status_shutdown_rx: broadcast::Receiver<()>,
    ) -> Result<Client, anyhow::Error> {
        let intents = GatewayIntents::non_privileged();
        let bot_target_guild = config.bot_target_guild;

        let framework = poise::Framework::builder()
            .options(poise::FrameworkOptions {
                commands: get_commands(),
                pre_command: |ctx| {
                    Box::pin(async move {
                        let content = match ctx {
                            poise::Context::Application(_) => ctx.invocation_string(),
                            poise::Context::Prefix(prefix) => prefix.msg.content.to_string(),
                        };
                        let channel_name = ctx
                            .channel_id()
                            .name(ctx.http())
                            .await
                            .unwrap_or("unknown".to_string());

                        let span = tracing::Span::current();
                        span.record("command_name", ctx.command().qualified_name.as_str());
                        span.record("invocation", ctx.invocation_string());
                        span.record("msg.content", content.as_str());
                        span.record("msg.author", ctx.author().tag().as_str());
                        span.record("msg.id", ctx.id());
                        span.record("msg.channel_id", ctx.channel_id().get());
                        span.record("msg.channel", channel_name.as_str());

                        tracing::info!(
                            command_name = ctx.command().qualified_name.as_str(),
                            invocation = ctx.invocation_string(),
                            msg.content = %content,
                            msg.author = %ctx.author().tag(),
                            msg.author_id = %ctx.author().id,
                            msg.id = %ctx.id(),
                            msg.channel = %channel_name.as_str(),
                            msg.channel_id = %ctx.channel_id(),
                            "{} invoked by {}",
                            ctx.command().name,
                            ctx.author().tag()
                        );
                    })
                },
                on_error: |error| {
                    Box::pin(async move {
                        if let Err(e) = poise::builtins::on_error(error).await {
                            tracing::error!(error = %e, "Fatal error while sending error message");
                        }
                    })
                },
                ..Default::default()
            })
            .setup(move |ctx, _ready, framework| {
                let app_state = app_state.clone();
                let status_task_handle = status_task_handle.clone();
                Box::pin(async move {
                    poise::builtins::register_in_guild(
                        ctx,
                        &framework.options().commands,
                        bot_target_guild.into(),
                    )
                    .await?;
                    poise::builtins::register_globally(ctx, &framework.options().commands).await?;

                    // Start status update task with shutdown support
                    let handle = Self::start_status_update_task(
                        ctx.clone(),
                        app_state.clone(),
                        status_shutdown_rx,
                    );
                    *status_task_handle.lock().await = Some(handle);

                    Ok(Data { app_state })
                })
            })
            .build();

        Ok(ClientBuilder::new(config.bot_token.clone(), intents)
            .framework(framework)
            .await?)
    }

    /// Start the status update task for the Discord bot with graceful shutdown support
    fn start_status_update_task(
        ctx: serenity::client::Context,
        app_state: AppState,
        mut shutdown_rx: broadcast::Receiver<()>,
    ) -> JoinHandle<()> {
        tokio::spawn(async move {
            let max_interval = Duration::from_secs(300); // 5 minutes
            let base_interval = Duration::from_secs(30);
            let mut interval = tokio::time::interval(base_interval);
            let mut previous_course_count: Option<i64> = None;

            // This runs once immediately on startup, then with adaptive intervals
            loop {
                tokio::select! {
                    _ = interval.tick() => {
                        // Get the course count; update the activity if it has changed or hasn't been set this session
                        let course_count = app_state.get_course_count().await.unwrap();
                        if previous_course_count.is_none() || previous_course_count != Some(course_count) {
                            ctx.set_activity(Some(ActivityData::playing(format!(
                                "Querying {:} classes",
                                course_count.to_formatted_string(&Locale::en)
                            ))));
                        }

                        // Increase or reset the interval
                        interval = tokio::time::interval(
                            // Avoid logging the first 'change'
                            if course_count != previous_course_count.unwrap_or(0) {
                                if previous_course_count.is_some() {
                                    debug!(
                                        new_course_count = course_count,
                                        last_interval = interval.period().as_secs(),
                                        "Course count changed, resetting interval"
                                    );
                                }

                                // Record the new course count
                                previous_course_count = Some(course_count);

                                // Reset to base interval
                                base_interval
                            } else {
                                // Increase interval by 10% (up to maximum)
                                let new_interval = interval.period().mul_f32(1.1).min(max_interval);
                                debug!(
                                    current_course_count = course_count,
                                    last_interval = interval.period().as_secs(),
                                    new_interval = new_interval.as_secs(),
                                    "Course count unchanged, increasing interval"
                                );

                                new_interval
                            },
                        );

                        // Reset the interval, otherwise it will tick again immediately
                        interval.reset();
                    }
                    _ = shutdown_rx.recv() => {
                        info!("Status update task received shutdown signal");
                        break;
                    }
                }
            }
        })
    }

    pub fn new(
        client: Client,
        status_task_handle: Arc<Mutex<Option<JoinHandle<()>>>>,
        status_shutdown_tx: broadcast::Sender<()>,
    ) -> Self {
        let shard_manager = client.shard_manager.clone();

        Self {
            client,
            shard_manager,
            status_task_handle,
            status_shutdown_tx: Some(status_shutdown_tx),
        }
    }
}
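Editor's note: the adaptive interval above grows by 10% on every tick where the course count is unchanged and snaps back to 30s when it changes. As a standalone illustration of the resulting curve (not part of the diff):

use std::time::Duration;

fn main() {
    // Mirror the loop above: 30s base, x1.1 per unchanged tick, 300s cap.
    let (base, max) = (Duration::from_secs(30), Duration::from_secs(300));
    let mut period = base;
    let mut ticks = 0u32;
    while period < max {
        period = period.mul_f32(1.1).min(max);
        ticks += 1;
    }
    // 1.1^24 is roughly 9.85 and 1.1^25 roughly 10.83, so the cap is hit on the
    // 25th unchanged tick; an idle bot settles at one update every 5 minutes.
    println!("cap of {period:?} reached after {ticks} unchanged ticks");
}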
@@ -28,7 +201,7 @@ impl Service for BotService
    async fn run(&mut self) -> Result<(), anyhow::Error> {
        match self.client.start().await {
            Ok(()) => {
                warn!(service = "bot", "stopped early");
                Err(anyhow::anyhow!("bot stopped early"))
            }
            Err(e) => {
@@ -39,6 +212,28 @@ impl Service for BotService
    }

    async fn shutdown(&mut self) -> Result<(), anyhow::Error> {
        // Signal status update task to stop
        if let Some(status_shutdown_tx) = self.status_shutdown_tx.take() {
            let _ = status_shutdown_tx.send(());
        }

        // Wait for status update task to complete (with timeout)
        let handle = self.status_task_handle.lock().await.take();
        if let Some(handle) = handle {
            match tokio::time::timeout(Duration::from_secs(2), handle).await {
                Ok(Ok(())) => {
                    debug!("Status update task completed gracefully");
                }
                Ok(Err(e)) => {
                    warn!(error = ?e, "Status update task panicked");
                }
                Err(_) => {
                    warn!("Status update task did not complete within 2s timeout");
                }
            }
        }

        // Shutdown Discord shards
        self.shard_manager.shutdown_all().await;
        Ok(())
    }
@@ -1,24 +1,35 @@
use std::collections::HashMap;
use std::time::Duration;
use tokio::sync::{broadcast, mpsc};
use tracing::{debug, info, trace, warn};

use crate::services::{Service, ServiceResult, run_service};

/// Manages multiple services and their lifecycle
pub struct ServiceManager {
    registered_services: HashMap<String, Box<dyn Service>>,
    service_handles: HashMap<String, tokio::task::AbortHandle>,
    completion_rx: Option<mpsc::UnboundedReceiver<(String, ServiceResult)>>,
    completion_tx: mpsc::UnboundedSender<(String, ServiceResult)>,
    shutdown_tx: broadcast::Sender<()>,
}

impl Default for ServiceManager {
    fn default() -> Self {
        Self::new()
    }
}

impl ServiceManager {
    pub fn new() -> Self {
        let (shutdown_tx, _) = broadcast::channel(1);
        let (completion_tx, completion_rx) = mpsc::unbounded_channel();

        Self {
            registered_services: HashMap::new(),
            service_handles: HashMap::new(),
            completion_rx: Some(completion_rx),
            completion_tx,
            shutdown_tx,
        }
    }
@@ -28,6 +39,11 @@ impl ServiceManager
        self.registered_services.insert(name.to_string(), service);
    }

    /// Check if there are any registered services
    pub fn has_services(&self) -> bool {
        !self.registered_services.is_empty()
    }

    /// Spawn all registered services
    pub fn spawn_all(&mut self) {
        let service_count = self.registered_services.len();
@@ -35,8 +51,20 @@ impl ServiceManager

        for (name, service) in self.registered_services.drain() {
            let shutdown_rx = self.shutdown_tx.subscribe();
            let completion_tx = self.completion_tx.clone();
            let name_clone = name.clone();

            // Spawn service task
            let handle = tokio::spawn(async move {
                let result = run_service(service, shutdown_rx).await;
                // Send completion notification
                let _ = completion_tx.send((name_clone, result));
            });

            // Store abort handle for shutdown control
            self.service_handles
                .insert(name.clone(), handle.abort_handle());
            debug!(service = name, id = ?handle.id(), "service spawned");
        }

        info!(
@@ -50,7 +78,7 @@ impl ServiceManager
    /// Run all services until one completes or fails
    /// Returns the first service that completes and its result
    pub async fn run(&mut self) -> (String, ServiceResult) {
        if self.service_handles.is_empty() {
            return (
                "none".to_string(),
                ServiceResult::Error(anyhow::anyhow!("No services to run")),
@@ -59,99 +87,134 @@ impl ServiceManager

        info!(
            "servicemanager running {} services",
            self.service_handles.len()
        );

        // Wait for any service to complete via the channel
        let completion_rx = self
            .completion_rx
            .as_mut()
            .expect("completion_rx should be available");

        completion_rx
            .recv()
            .await
            .map(|(name, result)| {
                self.service_handles.remove(&name);
                (name, result)
            })
            .unwrap_or_else(|| {
                (
                    "channel_closed".to_string(),
                    ServiceResult::Error(anyhow::anyhow!("Completion channel closed")),
                )
            })
    }
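Editor's note: the rewrite above replaces the old poll-and-sleep loop (checking `is_finished()` every 10ms) with an mpsc completion channel. The pattern in isolation, as a sketch independent of the ServiceManager types:

use std::time::Duration;
use tokio::sync::mpsc;

async fn first_completion() -> (String, u64) {
    let (tx, mut rx) = mpsc::unbounded_channel();
    for (name, delay_ms) in [("a", 30u64), ("b", 10), ("c", 20)] {
        let tx = tx.clone();
        // Each task reports its own completion instead of being polled.
        tokio::spawn(async move {
            tokio::time::sleep(Duration::from_millis(delay_ms)).await;
            let _ = tx.send((name.to_string(), delay_ms));
        });
    }
    drop(tx); // drop the original sender so recv() can observe channel closure
    // Resolves as soon as the fastest task reports: ("b", 10).
    rx.recv().await.expect("at least one task completes")
}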

    /// Shutdown all services gracefully with a timeout.
    ///
    /// All services receive the shutdown signal simultaneously and shut down in parallel.
    /// Each service gets the full timeout duration (they don't share/consume from a budget).
    /// If any service fails to shutdown within the timeout, it will be aborted.
    ///
    /// Returns the elapsed time if all succeed, or a list of failed service names.
    pub async fn shutdown(&mut self, timeout: Duration) -> Result<Duration, Vec<String>> {
        let service_count = self.service_handles.len();
        let service_names: Vec<_> = self.service_handles.keys().cloned().collect();

        info!(
            service_count,
            services = ?service_names,
            timeout = format!("{:.2?}", timeout),
            "shutting down {} services in parallel with {:?} timeout each",
            service_count,
            timeout
        );

        if service_count == 0 {
            return Ok(Duration::ZERO);
        }

        // Send shutdown signal to all services simultaneously
        let _ = self.shutdown_tx.send(());

        let start_time = std::time::Instant::now();

        // Collect results from all services with timeout
        let completion_rx = self
            .completion_rx
            .as_mut()
            .expect("completion_rx should be available");

        // Collect all completion results with a single timeout
        let collect_future = async {
            let mut collected: Vec<Option<(String, ServiceResult)>> = Vec::new();
            for _ in 0..service_count {
                if let Some(result) = completion_rx.recv().await {
                    collected.push(Some(result));
                } else {
                    collected.push(None);
                }
            }
            collected
        };

        let results = match tokio::time::timeout(timeout, collect_future).await {
            Ok(results) => results,
            Err(_) => {
                // Timeout exceeded - abort all remaining services
                warn!(
                    timeout = format!("{:.2?}", timeout),
                    "shutdown timeout exceeded - aborting all remaining services"
                );

                let failed: Vec<String> = self.service_handles.keys().cloned().collect();
                for handle in self.service_handles.values() {
                    handle.abort();
                }
                self.service_handles.clear();

                return Err(failed);
            }
        };

        // Process results and identify failures
        let mut failed_services = Vec::new();
        for (name, service_result) in results.into_iter().flatten() {
            self.service_handles.remove(&name);

            if matches!(service_result, ServiceResult::GracefulShutdown) {
                trace!(service = name, "service shutdown completed");
            } else {
                warn!(
                    service = name,
                    result = ?service_result,
                    "service shutdown with non-graceful result"
                );
                failed_services.push(name);
            }
        }

        let elapsed = start_time.elapsed();

        if failed_services.is_empty() {
            info!(
                service_count,
                elapsed = format!("{:.2?}", elapsed),
                "all services shutdown successfully: {}",
                service_names.join(", ")
            );
            Ok(elapsed)
        } else {
            warn!(
                failed_count = failed_services.len(),
                failed_services = ?failed_services,
                elapsed = format!("{:.2?}", elapsed),
                "{} service(s) failed to shutdown gracefully: {}",
                failed_services.len(),
                failed_services.join(", ")
            );
            Err(failed_services)
        }
    }
}
@@ -23,7 +23,11 @@ pub trait Service: Send + Sync {

    /// Gracefully shutdown the service
    ///
    /// Implementations should initiate shutdown and MAY wait for completion.
    /// Services are expected to respond to this call and begin cleanup promptly.
    /// When managed by ServiceManager, the configured timeout (default 8s) applies to
    /// ALL services combined, not per-service. Services should complete shutdown as
    /// quickly as possible to avoid timeout.
    async fn shutdown(&mut self) -> Result<(), anyhow::Error>;
}
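Editor's note: taken together, the trait contract and the manager imply a lifecycle of register, spawn_all, run, then shutdown. A wiring sketch under assumptions: the registration method's signature is inferred from the `insert` call in the hunk above, the async trait methods are assumed to be declared via `async_trait`, and `IdleService` is purely illustrative:

use std::time::Duration;

struct IdleService;

#[async_trait::async_trait] // assumption: how the trait's async fns are declared isn't shown
impl Service for IdleService {
    async fn run(&mut self) -> Result<(), anyhow::Error> {
        // Idle until cancelled by the shutdown path.
        tokio::time::sleep(Duration::from_secs(3600)).await;
        Ok(())
    }

    async fn shutdown(&mut self) -> Result<(), anyhow::Error> {
        Ok(()) // nothing to clean up; returns well inside the shared timeout
    }
}

async fn orchestrate() {
    let mut manager = ServiceManager::new();
    manager.register("idle", Box::new(IdleService)); // assumed signature: (&mut self, &str, Box<dyn Service>)
    manager.spawn_all();

    let (name, result) = manager.run().await; // blocks until the first service reports completion
    tracing::info!(service = %name, result = ?result, "first service exited");

    // Remaining services get the shutdown signal and an 8s window, in parallel.
    let _ = manager.shutdown(Duration::from_secs(8)).await;
}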
@@ -3,7 +3,7 @@ use crate::web::{BannerState, create_router};
use std::net::SocketAddr;
use tokio::net::TcpListener;
use tokio::sync::broadcast;
use tracing::{info, trace, warn};

/// Web server service implementation
pub struct WebService {
@@ -33,17 +33,13 @@ impl Service for WebService {
        let app = create_router(self.banner_state.clone());

        let addr = SocketAddr::from(([0, 0, 0, 0], self.port));
        let listener = TcpListener::bind(addr).await?;
        info!(
            service = "web",
            address = %addr,
            link = format!("http://localhost:{}", addr.port()),
            "web server listening"
        );

        // Create internal shutdown channel for axum graceful shutdown
@@ -54,20 +50,23 @@ impl Service for WebService {
        axum::serve(listener, app)
            .with_graceful_shutdown(async move {
                let _ = shutdown_rx.recv().await;
                trace!(
                    service = "web",
                    "received shutdown signal, starting graceful shutdown"
                );
            })
            .await?;

        trace!(service = "web", "graceful shutdown completed");
        info!(service = "web", "web server stopped");

        Ok(())
    }

    async fn shutdown(&mut self) -> Result<(), anyhow::Error> {
        if let Some(shutdown_tx) = self.shutdown_tx.take() {
            let _ = shutdown_tx.send(());
            trace!(service = "web", "sent shutdown signal to axum");
        } else {
            warn!(
                service = "web",
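Editor's note: the `Option<broadcast::Sender<()>>` plus `take()` seen in both shutdown implementations makes the signal one-shot: the first call consumes the sender, and any repeat call lands in the `warn!` branch instead of signalling twice. Reduced to its essentials (editor's sketch):

use tokio::sync::broadcast;

struct Stopper {
    shutdown_tx: Option<broadcast::Sender<()>>,
}

impl Stopper {
    fn stop(&mut self) {
        match self.shutdown_tx.take() {
            Some(tx) => {
                // send() errors only if every receiver is already gone; harmless here
                let _ = tx.send(());
            }
            None => eprintln!("stop() called more than once"),
        }
    }
}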
106
src/signals.rs
Normal file
@@ -0,0 +1,106 @@
use crate::services::ServiceResult;
use crate::services::manager::ServiceManager;
use std::process::ExitCode;
use std::time::Duration;
use tokio::signal;
use tracing::{error, info, warn};

/// Handle application shutdown signals and graceful shutdown
pub async fn handle_shutdown_signals(
    mut service_manager: ServiceManager,
    shutdown_timeout: Duration,
) -> ExitCode {
    // Set up signal handling for both SIGINT (Ctrl+C) and SIGTERM
    let ctrl_c = async {
        signal::ctrl_c()
            .await
            .expect("Failed to install CTRL+C signal handler");
        info!("received ctrl+c, gracefully shutting down...");
    };

    #[cfg(unix)]
    let sigterm = async {
        use tokio::signal::unix::{SignalKind, signal};
        let mut sigterm_stream =
            signal(SignalKind::terminate()).expect("Failed to install SIGTERM signal handler");
        sigterm_stream.recv().await;
        info!("received SIGTERM, gracefully shutting down...");
    };

    #[cfg(not(unix))]
    let sigterm = async {
        // On non-Unix systems, create a future that never completes
        // This ensures the select! macro works correctly
        std::future::pending::<()>().await;
    };

    // Main application loop - wait for services or signals
    let mut exit_code = ExitCode::SUCCESS;

    tokio::select! {
        (service_name, result) = service_manager.run() => {
            // A service completed unexpectedly
            match result {
                ServiceResult::GracefulShutdown => {
                    info!(service = service_name, "service completed gracefully");
                }
                ServiceResult::NormalCompletion => {
                    warn!(service = service_name, "service completed unexpectedly");
                    exit_code = ExitCode::FAILURE;
                }
                ServiceResult::Error(e) => {
                    error!(service = service_name, error = ?e, "service failed");
                    exit_code = ExitCode::FAILURE;
                }
            }

            // Shutdown remaining services
            exit_code = handle_graceful_shutdown(service_manager, shutdown_timeout, exit_code).await;
        }
        _ = ctrl_c => {
            // User requested shutdown via Ctrl+C
            info!("user requested shutdown via ctrl+c");
            exit_code = handle_graceful_shutdown(service_manager, shutdown_timeout, ExitCode::SUCCESS).await;
        }
        _ = sigterm => {
            // System requested shutdown via SIGTERM
            info!("system requested shutdown via SIGTERM");
            exit_code = handle_graceful_shutdown(service_manager, shutdown_timeout, ExitCode::SUCCESS).await;
        }
    }

    info!(exit_code = ?exit_code, "application shutdown complete");
    exit_code
}

/// Handle graceful shutdown of remaining services
async fn handle_graceful_shutdown(
    mut service_manager: ServiceManager,
    shutdown_timeout: Duration,
    current_exit_code: ExitCode,
) -> ExitCode {
    match service_manager.shutdown(shutdown_timeout).await {
        Ok(elapsed) => {
            info!(
                remaining = format!("{:.2?}", shutdown_timeout - elapsed),
                "graceful shutdown complete"
            );
            current_exit_code
        }
        Err(pending_services) => {
            warn!(
                pending_count = pending_services.len(),
                pending_services = ?pending_services,
                "graceful shutdown elapsed - {} service(s) did not complete",
                pending_services.len()
            );

            // Non-zero exit code, default to FAILURE if not set
            if current_exit_code == ExitCode::SUCCESS {
                ExitCode::FAILURE
            } else {
                current_exit_code
            }
        }
    }
}
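Editor's note: a hypothetical `main` showing how `handle_shutdown_signals` is meant to be driven; the service constructors aren't part of this diff, so registration is left as a comment (assumptions flagged inline):

use std::process::ExitCode;
use std::time::Duration;

#[tokio::main]
async fn main() -> ExitCode {
    let mut manager = ServiceManager::new();
    // manager.register("web", Box::new(/* WebService constructor not shown in this diff */));
    manager.spawn_all();

    // Waits on the services and on SIGINT/SIGTERM, then runs the parallel
    // shutdown path with the given timeout and maps the outcome to an exit code.
    handle_shutdown_signals(manager, Duration::from_secs(8)).await
}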
38
src/state.rs
Normal file
@@ -0,0 +1,38 @@
//! Application state shared across components (bot, web, scheduler).

use crate::banner::BannerApi;
use crate::banner::Course;
use anyhow::Result;
use sqlx::PgPool;
use std::sync::Arc;

#[derive(Clone)]
pub struct AppState {
    pub banner_api: Arc<BannerApi>,
    pub db_pool: PgPool,
}

impl AppState {
    pub fn new(banner_api: Arc<BannerApi>, db_pool: PgPool) -> Self {
        Self {
            banner_api,
            db_pool,
        }
    }

    /// Get a course by CRN directly from Banner API
    pub async fn get_course_or_fetch(&self, term: &str, crn: &str) -> Result<Course> {
        self.banner_api
            .get_course_by_crn(term, crn)
            .await?
            .ok_or_else(|| anyhow::anyhow!("Course not found for CRN {crn}"))
    }

    /// Get the total number of courses in the database
    pub async fn get_course_count(&self) -> Result<i64> {
        let count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM courses")
            .fetch_one(&self.db_pool)
            .await?;
        Ok(count.0)
    }
}
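Editor's note: for orientation, a construction sketch; `BannerApi::new` and the connection string are assumptions, since neither appears in this part of the diff:

use std::sync::Arc;

async fn build_state() -> anyhow::Result<AppState> {
    let db_pool = sqlx::PgPool::connect("postgres://localhost/banner").await?; // assumed URL
    let banner_api = Arc::new(BannerApi::new()); // assumed constructor
    Ok(AppState::new(banner_api, db_pool))
}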
1
src/utils/mod.rs
Normal file
@@ -0,0 +1 @@
pub mod shutdown;
32
src/utils/shutdown.rs
Normal file
@@ -0,0 +1,32 @@
use tokio::task::JoinHandle;
use tracing::warn;

/// Helper for joining multiple task handles with proper error handling.
///
/// This function waits for all tasks to complete and reports any that panicked.
/// Returns an error if any task panicked, otherwise returns Ok.
pub async fn join_tasks(handles: Vec<JoinHandle<()>>) -> Result<(), anyhow::Error> {
    let results = futures::future::join_all(handles).await;

    let failed = results.iter().filter(|r| r.is_err()).count();
    if failed > 0 {
        warn!(failed_count = failed, "Some tasks panicked during shutdown");
        Err(anyhow::anyhow!("{} task(s) panicked", failed))
    } else {
        Ok(())
    }
}

/// Helper for joining multiple task handles with a timeout.
///
/// Waits for all tasks to complete within the specified timeout.
/// If the timeout elapses, an error is returned and any unfinished tasks are
/// left running detached (they are not aborted).
pub async fn join_tasks_with_timeout(
    handles: Vec<JoinHandle<()>>,
    timeout: std::time::Duration,
) -> Result<(), anyhow::Error> {
    match tokio::time::timeout(timeout, join_tasks(handles)).await {
        Ok(result) => result,
        Err(_) => Err(anyhow::anyhow!("Task join timed out after {:?}", timeout)),
    }
}
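Editor's note: usage sketch for the timeout variant, including the path where the budget is exceeded:

use std::time::Duration;
use tokio::task::JoinHandle;

async fn drain_background_tasks() {
    let handles: Vec<JoinHandle<()>> = (1..=4u64)
        .map(|i| tokio::spawn(async move { tokio::time::sleep(Duration::from_millis(25 * i)).await }))
        .collect();

    match join_tasks_with_timeout(handles, Duration::from_millis(500)).await {
        Ok(()) => println!("all background tasks drained"),
        Err(e) => println!("shutdown degraded: {e}"), // a panic or the 500ms timeout
    }
}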
96
src/web/assets.rs
Normal file
@@ -0,0 +1,96 @@
//! Embedded assets for the web frontend
//!
//! This module handles serving static assets that are embedded into the binary
//! at compile time using rust-embed.

use dashmap::DashMap;
use once_cell::sync::Lazy;
use rapidhash::v3::rapidhash_v3;
use rust_embed::RustEmbed;
use std::fmt;

/// Embedded web assets from the dist directory
#[derive(RustEmbed)]
#[folder = "web/dist/"]
#[include = "*"]
#[exclude = "*.map"]
pub struct WebAssets;

/// RapidHash hash type for asset content (u64 native output size)
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AssetHash(u64);

impl AssetHash {
    /// Create a new AssetHash from a u64 value
    pub fn new(hash: u64) -> Self {
        Self(hash)
    }

    /// Get the hash as a hex string
    pub fn to_hex(&self) -> String {
        format!("{:016x}", self.0)
    }

    /// Get the hash as a quoted hex string
    pub fn quoted(&self) -> String {
        format!("\"{}\"", self.to_hex())
    }
}

impl fmt::Display for AssetHash {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.to_hex())
    }
}

/// Metadata for an asset including MIME type and RapidHash hash
#[derive(Debug, Clone)]
pub struct AssetMetadata {
    pub mime_type: Option<String>,
    pub hash: AssetHash,
}
impl AssetMetadata {
    /// Check if the etag matches the asset hash
    pub fn etag_matches(&self, etag: &str) -> bool {
        // Remove quotes if present (ETags are typically quoted)
        let etag = etag.trim_matches('"');

        // ETags generated from u64 hex should be 16 characters
        etag.len() == 16
            // Parse the hexadecimal and compare it against the stored hash
            && u64::from_str_radix(etag, 16)
                .map(|parsed| parsed == self.hash.0)
                .unwrap_or(false)
    }
}

/// Global cache for asset metadata to avoid repeated calculations
static ASSET_CACHE: Lazy<DashMap<String, AssetMetadata>> = Lazy::new(DashMap::new);

/// Get cached asset metadata for a file path, caching on-demand.
/// Returns AssetMetadata containing MIME type and RapidHash hash.
pub fn get_asset_metadata_cached(path: &str, content: &[u8]) -> AssetMetadata {
    // Check cache first
    if let Some(cached) = ASSET_CACHE.get(path) {
        return cached.value().clone();
    }

    // Calculate MIME type
    let mime_type = mime_guess::from_path(path)
        .first()
        .map(|mime| mime.to_string());

    // Calculate RapidHash hash (using u64 native output size)
    let hash_value = rapidhash_v3(content);
    let hash = AssetHash::new(hash_value);

    let metadata = AssetMetadata { mime_type, hash };

    // Only cache if we haven't exceeded the limit
    if ASSET_CACHE.len() < 1000 {
        ASSET_CACHE.insert(path.to_string(), metadata.clone());
    }

    metadata
}
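Editor's note: to make the ETag flow concrete, a round-trip sketch written as if it lived in the same module (so the private hash field is reachable):

fn etag_round_trip(content: &[u8]) {
    let hash = AssetHash::new(rapidhash_v3(content));
    let metadata = AssetMetadata { mime_type: None, hash };

    // The response carries the quoted 16-char hex hash as its ETag...
    let etag = metadata.hash.quoted();
    // ...and a later If-None-Match with that value (quoted or not) matches,
    // which is what lets the handlers answer 304 Not Modified.
    assert!(metadata.etag_matches(&etag));
    assert!(metadata.etag_matches(etag.trim_matches('"')));
}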
@@ -1,5 +1,6 @@
//! Web API module for the banner application.

pub mod assets;
pub mod routes;

pub use routes::*;
@@ -1,38 +1,204 @@
//! Web API endpoints for Banner bot monitoring and metrics.

use axum::{
    Router,
    body::Body,
    extract::{Request, State},
    http::{HeaderMap, HeaderValue, StatusCode, Uri},
    response::{Html, IntoResponse, Json, Response},
    routing::get,
};
use http::header;
use serde::Serialize;
use serde_json::{Value, json};
use std::{collections::BTreeMap, time::Duration};
use tower_http::timeout::TimeoutLayer;
use tower_http::{
    classify::ServerErrorsFailureClass,
    cors::{Any, CorsLayer},
    trace::TraceLayer,
};
use tracing::{Span, debug, info, warn};

use crate::web::assets::{WebAssets, get_asset_metadata_cached};

/// Set appropriate caching headers based on asset type
fn set_caching_headers(response: &mut Response, path: &str, etag: &str) {
    let headers = response.headers_mut();

    // Set ETag
    if let Ok(etag_value) = HeaderValue::from_str(etag) {
        headers.insert(header::ETAG, etag_value);
    }

    // Set Cache-Control based on asset type
    let cache_control = if path.starts_with("assets/") {
        // Static assets with hashed filenames - long-term cache
        "public, max-age=31536000, immutable"
    } else if path == "index.html" {
        // HTML files - short-term cache
        "public, max-age=300"
    } else {
        match path.split_once('.').map(|(_, extension)| extension) {
            Some(ext) => match ext {
                // CSS/JS files - medium-term cache
                "css" | "js" => "public, max-age=86400",
                // Images - long-term cache
                "png" | "jpg" | "jpeg" | "gif" | "svg" | "ico" => "public, max-age=2592000",
                // Default for other files
                _ => "public, max-age=3600",
            },
            // Default for files without an extension
            None => "public, max-age=3600",
        }
    };

    if let Ok(cache_control_value) = HeaderValue::from_str(cache_control) {
        headers.insert(header::CACHE_CONTROL, cache_control_value);
    }
}

/// Shared application state for web server
#[derive(Clone)]
pub struct BannerState {}

/// Creates the web server router
pub fn create_router(state: BannerState) -> Router {
    let api_router = Router::new()
        .route("/health", get(health))
        .route("/status", get(status))
        .route("/metrics", get(metrics))
        .with_state(state);

    let mut router = Router::new().nest("/api", api_router);

    if cfg!(debug_assertions) {
        router = router.layer(
            CorsLayer::new()
                .allow_origin(Any)
                .allow_methods(Any)
                .allow_headers(Any),
        )
    } else {
        router = router.fallback(fallback);
    }

    router.layer((
        TraceLayer::new_for_http()
            .make_span_with(|request: &Request<Body>| {
                tracing::debug_span!("request", path = request.uri().path())
            })
            .on_request(())
            .on_body_chunk(())
            .on_eos(())
            .on_response(
                |response: &Response<Body>, latency: Duration, _span: &Span| {
                    let latency_threshold = if cfg!(debug_assertions) {
                        Duration::from_millis(100)
                    } else {
                        Duration::from_millis(1000)
                    };

                    // Format latency, status, and code
                    let (latency_str, status) = (
                        format!("{latency:.2?}"),
                        format!(
                            "{} {}",
                            response.status().as_u16(),
                            response.status().canonical_reason().unwrap_or("??")
                        ),
                    );

                    // Log in warn if latency is above threshold, otherwise debug
                    if latency > latency_threshold {
                        warn!(latency = latency_str, status = status, "Response");
                    } else {
                        debug!(latency = latency_str, status = status, "Response");
                    }
                },
            )
            .on_failure(
                |error: ServerErrorsFailureClass, latency: Duration, _span: &Span| {
                    warn!(
                        error = ?error,
                        latency = format!("{latency:.2?}"),
                        "Request failed"
                    );
                },
            ),
        TimeoutLayer::new(Duration::from_secs(10)),
    ))
}

/// Handler that extracts request information for caching
async fn fallback(request: Request) -> Response {
    let uri = request.uri().clone();
    let headers = request.headers().clone();
    handle_spa_fallback_with_headers(uri, headers).await
}

/// Handles SPA routing by serving index.html for non-API, non-asset requests.
/// This version includes HTTP caching headers and ETag support.
async fn handle_spa_fallback_with_headers(uri: Uri, request_headers: HeaderMap) -> Response {
    let path = uri.path().trim_start_matches('/');

    if let Some(content) = WebAssets::get(path) {
        // Get asset metadata (MIME type and hash) with caching
        let metadata = get_asset_metadata_cached(path, &content.data);

        // Check if client has a matching ETag (conditional request)
        if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
            && metadata.etag_matches(etag.to_str().unwrap())
        {
            return StatusCode::NOT_MODIFIED.into_response();
        }

        // Use cached MIME type, only set Content-Type if we have a valid MIME type
        let mut response = (
            [(
                header::CONTENT_TYPE,
                // For unknown types, set to application/octet-stream
                metadata
                    .mime_type
                    .unwrap_or("application/octet-stream".to_string()),
            )],
            content.data,
        )
            .into_response();

        // Set caching headers
        set_caching_headers(&mut response, path, &metadata.hash.quoted());

        return response;
    } else {
        // Any assets that are not found should be treated as a 404, not falling back to the SPA index.html
        if path.starts_with("assets/") {
            return (StatusCode::NOT_FOUND, "Asset not found").into_response();
        }
    }

    // Fall back to the SPA index.html
    match WebAssets::get("index.html") {
        Some(content) => {
            let metadata = get_asset_metadata_cached("index.html", &content.data);

            // Check if client has a matching ETag for index.html
            if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
                && metadata.etag_matches(etag.to_str().unwrap())
            {
                return StatusCode::NOT_MODIFIED.into_response();
            }

            let mut response = Html(content.data).into_response();
            set_caching_headers(&mut response, "index.html", &metadata.hash.quoted());
            response
        }
        None => (
            StatusCode::INTERNAL_SERVER_ERROR,
            "Failed to load index.html",
        )
            .into_response(),
    }
}

/// Health check endpoint
@@ -44,43 +210,86 @@ async fn health() -> Json<Value> {
    }))
}

#[derive(Serialize)]
enum Status {
    Disabled,
    Connected,
    Active,
    Healthy,
    Error,
}

#[derive(Serialize)]
struct ServiceInfo {
    name: String,
    status: Status,
}

#[derive(Serialize)]
struct StatusResponse {
    status: Status,
    version: String,
    commit: String,
    services: BTreeMap<String, ServiceInfo>,
}

/// Status endpoint showing bot and system status
async fn status(State(_state): State<BannerState>) -> Json<StatusResponse> {
    let mut services = BTreeMap::new();

    // Bot service status - hardcoded as disabled for now
    services.insert(
        "bot".to_string(),
        ServiceInfo {
            name: "Bot".to_string(),
            status: Status::Disabled,
        },
    );

    // Banner API status - always connected for now
    services.insert(
        "banner".to_string(),
        ServiceInfo {
            name: "Banner".to_string(),
            status: Status::Connected,
        },
    );

    // Discord status - hardcoded as disabled for now
    services.insert(
        "discord".to_string(),
        ServiceInfo {
            name: "Discord".to_string(),
            status: Status::Disabled,
        },
    );

    let overall_status = if services.values().any(|s| matches!(s.status, Status::Error)) {
        Status::Error
    } else if services
        .values()
        .all(|s| matches!(s.status, Status::Active | Status::Connected))
    {
        Status::Active
    } else {
        // If we have any Disabled services but no errors, show as Healthy
        Status::Healthy
    };

    Json(StatusResponse {
        status: overall_status,
        version: env!("CARGO_PKG_VERSION").to_string(),
        commit: env!("GIT_COMMIT_HASH").to_string(),
        services,
    })
}

/// Metrics endpoint for monitoring
async fn metrics(State(_state): State<BannerState>) -> Json<Value> {
    // For now, return basic metrics structure
    Json(json!({
        "banner_api": {
            "status": "connected"
        },
        "timestamp": chrono::Utc::now().to_rfc3339()
    }))
}
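Editor's note: a hedged integration-test sketch for the router above; it leans on `tower::ServiceExt::oneshot`, a common axum testing pattern, but `tower` as a dev-dependency is an assumption this diff doesn't confirm:

use axum::body::Body;
use axum::http::{Request, StatusCode};
use tower::ServiceExt; // assumption: `tower` available as a dev-dependency

#[tokio::test]
async fn health_endpoint_responds() {
    let app = create_router(BannerState {});
    let response = app
        .oneshot(Request::builder().uri("/api/health").body(Body::empty()).unwrap())
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
}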
39
tests/basic_test.rs
Normal file
@@ -0,0 +1,39 @@
use banner::utils::shutdown::join_tasks;
use tokio::task::JoinHandle;

#[tokio::test]
async fn test_join_tasks_success() {
    // Create some tasks that complete successfully
    let handles: Vec<JoinHandle<()>> = vec![
        tokio::spawn(async { tokio::time::sleep(tokio::time::Duration::from_millis(10)).await }),
        tokio::spawn(async { tokio::time::sleep(tokio::time::Duration::from_millis(20)).await }),
        tokio::spawn(async { /* immediate completion */ }),
    ];

    // All tasks should complete successfully
    let result = join_tasks(handles).await;
    assert!(
        result.is_ok(),
        "Expected all tasks to complete successfully"
    );
}

#[tokio::test]
async fn test_join_tasks_with_panic() {
    // Create some tasks, including one that panics
    let handles: Vec<JoinHandle<()>> = vec![
        tokio::spawn(async { tokio::time::sleep(tokio::time::Duration::from_millis(10)).await }),
        tokio::spawn(async { panic!("intentional test panic") }),
        tokio::spawn(async { /* immediate completion */ }),
    ];

    // Should return an error because one task panicked
    let result = join_tasks(handles).await;
    assert!(result.is_err(), "Expected an error when a task panics");

    let error_msg = result.unwrap_err().to_string();
    assert!(
        error_msg.contains("1 task(s) panicked"),
        "Error message should mention panicked tasks"
    );
}
9
web/.gitignore
vendored
Normal file
@@ -0,0 +1,9 @@
node_modules
.DS_Store
dist
dist-ssr
*.local
count.txt
.env
.nitro
.tanstack
11
web/.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,11 @@
{
  "files.watcherExclude": {
    "**/routeTree.gen.ts": true
  },
  "search.exclude": {
    "**/routeTree.gen.ts": true
  },
  "files.readonlyInclude": {
    "**/routeTree.gen.ts": true
  }
}
30
web/biome.json
Normal file
@@ -0,0 +1,30 @@
{
  "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
  "vcs": {
    "enabled": true,
    "clientKind": "git",
    "useIgnoreFile": true
  },
  "files": {
    "ignoreUnknown": false,
    "ignore": ["dist/", "node_modules/", ".tanstack/"]
  },
  "formatter": {
    "enabled": true,
    "indentStyle": "space",
    "indentWidth": 2,
    "lineWidth": 100,
    "lineEnding": "lf"
  },
  "javascript": {
    "formatter": {
      "quoteStyle": "double",
      "trailingCommas": "es5",
      "semicolons": "always",
      "arrowParentheses": "always"
    }
  },
  "linter": {
    "enabled": false
  }
}
1297
web/bun.lock
Normal file
(File diff suppressed because it is too large.)
60
web/eslint.config.js
Normal file
@@ -0,0 +1,60 @@
import js from "@eslint/js";
import tseslint from "typescript-eslint";
import react from "eslint-plugin-react";
import reactHooks from "eslint-plugin-react-hooks";
import reactRefresh from "eslint-plugin-react-refresh";

export default tseslint.config(
  // Ignore generated files and build outputs
  {
    ignores: ["dist", "node_modules", "src/routeTree.gen.ts", "*.config.js"],
  },
  // Base configs
  js.configs.recommended,
  ...tseslint.configs.recommendedTypeChecked,
  // React plugin configuration
  {
    files: ["**/*.{ts,tsx}"],
    plugins: {
      react,
      "react-hooks": reactHooks,
      "react-refresh": reactRefresh,
    },
    languageOptions: {
      parserOptions: {
        project: true,
        tsconfigRootDir: import.meta.dirname,
        ecmaFeatures: {
          jsx: true,
        },
      },
    },
    settings: {
      react: {
        version: "19.0",
      },
    },
    rules: {
      // React rules
      ...react.configs.recommended.rules,
      ...react.configs["jsx-runtime"].rules,
      ...reactHooks.configs.recommended.rules,

      // React Refresh
      "react-refresh/only-export-components": ["warn", { allowConstantExport: true }],

      // TypeScript overrides
      "@typescript-eslint/no-unused-vars": [
        "error",
        {
          argsIgnorePattern: "^_",
          varsIgnorePattern: "^_",
        },
      ],
      "@typescript-eslint/no-explicit-any": "warn",

      // Disable prop-types since we're using TypeScript
      "react/prop-types": "off",
    },
  }
);
20
web/index.html
Normal file
@@ -0,0 +1,20 @@
<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <link rel="icon" href="/favicon.ico" />
    <meta name="theme-color" content="#000000" />
    <meta
      name="description"
      content="Banner, a Discord bot and web interface for UTSA Course Monitoring"
    />
    <link rel="apple-touch-icon" href="/logo192.png" />
    <link rel="manifest" href="/manifest.json" />
    <title>Banner</title>
  </head>
  <body>
    <div id="app"></div>
    <script type="module" src="/src/main.tsx"></script>
  </body>
</html>
49
web/package.json
Normal file
@@ -0,0 +1,49 @@
{
  "name": "banner-web",
  "private": true,
  "type": "module",
  "scripts": {
    "dev": "vite --port 3000",
    "start": "vite --port 3000",
    "build": "vite build && tsc",
    "serve": "vite preview",
    "test": "vitest run",
    "lint": "tsc && eslint . --ext .ts,.tsx",
    "typecheck": "tsc --noEmit",
    "format": "biome format --write .",
    "format:check": "biome format ."
  },
  "dependencies": {
    "@radix-ui/themes": "^3.2.1",
    "@tanstack/react-devtools": "^0.2.2",
    "@tanstack/react-router": "^1.130.2",
    "@tanstack/react-router-devtools": "^1.131.5",
    "@tanstack/router-plugin": "^1.121.2",
    "lucide-react": "^0.544.0",
    "next-themes": "^0.4.6",
    "react": "^19.0.0",
    "react-dom": "^19.0.0",
    "react-timeago": "^8.3.0",
    "recharts": "^3.2.0"
  },
  "devDependencies": {
    "@biomejs/biome": "^1.9.4",
    "@eslint/js": "^9.39.0",
    "@testing-library/dom": "^10.4.0",
    "@testing-library/react": "^16.2.0",
    "@types/node": "^24.3.3",
    "@types/react": "^19.0.8",
    "@types/react-dom": "^19.0.3",
    "@vitejs/plugin-react": "^4.3.4",
    "eslint": "^9.39.0",
    "eslint-plugin-react": "^7.37.5",
    "eslint-plugin-react-hooks": "^7.0.1",
    "eslint-plugin-react-refresh": "^0.4.24",
    "jsdom": "^26.0.0",
    "typescript": "^5.7.2",
    "typescript-eslint": "^8.46.2",
    "vite": "^6.3.5",
    "vitest": "^3.0.5",
    "web-vitals": "^4.2.4"
  }
}
BIN  web/public/favicon.ico   (new binary file, 3.8 KiB; not shown)
BIN  web/public/logo192.png   (new binary file, 5.2 KiB; not shown)
BIN  web/public/logo512.png   (new binary file, 9.4 KiB; not shown)
25
web/public/manifest.json
Normal file
@@ -0,0 +1,25 @@
{
  "short_name": "Banner",
  "name": "Banner, a Discord bot and web interface for UTSA Course Monitoring",
  "icons": [
    {
      "src": "favicon.ico",
      "sizes": "64x64 32x32 24x24 16x16",
      "type": "image/x-icon"
    },
    {
      "src": "logo192.png",
      "type": "image/png",
      "sizes": "192x192"
    },
    {
      "src": "logo512.png",
      "type": "image/png",
      "sizes": "512x512"
    }
  ],
  "start_url": ".",
  "display": "standalone",
  "theme_color": "#ffffff",
  "background_color": "#ffffff"
}
3
web/public/robots.txt
Normal file
@@ -0,0 +1,3 @@
# https://www.robotstxt.org/robotstxt.html
User-agent: *
Disallow:
34
web/src/App.css
Normal file
@@ -0,0 +1,34 @@
.App {
  min-height: 100vh;
  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu",
    "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif;
  background-color: var(--color-background);
  color: var(--color-text);
}

@keyframes pulse {
  0%,
  100% {
    opacity: 0.2;
  }
  50% {
    opacity: 0.4;
  }
}

.animate-pulse {
  animation: pulse 2s ease-in-out infinite;
}

/* Screen reader only text */
.sr-only {
  position: absolute;
  width: 1px;
  height: 1px;
  padding: 0;
  margin: -1px;
  overflow: hidden;
  clip: rect(0, 0, 0, 0);
  white-space: nowrap;
  border: 0;
}
60
web/src/components/ThemeToggle.tsx
Normal file
@@ -0,0 +1,60 @@
import { useTheme } from "next-themes";
import { Button } from "@radix-ui/themes";
import { Sun, Moon, Monitor } from "lucide-react";
import { useMemo } from "react";

export function ThemeToggle() {
  const { theme, setTheme } = useTheme();

  const nextTheme = useMemo(() => {
    switch (theme) {
      case "light":
        return "dark";
      case "dark":
        return "system";
      case "system":
        return "light";
      default:
        console.error(`Invalid theme: ${theme}`);
        return "system";
    }
  }, [theme]);

  const icon = useMemo(() => {
    if (nextTheme === "system") {
      return <Monitor size={18} />;
    }
    return nextTheme === "dark" ? <Moon size={18} /> : <Sun size={18} />;
  }, [nextTheme]);

  return (
    <Button
      variant="ghost"
      size="3"
      onClick={() => setTheme(nextTheme)}
      style={{
        cursor: "pointer",
        backgroundColor: "transparent",
        border: "none",
        margin: "4px",
        padding: "7px",
        borderRadius: "6px",
        display: "flex",
        alignItems: "center",
        justifyContent: "center",
        color: "var(--gray-11)",
        transition: "background-color 0.2s, color 0.2s",
        transform: "scale(1.25)",
      }}
      onMouseEnter={(e) => {
        e.currentTarget.style.backgroundColor = "var(--gray-4)";
      }}
      onMouseLeave={(e) => {
        e.currentTarget.style.backgroundColor = "transparent";
      }}
    >
      {icon}
      <span className="sr-only">Toggle theme</span>
    </Button>
  );
}
63 web/src/lib/api.test.ts Normal file
@@ -0,0 +1,63 @@
import { describe, it, expect, vi, beforeEach } from "vitest";
import { BannerApiClient } from "./api";

// Mock fetch
global.fetch = vi.fn();

describe("BannerApiClient", () => {
  let apiClient: BannerApiClient;

  beforeEach(() => {
    apiClient = new BannerApiClient();
    vi.clearAllMocks();
  });

  it("should fetch health data", async () => {
    const mockHealth = {
      status: "healthy",
      timestamp: "2024-01-01T00:00:00Z",
    };

    vi.mocked(fetch).mockResolvedValueOnce({
      ok: true,
      json: () => Promise.resolve(mockHealth),
    } as Response);

    const result = await apiClient.getHealth();

    expect(fetch).toHaveBeenCalledWith("/api/health");
    expect(result).toEqual(mockHealth);
  });

  it("should fetch status data", async () => {
    const mockStatus = {
      status: "operational",
      bot: { status: "running", uptime: "1h" },
      cache: { status: "connected", courses: "100", subjects: "50" },
      banner_api: { status: "connected" },
      timestamp: "2024-01-01T00:00:00Z",
    };

    vi.mocked(fetch).mockResolvedValueOnce({
      ok: true,
      json: () => Promise.resolve(mockStatus),
    } as Response);

    const result = await apiClient.getStatus();

    expect(fetch).toHaveBeenCalledWith("/api/status");
    expect(result).toEqual(mockStatus);
  });

  it("should handle API errors", async () => {
    vi.mocked(fetch).mockResolvedValueOnce({
      ok: false,
      status: 500,
      statusText: "Internal Server Error",
    } as Response);

    await expect(apiClient.getHealth()).rejects.toThrow(
      "API request failed: 500 Internal Server Error"
    );
  });
});
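These tests lean on the Vitest configuration added in web/vite.config.ts at the end of this diff (globals plus a jsdom environment), and they replace global.fetch with vi.fn() so no network is touched; each case seeds exactly one response via mockResolvedValueOnce. Assuming the Bun toolchain used in CI, a plausible invocation is `cd web && bunx vitest run`.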
61 web/src/lib/api.ts Normal file
@@ -0,0 +1,61 @@
// API client for Banner backend
const API_BASE_URL = "/api";

export interface HealthResponse {
  status: string;
  timestamp: string;
}

export type Status = "Disabled" | "Connected" | "Active" | "Healthy" | "Error";

export interface ServiceInfo {
  name: string;
  status: Status;
}

export interface StatusResponse {
  status: Status;
  version: string;
  commit: string;
  services: Record<string, ServiceInfo>;
}

export interface MetricsResponse {
  banner_api: {
    status: string;
  };
  timestamp: string;
}

export class BannerApiClient {
  private baseUrl: string;

  constructor(baseUrl: string = API_BASE_URL) {
    this.baseUrl = baseUrl;
  }

  private async request<T>(endpoint: string): Promise<T> {
    const response = await fetch(`${this.baseUrl}${endpoint}`);

    if (!response.ok) {
      throw new Error(`API request failed: ${response.status} ${response.statusText}`);
    }

    return (await response.json()) as T;
  }

  async getHealth(): Promise<HealthResponse> {
    return this.request<HealthResponse>("/health");
  }

  async getStatus(): Promise<StatusResponse> {
    return this.request<StatusResponse>("/status");
  }

  async getMetrics(): Promise<MetricsResponse> {
    return this.request<MetricsResponse>("/metrics");
  }
}

// Export a default instance
export const client = new BannerApiClient();
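A minimal usage sketch of the exported singleton (printStatus is a hypothetical name; the relative /api base resolves through the dev-server proxy defined in web/vite.config.ts later in this diff):

import { client } from "./lib/api";

// Fetch the composite status once and print each service's state.
// request() throws on any non-2xx response, so failures land in catch.
async function printStatus(): Promise<void> {
  try {
    const status = await client.getStatus();
    console.log(`overall: ${status.status} (v${status.version} @ ${status.commit})`);
    for (const [id, service] of Object.entries(status.services)) {
      console.log(`  ${id}: ${service.name} -> ${service.status}`);
    }
  } catch (err) {
    console.error("status fetch failed:", err);
  }
}

void printStatus();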
44 web/src/logo.svg Normal file
@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg id="Layer_1"
  xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 841.9 595.3">
  <!-- Generator: Adobe Illustrator 29.3.0, SVG Export Plug-In . SVG Version: 2.1.0 Build 146) -->
  <defs>
    <style>
      .st0 {
        fill: #9ae7fc;
      }

      .st1 {
        fill: #61dafb;
      }
    </style>
  </defs>
  <g>
    <path class="st1" d="M666.3,296.5c0-32.5-40.7-63.3-103.1-82.4,14.4-63.6,8-114.2-20.2-130.4-6.5-3.8-14.1-5.6-22.4-5.6v22.3c4.6,0,8.3.9,11.4,2.6,13.6,7.8,19.5,37.5,14.9,75.7-1.1,9.4-2.9,19.3-5.1,29.4-19.6-4.8-41-8.5-63.5-10.9-13.5-18.5-27.5-35.3-41.6-50,32.6-30.3,63.2-46.9,84-46.9v-22.3c-27.5,0-63.5,19.6-99.9,53.6-36.4-33.8-72.4-53.2-99.9-53.2v22.3c20.7,0,51.4,16.5,84,46.6-14,14.7-28,31.4-41.3,49.9-22.6,2.4-44,6.1-63.6,11-2.3-10-4-19.7-5.2-29-4.7-38.2,1.1-67.9,14.6-75.8,3-1.8,6.9-2.6,11.5-2.6v-22.3c-8.4,0-16,1.8-22.6,5.6-28.1,16.2-34.4,66.7-19.9,130.1-62.2,19.2-102.7,49.9-102.7,82.3s40.7,63.3,103.1,82.4c-14.4,63.6-8,114.2,20.2,130.4,6.5,3.8,14.1,5.6,22.5,5.6,27.5,0,63.5-19.6,99.9-53.6,36.4,33.8,72.4,53.2,99.9,53.2,8.4,0,16-1.8,22.6-5.6,28.1-16.2,34.4-66.7,19.9-130.1,62-19.1,102.5-49.9,102.5-82.3zm-130.2-66.7c-3.7,12.9-8.3,26.2-13.5,39.5-4.1-8-8.4-16-13.1-24-4.6-8-9.5-15.8-14.4-23.4,14.2,2.1,27.9,4.7,41,7.9zm-45.8,106.5c-7.8,13.5-15.8,26.3-24.1,38.2-14.9,1.3-30,2-45.2,2s-30.2-.7-45-1.9c-8.3-11.9-16.4-24.6-24.2-38-7.6-13.1-14.5-26.4-20.8-39.8,6.2-13.4,13.2-26.8,20.7-39.9,7.8-13.5,15.8-26.3,24.1-38.2,14.9-1.3,30-2,45.2-2s30.2.7,45,1.9c8.3,11.9,16.4,24.6,24.2,38,7.6,13.1,14.5,26.4,20.8,39.8-6.3,13.4-13.2,26.8-20.7,39.9zm32.3-13c5.4,13.4,10,26.8,13.8,39.8-13.1,3.2-26.9,5.9-41.2,8,4.9-7.7,9.8-15.6,14.4-23.7,4.6-8,8.9-16.1,13-24.1zm-101.4,106.7c-9.3-9.6-18.6-20.3-27.8-32,9,.4,18.2.7,27.5.7s18.7-.2,27.8-.7c-9,11.7-18.3,22.4-27.5,32zm-74.4-58.9c-14.2-2.1-27.9-4.7-41-7.9,3.7-12.9,8.3-26.2,13.5-39.5,4.1,8,8.4,16,13.1,24s9.5,15.8,14.4,23.4zm73.9-208.1c9.3,9.6,18.6,20.3,27.8,32-9-.4-18.2-.7-27.5-.7s-18.7.2-27.8.7c9-11.7,18.3-22.4,27.5-32zm-74,58.9c-4.9,7.7-9.8,15.6-14.4,23.7-4.6,8-8.9,16-13,24-5.4-13.4-10-26.8-13.8-39.8,13.1-3.1,26.9-5.8,41.2-7.9zm-90.5,125.2c-35.4-15.1-58.3-34.9-58.3-50.6s22.9-35.6,58.3-50.6c8.6-3.7,18-7,27.7-10.1,5.7,19.6,13.2,40,22.5,60.9-9.2,20.8-16.6,41.1-22.2,60.6-9.9-3.1-19.3-6.5-28-10.2zm53.8,142.9c-13.6-7.8-19.5-37.5-14.9-75.7,1.1-9.4,2.9-19.3,5.1-29.4,19.6,4.8,41,8.5,63.5,10.9,13.5,18.5,27.5,35.3,41.6,50-32.6,30.3-63.2,46.9-84,46.9-4.5-.1-8.3-1-11.3-2.7zm237.2-76.2c4.7,38.2-1.1,67.9-14.6,75.8-3,1.8-6.9,2.6-11.5,2.6-20.7,0-51.4-16.5-84-46.6,14-14.7,28-31.4,41.3-49.9,22.6-2.4,44-6.1,63.6-11,2.3,10.1,4.1,19.8,5.2,29.1zm38.5-66.7c-8.6,3.7-18,7-27.7,10.1-5.7-19.6-13.2-40-22.5-60.9,9.2-20.8,16.6-41.1,22.2-60.6,9.9,3.1,19.3,6.5,28.1,10.2,35.4,15.1,58.3,34.9,58.3,50.6,0,15.7-23,35.6-58.4,50.6zm-264.9-268.7z"/>
    <circle class="st1" cx="420.9" cy="296.5" r="45.7"/>
    <path class="st1" d="M520.5,78.1"/>
  </g>
  <circle class="st0" cx="420.8" cy="296.6" r="43"/>
  <path class="st1" d="M466.1,296.6c0,25-20.2,45.2-45.2,45.2s-45.2-20.2-45.2-45.2,20.2-45.2,45.2-45.2,45.2,20.2,45.2,45.2ZM386,295.6v-6.3c0-1.1,1.2-5.1,1.8-6.2,1-1.9,2.9-3.5,4.6-4.7l-3.4-3.4c4-3.6,9.4-3.7,13.7-.7,1.9-4.7,6.6-7.1,11.6-6.7l-.8,4.2c5.9.2,13.1,4.1,13.1,10.8s0,.5-.7.7c-1.7.3-3.4-.4-5-.6s-1.2-.4-1.2.3,2.5,4.1,3,5.5,1,3.5.8,5.3c-5.6-.8-10.5-3.2-14.8-6.7.3,2.6,4.1,21.7,5.3,21.9s.8-.6,1-1.1,1.3-6.3,1.3-6.7c0-1-1.7-1.8-2.2-2.8-1.2-2.7,1.3-4.7,3.7-3.3s5.2,6.2,7.5,7.3,13,1.4,14.8,3.3-2.9,4.6-1.5,7.6c6.7-2.6,13.5-3.3,20.6-2.5,3.1-9.7,3.1-20.3-.9-29.8-7.3,0-14.7-3.6-17.2-10.8-2.5-7.2-.7-8.6-1.3-9.3-.8-1-6.3.6-7.4-1.5s.3-1.1-.2-1.4-1.9-.6-2.6-.8c-26-6.4-51.3,15.7-49.7,42.1,0,1.6,1.6,10.3,2.4,11.1s4.8,0,6.3,0,3.7.3,5,.5c2.9.4,7.2,2.4,9.4,2.5s2.4-.8,2.7-2.4c.4-2.6.5-7.4.5-10.1s-1-7.8-1.3-11.6c-.9-.2-.7,0-.9.5-.7,1.3-1.1,3.2-1.9,4.8s-5.2,8.7-5.7,9-.7-.5-.8-.8c-1.6-3.5-2-7.9-1.9-11.8-.9-1-5.4,4.9-6.7,5.3l-.8-.4v-.3h-.2ZM455.6,276.4c1.1-1.2-6-8.9-7.2-10-3-2.7-5.4-4.5-3.5,1.4s5.7,7.8,10.6,8.5h.1ZM410.9,270.1c-.4-.5-6.1,2.9-5.5,4.6,1.9-1.3,5.9-1.7,5.5-4.6ZM400.4,276.4c-.3-2.4-6.3-2.7-7.2-1s1.6,1.4,1.9,1.4c1.8.3,3.5-.6,5.2-.4h.1ZM411.3,276.8c3.8,1.3,6.6,3.6,10.9,3.7s0-3-1.2-3.9c-2.2-1.7-5.1-2.4-7.8-2.4s-1.6-.3-1.4.4c2.8.6,7.3.7,8.4,3.8-2.3-.3-3.9-1.6-6.2-2s-2.5-.5-2.6.3h0ZM420.6,290.3c-.8-5.1-5.7-10.8-10.9-11.6s-1.3-.4-.8.5,4.7,3.2,5.7,4,4.5,4.2,2.1,3.8-8.4-7.8-9.4-6.7c.2.9,1.1,1.9,1.7,2.7,3,3.8,6.9,6.8,11.8,7.4h-.2ZM395.3,279.8c-5,1.1-6.9,6.3-6.7,11,.7.8,5-3.8,5.4-4.5s2.7-4.6,1.1-4-2.9,4.4-4.2,4.6.2-2.1.4-2.5c1.1-1.6,2.9-3.1,4-4.6h0ZM400.4,281.5c-.4-.5-2,1.3-2.3,1.7-2.9,3.9-2.6,10.2-1.5,14.8.8.2.8-.3,1.2-.7,3-3.8,5.5-10.5,4.5-15.4-2.1,3.1-3.1,7.3-3.6,11h-1.3c0-4,1.9-7.7,3-11.4h0ZM426.9,305.9c0-1.7-1.7-1.4-2.5-1.9s-1.3-1.9-3-1.4c1.3,2.1,3,3.2,5.5,3.4h0ZM417.2,308.5c7.6.7,5.5-1.9,1.4-5.5-1.3-.3-1.5,4.5-1.4,5.5ZM437,309.7c-3.5-.3-7.8-2-11.2-2.1s-1.3,0-1.9.7c4,1.3,8.4,1.7,12.1,4l1-2.5h0ZM420.5,312.8c-7.3,0-15.1,3.7-20.4,8.8s-4.8,5.3-4.8,6.2c0,1.8,8.6,6.2,10.5,6.8,12.1,4.8,27.5,3.5,38.2-4.2s3.1-2.7,0-6.2c-5.7-6.6-14.7-11.4-23.4-11.3h-.1ZM398.7,316.9c-1.4-1.4-5-1.9-7-2.1s-5.3-.3-6.9.6l13.9,1.4h0ZM456.9,314.8h-7.4c-.9,0-4.9,1.1-6,1.6s-.8.6,0,.5c2.4,0,5.1-1,7.6-1.3s3.5.2,5.1,0,1.3-.3.6-.8h0Z"/>
  <path class="st0" d="M386,295.6l.8.4c1.3-.3,5.8-6.2,6.7-5.3,0,3.9.3,8.3,1.9,11.8s0,1.2.8.8,5.1-7.8,5.7-9,1.3-3.5,1.9-4.8,0-.7.9-.5c.3,3.8,1.2,7.8,1.3,11.6s0,7.5-.5,10.1-1.1,2.4-2.7,2.4-6.5-2.1-9.4-2.5-3.7-.5-5-.5-5.4,1.1-6.3,0-2.2-9.5-2.4-11.1c-1.5-26.4,23.7-48.5,49.7-42.1s2.2.4,2.6.8,0,1,.2,1.4c1.1,2,6.5.5,7.4,1.5s.4,6.9,1.3,9.3c2.5,7.2,10,10.9,17.2,10.8,4,9.4,4,20.1.9,29.8-7.2-.7-13.9,0-20.6,2.5-1.3-3.1,4.1-5.1,1.5-7.6s-11.8-1.9-14.8-3.3-5.4-6.1-7.5-7.3-4.9.6-3.7,3.3,2.1,1.8,2.2,2.8-1,6.2-1.3,6.7-.3,1.3-1,1.1c-1.1-.3-5-19.3-5.3-21.9,4.3,3.5,9.2,5.9,14.8,6.7.2-1.9-.3-3.5-.8-5.3s-3-5.1-3-5.5c0-.8.9-.3,1.2-.3,1.6,0,3.3.8,5,.6s.7.3.7-.7c0-6.6-7.2-10.6-13.1-10.8l.8-4.2c-5.1-.3-9.6,2-11.6,6.7-4.3-3-9.8-3-13.7.7l3.4,3.4c-1.8,1.3-3.5,2.8-4.6,4.7s-1.8,5.1-1.8,6.2v6.6h.2ZM431.6,265c7.8,2.1,8.7-3.5.2-1.3l-.2,1.3ZM432.4,270.9c.3.6,6.4-.4,5.8-2.3s-4.6.6-5.7.6l-.2,1.7h.1ZM434.5,276c.8,1.2,5.7-1.8,5.5-2.7-.4-1.9-6.6,1.2-5.5,2.7ZM442.9,276.4c-.9-.9-5,2.8-4.6,4,.6,2.4,5.7-3,4.6-4ZM445.1,279.9c-.3.2-3.1,4.6-1.5,5s3.5-3.4,3.5-4-1.3-1.3-2-.9h0ZM448.9,287.4c2.1.8,3.8-5.1,2.3-5.5-1.9-.6-2.6,5.1-2.3,5.5ZM457.3,288.6c.5-1.7,1.1-4.7-1-5.5-1,.3-.6,3.9-.6,4.8l.3.5,1.3.2h0Z"/>
  <path class="st0" d="M455.6,276.4c-5-.8-9.1-3.6-10.6-8.5s.5-4,3.5-1.4,8.3,8.7,7.2,10h-.1Z"/>
  <path class="st0" d="M420.6,290.3c-4.9-.6-8.9-3.6-11.8-7.4s-1.5-1.8-1.7-2.7c1-1,8.5,6.6,9.4,6.7,2.4.4-1.8-3.5-2.1-3.8-1-.8-5.4-3.5-5.7-4-.4-.8.5-.5.8-.5,5.2.8,10.1,6.6,10.9,11.6h.2Z"/>
  <path class="st0" d="M400.4,281.5c-1.1,3.7-3,7.3-3,11.4h1.3c.5-3.7,1.5-7.8,3.6-11,1,4.8-1.5,11.6-4.5,15.4s-.4.8-1.2.7c-1.1-4.5-1.3-10.8,1.5-14.8s1.9-2.2,2.3-1.7h0Z"/>
  <path class="st0" d="M411.3,276.8c0-.8,2.1-.4,2.6-.3,2.4.4,4,1.7,6.2,2-1.2-3.1-5.7-3.2-8.4-3.8,0-.8.9-.4,1.4-.4,2.8,0,5.6.7,7.8,2.4,2.2,1.7,4,4,1.2,3.9-4.3,0-7.1-2.4-10.9-3.7h0Z"/>
  <path class="st0" d="M395.3,279.8c-1.1,1.6-3,3-4,4.6s-1.9,2.8-.4,2.5,2.8-4,4.2-4.6-.9,3.6-1.1,4c-.4.7-4.7,5.2-5.4,4.5-.2-4.6,1.8-9.9,6.7-11h0Z"/>
  <path class="st0" d="M437,309.7l-1,2.5c-3.6-2.3-8-2.8-12.1-4,.5-.7,1.1-.7,1.9-.7,3.4,0,7.8,1.8,11.2,2.1h0Z"/>
  <path class="st0" d="M417.2,308.5c0-1,0-5.8,1.4-5.5,4,3.5,6.1,6.2-1.4,5.5Z"/>
  <path class="st0" d="M400.4,276.4c-1.8-.3-3.5.7-5.2.4s-2.3-.8-1.9-1.4c.8-1.6,6.9-1.4,7.2,1h-.1Z"/>
  <path class="st0" d="M410.9,270.1c.4,3-3.6,3.3-5.5,4.6-.6-1.8,5-5.1,5.5-4.6Z"/>
  <path class="st0" d="M426.9,305.9c-2.5-.2-4.1-1.3-5.5-3.4,1.7-.4,2,.8,3,1.4s2.6.3,2.5,1.9h0Z"/>
  <path class="st1" d="M432.4,270.9l.2-1.7c1.1,0,5.1-2.2,5.7-.6s-5.5,2.9-5.8,2.3h-.1Z"/>
  <path class="st1" d="M431.6,265l.2-1.3c8.4-2.1,7.7,3.4-.2,1.3Z"/>
  <path class="st1" d="M434.5,276c-1.1-1.5,5.1-4.6,5.5-2.7s-4.6,4-5.5,2.7Z"/>
  <path class="st1" d="M442.9,276.4c1.1,1.1-4,6.4-4.6,4s3.7-4.9,4.6-4Z"/>
  <path class="st1" d="M445.1,279.9c.7-.4,2.1,0,2,.9s-2.4,4.4-3.5,4,1.3-4.8,1.5-5h0Z"/>
  <path class="st1" d="M448.9,287.4c-.3-.3.4-6.1,2.3-5.5,1.4.4-.2,6.2-2.3,5.5Z"/>
  <path class="st1" d="M457.3,288.6l-1.3-.2-.3-.5c0-.9-.4-4.6.6-4.8,2.1.8,1.5,3.8,1,5.5h0Z"/>
  <path class="st0" d="M420.5,312.8c8.9,0,17.9,4.7,23.4,11.3,5.6,6.6,3.8,3.5,0,6.2-10.7,7.7-26.1,9-38.2,4.2-1.9-.8-10.5-5.1-10.5-6.8s4-5.3,4.8-6.2c5.3-5,13.1-8.6,20.4-8.8h.1Z"/>
  <path class="st0" d="M398.7,316.9l-13.9-1.4c1.7-1,5-.8,6.9-.6s5.6.7,7,2.1h0Z"/>
  <path class="st0" d="M456.9,314.8c.7.5,0,.8-.6.8-1.6.2-3.5-.2-5.1,0-2.4.3-5.2,1.2-7.6,1.3s-1.1,0,0-.5,5.1-1.6,6-1.6h7.4,0Z"/>
</svg>
After Width: | Height: | Size: 8.4 KiB
53 web/src/main.tsx Normal file
@@ -0,0 +1,53 @@
import { StrictMode } from "react";
import ReactDOM from "react-dom/client";
import { RouterProvider, createRouter } from "@tanstack/react-router";
import { ThemeProvider } from "next-themes";
import { Theme } from "@radix-ui/themes";

// Import the generated route tree
import { routeTree } from "./routeTree.gen";

import "./styles.css";
import reportWebVitals from "./reportWebVitals.ts";

// Create a new router instance
const router = createRouter({
  routeTree,
  context: {},
  defaultPreload: "intent",
  scrollRestoration: true,
  defaultStructuralSharing: true,
  defaultPreloadStaleTime: 0,
});

// Register the router instance for type safety
declare module "@tanstack/react-router" {
  interface Register {
    router: typeof router;
  }
}

// Render the app
const rootElement = document.getElementById("app");
if (rootElement && !rootElement.innerHTML) {
  const root = ReactDOM.createRoot(rootElement);
  root.render(
    <StrictMode>
      <ThemeProvider
        attribute="class"
        defaultTheme="system"
        enableSystem
        disableTransitionOnChange={false}
      >
        <Theme>
          <RouterProvider router={router} />
        </Theme>
      </ThemeProvider>
    </StrictMode>
  );
}

// If you want to start measuring performance in your app, pass a function
// to log results (for example: reportWebVitals(console.log))
// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
reportWebVitals();
13 web/src/reportWebVitals.ts Normal file
@@ -0,0 +1,13 @@
const reportWebVitals = (onPerfEntry?: () => void) => {
  if (onPerfEntry && onPerfEntry instanceof Function) {
    void import("web-vitals").then(({ onCLS, onINP, onFCP, onLCP, onTTFB }) => {
      onCLS(onPerfEntry);
      onINP(onPerfEntry);
      onFCP(onPerfEntry);
      onLCP(onPerfEntry);
      onTTFB(onPerfEntry);
    });
  }
};

export default reportWebVitals;
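As the trailing comment in web/src/main.tsx suggests, measurement is enabled by passing a handler; a minimal sketch:

import reportWebVitals from "./reportWebVitals";

// Each web-vitals callback (CLS, INP, FCP, LCP, TTFB) fires once its metric is ready.
reportWebVitals(console.log);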
59 web/src/routeTree.gen.ts Normal file
@@ -0,0 +1,59 @@
/* eslint-disable */

// @ts-nocheck

// noinspection JSUnusedGlobalSymbols

// This file was automatically generated by TanStack Router.
// You should NOT make any changes in this file as it will be overwritten.
// Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified.

import { Route as rootRouteImport } from "./routes/__root";
import { Route as IndexRouteImport } from "./routes/index";

const IndexRoute = IndexRouteImport.update({
  id: "/",
  path: "/",
  getParentRoute: () => rootRouteImport,
} as any);

export interface FileRoutesByFullPath {
  "/": typeof IndexRoute;
}
export interface FileRoutesByTo {
  "/": typeof IndexRoute;
}
export interface FileRoutesById {
  __root__: typeof rootRouteImport;
  "/": typeof IndexRoute;
}
export interface FileRouteTypes {
  fileRoutesByFullPath: FileRoutesByFullPath;
  fullPaths: "/";
  fileRoutesByTo: FileRoutesByTo;
  to: "/";
  id: "__root__" | "/";
  fileRoutesById: FileRoutesById;
}
export interface RootRouteChildren {
  IndexRoute: typeof IndexRoute;
}

declare module "@tanstack/react-router" {
  interface FileRoutesByPath {
    "/": {
      id: "/";
      path: "/";
      fullPath: "/";
      preLoaderRoute: typeof IndexRouteImport;
      parentRoute: typeof rootRouteImport;
    };
  }
}

const rootRouteChildren: RootRouteChildren = {
  IndexRoute: IndexRoute,
};
export const routeTree = rootRouteImport
  ._addFileChildren(rootRouteChildren)
  ._addFileTypes<FileRouteTypes>();
34 web/src/routes/__root.tsx Normal file
@@ -0,0 +1,34 @@
import { Outlet, createRootRoute } from "@tanstack/react-router";
import { TanStackRouterDevtoolsPanel } from "@tanstack/react-router-devtools";
import { TanstackDevtools } from "@tanstack/react-devtools";
import { Theme } from "@radix-ui/themes";
import "@radix-ui/themes/styles.css";
import { ThemeProvider } from "next-themes";

export const Route = createRootRoute({
  component: () => (
    <ThemeProvider
      attribute="class"
      defaultTheme="system"
      enableSystem
      disableTransitionOnChange={false}
    >
      <Theme accentColor="blue" grayColor="gray">
        <Outlet />
        {import.meta.env.DEV ? (
          <TanstackDevtools
            config={{
              position: "bottom-left",
            }}
            plugins={[
              {
                name: "Tanstack Router",
                render: <TanStackRouterDevtoolsPanel />,
              },
            ]}
          />
        ) : null}
      </Theme>
    </ThemeProvider>
  ),
});
399 web/src/routes/index.tsx Normal file
@@ -0,0 +1,399 @@
import { createFileRoute } from "@tanstack/react-router";
import { useState, useEffect } from "react";
import { client, type StatusResponse, type Status } from "../lib/api";
import { Card, Flex, Text, Tooltip, Skeleton } from "@radix-ui/themes";
import {
  CheckCircle,
  XCircle,
  Clock,
  Bot,
  Globe,
  Hourglass,
  Activity,
  MessageCircle,
  Circle,
  WifiOff,
} from "lucide-react";
import TimeAgo from "react-timeago";
import { ThemeToggle } from "../components/ThemeToggle";
import "../App.css";

const REFRESH_INTERVAL = import.meta.env.DEV ? 3000 : 30000;
const REQUEST_TIMEOUT = 10000; // 10 seconds

const CARD_STYLES = {
  padding: "24px",
  maxWidth: "400px",
  width: "100%",
} as const;

const BORDER_STYLES = {
  marginTop: "16px",
  paddingTop: "16px",
  borderTop: "1px solid var(--gray-7)",
} as const;

const SERVICE_ICONS: Record<string, typeof Bot> = {
  bot: Bot,
  banner: Globe,
  discord: MessageCircle,
};

interface ResponseTiming {
  health: number | null;
  status: number | null;
}

interface StatusIcon {
  icon: typeof CheckCircle;
  color: string;
}

interface Service {
  name: string;
  status: Status;
  icon: typeof Bot;
}

type StatusState =
  | {
      mode: "loading";
    }
  | {
      mode: "response";
      timing: ResponseTiming;
      lastFetch: Date;
      status: StatusResponse;
    }
  | {
      mode: "error";
      lastFetch: Date;
    }
  | {
      mode: "timeout";
      lastFetch: Date;
    };

const formatNumber = (num: number): string => {
  return num.toLocaleString();
};

const getStatusIcon = (status: Status | "Unreachable"): StatusIcon => {
  const statusMap: Record<Status | "Unreachable", StatusIcon> = {
    Active: { icon: CheckCircle, color: "green" },
    Connected: { icon: CheckCircle, color: "green" },
    Healthy: { icon: CheckCircle, color: "green" },
    Disabled: { icon: Circle, color: "gray" },
    Error: { icon: XCircle, color: "red" },
    Unreachable: { icon: WifiOff, color: "red" },
  };

  return statusMap[status];
};

const getOverallHealth = (state: StatusState): Status | "Unreachable" => {
  if (state.mode === "timeout") return "Unreachable";
  if (state.mode === "error") return "Error";
  if (state.mode === "response") return state.status.status;
  return "Error";
};

const getServices = (state: StatusState): Service[] => {
  if (state.mode !== "response") return [];

  return Object.entries(state.status.services).map(([serviceId, serviceInfo]) => ({
    name: serviceInfo.name,
    status: serviceInfo.status,
    icon: SERVICE_ICONS[serviceId] ?? Circle, // fall back to a generic icon for unknown service ids
  }));
};

const StatusDisplay = ({ status }: { status: Status | "Unreachable" }) => {
  const { icon: Icon, color } = getStatusIcon(status);

  return (
    <Flex align="center" gap="2">
      <Text
        size="2"
        style={{
          color: status === "Disabled" ? "var(--gray-11)" : undefined,
          opacity: status === "Disabled" ? 0.7 : undefined,
        }}
      >
        {status}
      </Text>
      <Icon color={color} size={16} />
    </Flex>
  );
};

const ServiceStatus = ({ service }: { service: Service }) => {
  return (
    <Flex align="center" justify="between">
      <Flex align="center" gap="2">
        <service.icon size={18} />
        <Text style={{ color: "var(--gray-11)" }}>{service.name}</Text>
      </Flex>
      <StatusDisplay status={service.status} />
    </Flex>
  );
};

const SkeletonService = () => {
  return (
    <Flex align="center" justify="between">
      <Flex align="center" gap="2">
        <Skeleton height="24px" width="18px" />
        <Skeleton height="24px" width="60px" />
      </Flex>
      <Flex align="center" gap="2">
        <Skeleton height="20px" width="50px" />
        <Skeleton height="20px" width="16px" />
      </Flex>
    </Flex>
  );
};

const TimingRow = ({
  icon: Icon,
  name,
  children,
}: {
  icon: React.ComponentType<{ size?: number }>;
  name: string;
  children: React.ReactNode;
}) => (
  <Flex align="center" justify="between">
    <Flex align="center" gap="2">
      <Icon size={13} />
      <Text size="2" color="gray">
        {name}
      </Text>
    </Flex>
    {children}
  </Flex>
);

function App() {
  const [state, setState] = useState<StatusState>({ mode: "loading" });

  // State helpers
  const isLoading = state.mode === "loading";
  const hasError = state.mode === "error";
  const hasTimeout = state.mode === "timeout";
  const hasResponse = state.mode === "response";
  const shouldShowSkeleton = isLoading || hasError;
  const shouldShowTiming = hasResponse && state.timing.health !== null;
  const shouldShowLastFetch = hasResponse || hasError || hasTimeout;

  useEffect(() => {
    let timeoutId: NodeJS.Timeout;

    const fetchData = async () => {
      try {
        const startTime = Date.now();

        // Create a timeout promise
        const timeoutPromise = new Promise<never>((_, reject) => {
          setTimeout(() => reject(new Error("Request timeout")), REQUEST_TIMEOUT);
        });

        // Race between the API call and timeout
        const statusData = await Promise.race([client.getStatus(), timeoutPromise]);

        const endTime = Date.now();
        const responseTime = endTime - startTime;

        setState({
          mode: "response",
          status: statusData,
          timing: { health: responseTime, status: responseTime },
          lastFetch: new Date(),
        });
      } catch (err) {
        const errorMessage = err instanceof Error ? err.message : "Failed to fetch data";

        // Check if it's a timeout error
        if (errorMessage === "Request timeout") {
          setState({
            mode: "timeout",
            lastFetch: new Date(),
          });
        } else {
          setState({
            mode: "error",
            lastFetch: new Date(),
          });
        }
      }

      // Schedule the next request after the current one completes
      timeoutId = setTimeout(() => void fetchData(), REFRESH_INTERVAL);
    };

    // Start the first request immediately
    void fetchData();

    return () => {
      if (timeoutId) {
        clearTimeout(timeoutId);
      }
    };
  }, []);

  const overallHealth = getOverallHealth(state);
  const { color: overallColor } = getStatusIcon(overallHealth);
  const services = getServices(state);

  return (
    <div className="App">
      <div
        style={{
          position: "fixed",
          top: "20px",
          right: "20px",
          zIndex: 1000,
        }}
      >
        <ThemeToggle />
      </div>

      <Flex
        direction="column"
        align="center"
        justify="center"
        style={{ minHeight: "100vh", padding: "20px" }}
      >
        <Card style={CARD_STYLES}>
          <Flex direction="column" gap="4">
            {/* Overall Status */}
            <Flex align="center" justify="between">
              <Flex align="center" gap="2">
                <Activity
                  color={isLoading ? undefined : overallColor}
                  size={18}
                  className={isLoading ? "animate-pulse" : ""}
                  style={{
                    opacity: isLoading ? 0.3 : 1,
                    transition: "opacity 2s ease-in-out, color 2s ease-in-out",
                  }}
                />
                <Text size="4" style={{ color: "var(--gray-12)" }}>
                  System Status
                </Text>
              </Flex>
              {isLoading ? (
                <Skeleton height="20px" width="80px" />
              ) : (
                <StatusDisplay status={overallHealth} />
              )}
            </Flex>

            {/* Individual Services */}
            <Flex direction="column" gap="3" style={{ marginTop: "16px" }}>
              {shouldShowSkeleton
                ? // Show skeleton rows for 3 services while loading or after an error
                  Array.from({ length: 3 }).map((_, index) => <SkeletonService key={index} />)
                : services.map((service) => <ServiceStatus key={service.name} service={service} />)}
            </Flex>

            <Flex direction="column" gap="2" style={BORDER_STYLES}>
              {isLoading ? (
                <TimingRow icon={Hourglass} name="Response Time">
                  <Skeleton height="18px" width="50px" />
                </TimingRow>
              ) : shouldShowTiming ? (
                <TimingRow icon={Hourglass} name="Response Time">
                  <Text size="2" style={{ color: "var(--gray-11)" }}>
                    {formatNumber(state.timing.health!)}ms
                  </Text>
                </TimingRow>
              ) : null}

              {shouldShowLastFetch ? (
                <TimingRow icon={Clock} name="Last Updated">
                  {isLoading ? (
                    <Text size="2" style={{ paddingBottom: "2px" }} color="gray">
                      Loading...
                    </Text>
                  ) : (
                    <Tooltip content={`as of ${state.lastFetch.toLocaleTimeString()}`}>
                      <abbr
                        style={{
                          cursor: "pointer",
                          textDecoration: "underline",
                          textDecorationStyle: "dotted",
                          textDecorationColor: "var(--gray-6)",
                          textUnderlineOffset: "6px",
                        }}
                      >
                        <Text size="2" style={{ color: "var(--gray-11)" }}>
                          <TimeAgo date={state.lastFetch} />
                        </Text>
                      </abbr>
                    </Tooltip>
                  )}
                </TimingRow>
              ) : isLoading ? (
                <TimingRow icon={Clock} name="Last Updated">
                  <Text size="2" color="gray">
                    Loading...
                  </Text>
                </TimingRow>
              ) : null}
            </Flex>
          </Flex>
        </Card>
        <Flex justify="center" style={{ marginTop: "12px" }} gap="2" align="center">
          {__APP_VERSION__ && (
            <Text
              size="1"
              style={{
                color: "var(--gray-11)",
              }}
            >
              v{__APP_VERSION__}
            </Text>
          )}
          {__APP_VERSION__ && (
            <div
              style={{
                width: "1px",
                height: "12px",
                backgroundColor: "var(--gray-10)",
                opacity: 0.3,
              }}
            />
          )}
          <Text
            size="1"
            style={{
              color: "var(--gray-11)",
              textDecoration: "none",
            }}
          >
            <a
              href={
                hasResponse && state.status.commit
                  ? `https://github.com/Xevion/banner/commit/${state.status.commit}`
                  : "https://github.com/Xevion/banner"
              }
              target="_blank"
              rel="noopener noreferrer"
              style={{
                color: "inherit",
                textDecoration: "none",
              }}
            >
              GitHub
            </a>
          </Text>
        </Flex>
      </Flex>
    </div>
  );
}

export const Route = createFileRoute("/")({
  component: App,
});
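The effect above is a self-rescheduling poll rather than a setInterval: the next request is queued only after the current one settles, so slow responses never overlap. A distilled sketch of that pattern under the same timeout semantics (the poll helper is a hypothetical name, not part of this diff):

// Race each fetch against a timeout, then queue the next tick.
function poll<T>(fetchOnce: () => Promise<T>, intervalMs: number, timeoutMs: number): () => void {
  let timer: ReturnType<typeof setTimeout>;

  const tick = async (): Promise<void> => {
    try {
      const timeout = new Promise<never>((_, reject) =>
        setTimeout(() => reject(new Error("Request timeout")), timeoutMs)
      );
      console.log("tick:", await Promise.race([fetchOnce(), timeout]));
    } catch (err) {
      console.error("tick failed:", err);
    }
    // Schedule the next request only after this one has settled.
    timer = setTimeout(() => void tick(), intervalMs);
  };

  void tick();
  return () => clearTimeout(timer); // cancel, mirroring the effect's cleanup
}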
13 web/src/styles.css Normal file
@@ -0,0 +1,13 @@
@import "@radix-ui/themes/styles.css";

body {
  margin: 0;
  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu",
    "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}

code {
  font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", monospace;
}
3 web/src/vite-env.d.ts vendored Normal file
@@ -0,0 +1,3 @@
/// <reference types="vite/client" />

declare const __APP_VERSION__: string;
29 web/tsconfig.json Normal file
@@ -0,0 +1,29 @@
{
  "include": ["**/*.ts", "**/*.tsx"],
  "compilerOptions": {
    "target": "ES2022",
    "jsx": "react-jsx",
    "module": "ESNext",
    "lib": ["ES2022", "DOM", "DOM.Iterable"],
    "types": ["vite/client"],

    /* Bundler mode */
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "isolatedModules": true,
    "noEmit": true,

    /* Linting */
    "skipLibCheck": true,
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedSideEffectImports": true,
    "baseUrl": ".",
    "paths": {
      "@/*": ["./src/*"]
    }
  }
}
67 web/vite.config.ts Normal file
@@ -0,0 +1,67 @@
import { defineConfig } from "vite";
import viteReact from "@vitejs/plugin-react";
import tanstackRouter from "@tanstack/router-plugin/vite";
import { resolve } from "node:path";
import { readFileSync, existsSync } from "node:fs";

// Extract version from Cargo.toml
function getVersion() {
  const filename = "Cargo.toml";
  const paths = [resolve(__dirname, filename), resolve(__dirname, "..", filename)];

  for (const path of paths) {
    try {
      // Check if file exists before reading
      if (!existsSync(path)) {
        console.log("Skipping ", path, " because it does not exist");
        continue;
      }

      const cargoTomlContent = readFileSync(path, "utf8");
      const versionMatch = cargoTomlContent.match(/^version\s*=\s*"([^"]+)"/m);
      if (versionMatch) {
        console.log("Found version in ", path, ": ", versionMatch[1]);
        return versionMatch[1];
      }
    } catch (error) {
      console.warn("Failed to read Cargo.toml at path: ", path, error);
      // Continue to next path
    }
  }

  console.warn("Could not read version from Cargo.toml in any location");
  return "unknown";
}

const version = getVersion();

// https://vitejs.dev/config/
export default defineConfig({
  plugins: [tanstackRouter({ autoCodeSplitting: true }), viteReact()],
  test: {
    globals: true,
    environment: "jsdom",
  },
  resolve: {
    alias: {
      "@": resolve(__dirname, "./src"),
    },
  },
  server: {
    port: 3000,
    proxy: {
      "/api": {
        target: "http://localhost:8080",
        changeOrigin: true,
        secure: false,
      },
    },
  },
  build: {
    outDir: "dist",
    sourcemap: true,
  },
  define: {
    __APP_VERSION__: JSON.stringify(version),
  },
});
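Two details here connect to earlier files in this diff: the define block stamps the Cargo.toml version into the bundle as __APP_VERSION__ (declared in web/src/vite-env.d.ts and rendered in the footer of web/src/routes/index.tsx), and the /api proxy is what lets the client's relative base path reach the backend on port 8080 during development. A sketch of the compile-time substitution:

// Vite replaces the bare identifier at build time with the JSON string
// produced by the define block above, e.g. "0.1.0", or "unknown" if Cargo.toml was not found.
console.log(`banner web v${__APP_VERSION__}`);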