Mirror of https://github.com/Xevion/banner.git (synced 2026-01-31 14:23:36 -06:00)

Compare commits

50 Commits
| SHA1 |
|---|
| b02a0738e2 |
| 5d7d60cd96 |
| 1954166db6 |
| a2a9116b7a |
| a103f0643a |
| 474d519b9d |
| fb27bdc119 |
| 669dec0235 |
| 67ba63339a |
| 7b8c11ac13 |
| a767a3f8be |
| 8ce398c0e0 |
| 9fed651641 |
| 75a99c10ea |
| 857ceabcca |
| 203c337cf0 |
| 39ba131322 |
| 2fad9c969d |
| 47b4f3315f |
| fa28f13a45 |
| 5a6ea1e53a |
| ba2b2fc50a |
| cfe098d193 |
| d861888e5e |
| f0645d82d9 |
| 7a1cd2a39b |
| d2985f98ce |
| b58eb840f3 |
| 2bc6fbdf30 |
| e41b970d6e |
| e880126281 |
| db0ec1e69d |
| 2947face06 |
| 36bcc27d7f |
| 9e403e5043 |
| 98a6d978c6 |
| 4deeef2f00 |
| e008ee5a12 |
| a007ccb6a2 |
| 527cbebc6a |
| 4207783cdd |
| c90bd740de |
| 61f8bd9de7 |
| b5eaedc9bc |
| 58475c8673 |
| 78159707e2 |
| 779144a4d5 |
| 0da2e810fe |
| ed72ac6bff |
| 57b5cafb27 |
.gitignore (vendored) +1 -1

@@ -2,5 +2,5 @@
/target

# ts-rs bindings
web/src/lib/bindings/*.ts
web/src/lib/bindings/**/*.ts
!web/src/lib/bindings/index.ts
Cargo.lock (generated) +309 -16

@@ -26,6 +26,21 @@ dependencies = [
"memchr",
]

[[package]]
name = "alloc-no-stdlib"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3"

[[package]]
name = "alloc-stdlib"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece"
dependencies = [
"alloc-no-stdlib",
]

[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -106,6 +121,19 @@ dependencies = [
"serde",
]

[[package]]
name = "async-compression"
version = "0.4.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2"
dependencies = [
"compression-codecs",
"compression-core",
"futures-core",
"pin-project-lite",
"tokio",
]

[[package]]
name = "async-trait"
version = "0.1.89"
@@ -149,11 +177,12 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"

[[package]]
name = "axum"
version = "0.8.4"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5"
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
dependencies = [
"axum-core",
"base64 0.22.1",
"bytes",
"form_urlencoded",
"futures-util",
@@ -168,13 +197,14 @@ dependencies = [
"mime",
"percent-encoding",
"pin-project-lite",
"rustversion",
"serde",
"serde_core",
"serde_json",
"serde_path_to_error",
"serde_urlencoded",
"sha1",
"sync_wrapper 1.0.2",
"tokio",
"tokio-tungstenite 0.28.0",
"tower",
"tower-layer",
"tower-service",
@@ -183,9 +213,9 @@ dependencies = [

[[package]]
name = "axum-core"
version = "0.5.2"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6"
checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
dependencies = [
"bytes",
"futures-core",
@@ -194,13 +224,37 @@ dependencies = [
"http-body-util",
"mime",
"pin-project-lite",
"rustversion",
"sync_wrapper 1.0.2",
"tower-layer",
"tower-service",
"tracing",
]

[[package]]
name = "axum-extra"
version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fef252edff26ddba56bbcdf2ee3307b8129acb86f5749b68990c168a6fcc9c76"
dependencies = [
"axum",
"axum-core",
"bytes",
"form_urlencoded",
"futures-core",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"http-body-util",
"mime",
"pin-project-lite",
"serde_core",
"serde_html_form",
"serde_path_to_error",
"tower-layer",
"tower-service",
"tracing",
]

[[package]]
name = "backtrace"
version = "0.3.75"
@@ -218,13 +272,15 @@ dependencies = [

[[package]]
name = "banner"
version = "0.3.4"
version = "0.6.0"
dependencies = [
"anyhow",
"async-trait",
"axum",
"axum-extra",
"bitflags 2.9.4",
"chrono",
"chrono-tz",
"clap",
"compile-time",
"cookie",
@@ -236,6 +292,7 @@ dependencies = [
"futures",
"governor",
"html-escape",
"htmlize",
"http 1.3.1",
"mime_guess",
"num-format",
@@ -259,7 +316,9 @@ dependencies = [
"tracing",
"tracing-subscriber",
"ts-rs",
"unicode-normalization",
"url",
"urlencoding",
"yansi",
]

@@ -305,6 +364,27 @@ dependencies = [
"generic-array",
]

[[package]]
name = "brotli"
version = "8.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor",
]

[[package]]
name = "brotli-decompressor"
version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
]

[[package]]
name = "bstr"
version = "1.12.0"
@@ -382,6 +462,8 @@ version = "1.2.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42bc4aea80032b7bf409b0bc7ccad88853858911b7713a8062fdc0623867bedc"
dependencies = [
"jobserver",
"libc",
"shlex",
]

@@ -405,6 +487,16 @@ dependencies = [
"windows-link 0.2.0",
]

[[package]]
name = "chrono-tz"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3"
dependencies = [
"chrono",
"phf 0.12.1",
]

[[package]]
name = "clap"
version = "4.5.47"
@@ -476,6 +568,26 @@ dependencies = [
"time",
]

[[package]]
name = "compression-codecs"
version = "0.4.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b"
dependencies = [
"brotli",
"compression-core",
"flate2",
"memchr",
"zstd",
"zstd-safe",
]

[[package]]
name = "compression-core"
version = "0.4.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d"

[[package]]
name = "concurrent-queue"
version = "2.5.0"
@@ -1238,6 +1350,19 @@ dependencies = [
"utf8-width",
]

[[package]]
name = "htmlize"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d347c0de239be20ba0982e4822de3124404281e119ae3e11f5d7425a414e1935"
dependencies = [
"memchr",
"pastey",
"phf 0.11.3",
"phf_codegen",
"serde_json",
]

[[package]]
name = "http"
version = "0.2.12"
@@ -1617,6 +1742,16 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"

[[package]]
name = "jobserver"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
dependencies = [
"getrandom 0.3.3",
"libc",
]

[[package]]
name = "js-sys"
version = "0.3.77"
@@ -1980,6 +2115,12 @@ dependencies = [
"windows-targets 0.52.6",
]

[[package]]
name = "pastey"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec"

[[package]]
name = "pear"
version = "0.2.9"
@@ -2018,6 +2159,62 @@ version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"

[[package]]
name = "phf"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
dependencies = [
"phf_shared 0.11.3",
]

[[package]]
name = "phf"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7"
dependencies = [
"phf_shared 0.12.1",
]

[[package]]
name = "phf_codegen"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
dependencies = [
"phf_generator",
"phf_shared 0.11.3",
]

[[package]]
name = "phf_generator"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
dependencies = [
"phf_shared 0.11.3",
"rand 0.8.5",
]

[[package]]
name = "phf_shared"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5"
dependencies = [
"siphasher",
]

[[package]]
name = "phf_shared"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981"
dependencies = [
"siphasher",
]

[[package]]
name = "pin-project-lite"
version = "0.2.16"
@@ -2688,9 +2885,19 @@ dependencies = [

[[package]]
name = "serde"
version = "1.0.219"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [
"serde_core",
"serde_derive",
]

[[package]]
name = "serde_core"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
dependencies = [
"serde_derive",
]
@@ -2706,15 +2913,28 @@ dependencies = [

[[package]]
name = "serde_derive"
version = "1.0.219"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.106",
]

[[package]]
name = "serde_html_form"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2f2d7ff8a2140333718bb329f5c40fc5f0865b84c426183ce14c97d2ab8154f"
dependencies = [
"form_urlencoded",
"indexmap",
"itoa",
"ryu",
"serde_core",
]

[[package]]
name = "serde_json"
version = "1.0.143"
@@ -2787,7 +3007,7 @@ dependencies = [
"static_assertions",
"time",
"tokio",
"tokio-tungstenite",
"tokio-tungstenite 0.21.0",
"tracing",
"typemap_rev",
"typesize",
@@ -2851,6 +3071,12 @@ dependencies = [
"rand_core 0.6.4",
]

[[package]]
name = "siphasher"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e"

[[package]]
name = "skeptic"
version = "0.13.7"
@@ -3475,10 +3701,22 @@ dependencies = [
"rustls-pki-types",
"tokio",
"tokio-rustls 0.25.0",
"tungstenite",
"tungstenite 0.21.0",
"webpki-roots 0.26.11",
]

[[package]]
name = "tokio-tungstenite"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857"
dependencies = [
"futures-util",
"log",
"tokio",
"tungstenite 0.28.0",
]

[[package]]
name = "tokio-util"
version = "0.7.16"
@@ -3555,14 +3793,17 @@ version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
dependencies = [
"async-compression",
"bitflags 2.9.4",
"bytes",
"futures-core",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"iri-string",
"pin-project-lite",
"tokio",
"tokio-util",
"tower",
"tower-layer",
"tower-service",
@@ -3674,6 +3915,7 @@ version = "11.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4994acea2522cd2b3b85c1d9529a55991e3ad5e25cdcd3de9d505972c4379424"
dependencies = [
"chrono",
"serde_json",
"thiserror 2.0.16",
"ts-rs-macros",
@@ -3712,6 +3954,23 @@ dependencies = [
"utf-8",
]

[[package]]
name = "tungstenite"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442"
dependencies = [
"bytes",
"data-encoding",
"http 1.3.1",
"httparse",
"log",
"rand 0.9.2",
"sha1",
"thiserror 2.0.16",
"utf-8",
]

[[package]]
name = "typemap_rev"
version = "0.3.0"
@@ -3782,9 +4041,9 @@ checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"

[[package]]
name = "unicode-normalization"
version = "0.1.24"
version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956"
checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8"
dependencies = [
"tinyvec",
]
@@ -3813,6 +4072,12 @@ dependencies = [
"serde",
]

[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"

[[package]]
name = "utf-8"
version = "0.7.6"
@@ -4508,3 +4773,31 @@ dependencies = [
"quote",
"syn 2.0.106",
]

[[package]]
name = "zstd"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
dependencies = [
"zstd-safe",
]

[[package]]
name = "zstd-safe"
version = "7.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
dependencies = [
"zstd-sys",
]

[[package]]
name = "zstd-sys"
version = "2.0.16+zstd.1.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
dependencies = [
"cc",
"pkg-config",
]
Cargo.toml +9 -4

@@ -1,6 +1,6 @@
[package]
name = "banner"
version = "0.3.4"
version = "0.6.0"
edition = "2024"
default-run = "banner"

@@ -11,7 +11,7 @@ embed-assets = ["dep:rust-embed", "dep:mime_guess"]
[dependencies]
anyhow = "1.0.99"
async-trait = "0.1"
axum = "0.8.4"
axum = { version = "0.8.4", features = ["ws"] }
bitflags = { version = "2.9.4", features = ["serde"] }
chrono = { version = "0.4.42", features = ["serde"] }
compile-time = "0.2.0"
@@ -48,15 +48,20 @@ url = "2.5"
governor = "0.10.1"
serde_path_to_error = "0.1.17"
num-format = "0.4.4"
tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout"] }
tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout", "compression-full"] }
rust-embed = { version = "8.0", features = ["include-exclude"], optional = true }
mime_guess = { version = "2.0", optional = true }
clap = { version = "4.5", features = ["derive"] }
rapidhash = "4.1.0"
yansi = "1.0.1"
extension-traits = "2"
ts-rs = { version = "11.1.0", features = ["serde-compat", "serde-json-impl"] }
ts-rs = { version = "11.1.0", features = ["chrono-impl", "serde-compat", "serde-json-impl", "no-serde-warnings"] }
html-escape = "0.2.13"
axum-extra = { version = "0.12.5", features = ["query"] }
urlencoding = "2.1.3"
chrono-tz = "0.10.4"
htmlize = { version = "1.0.6", features = ["unescape"] }
unicode-normalization = "0.1.25"

[dev-dependencies]
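The new `ws` feature on axum (and the tokio-tungstenite 0.28 entry in the lockfile above) lines up with the changelog's real-time scrape job monitoring. A minimal sketch of what a browser-side consumer of such a socket can look like, assuming a hypothetical `/api/ws/jobs` path and JSON messages (neither is confirmed by this diff):

```typescript
// Sketch only: the endpoint path and message shape are assumptions,
// not taken from this diff.
type JobUpdate = { id: number; status: string };

function watchJobs(onUpdate: (u: JobUpdate) => void): WebSocket {
  const proto = location.protocol === "https:" ? "wss" : "ws";
  const ws = new WebSocket(`${proto}://${location.host}/api/ws/jobs`);
  ws.onmessage = (ev) => onUpdate(JSON.parse(ev.data) as JobUpdate);
  // Naive reconnect; a real client would back off and surface connection status.
  ws.onclose = () => setTimeout(() => watchJobs(onUpdate), 1000);
  return ws;
}
```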
Dockerfile +7 -4

@@ -7,6 +7,9 @@ FROM oven/bun:1 AS frontend-builder

WORKDIR /app

# Install zstd for pre-compression
RUN apt-get update && apt-get install -y --no-install-recommends zstd && rm -rf /var/lib/apt/lists/*

# Copy backend Cargo.toml for build-time version retrieval
COPY ./Cargo.toml ./

@@ -19,8 +22,8 @@ RUN bun install --frozen-lockfile
# Copy frontend source code
COPY ./web ./

# Build frontend
RUN bun run build
# Build frontend, then pre-compress static assets (gzip, brotli, zstd)
RUN bun run build && bun run scripts/compress-assets.ts

# --- Chef Base Stage ---
FROM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION} AS chef
@@ -112,5 +115,5 @@ HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
ENV HOSTS=0.0.0.0,[::]

# Implicitly uses PORT environment variable
# temporary: running without 'scraper' service
CMD ["sh", "-c", "exec ./banner --services web,bot"]
# Runs all services: web, bot, and scraper
CMD ["sh", "-c", "exec ./banner"]
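`scripts/compress-assets.ts` itself is not part of this diff. As a rough sketch of what a build-time pre-compression pass can look like, assuming the frontend output directory name and reusing the `zstd` CLI the Dockerfile installs:

```typescript
// Hypothetical sketch; the real scripts/compress-assets.ts is not shown in this diff.
// Writes .gz and .br siblings via node:zlib, and .zst via the zstd CLI installed
// earlier in the Dockerfile.
import { readdirSync, readFileSync, writeFileSync, statSync } from "node:fs";
import { join } from "node:path";
import { gzipSync, brotliCompressSync } from "node:zlib";

const root = "build"; // assumed frontend output directory
const exts = [".js", ".css", ".html", ".svg", ".json"];

for (const entry of readdirSync(root, { recursive: true }) as string[]) {
  const path = join(root, entry);
  if (!statSync(path).isFile()) continue;
  if (!exts.some((e) => path.endsWith(e))) continue;
  const data = readFileSync(path);
  writeFileSync(`${path}.gz`, gzipSync(data, { level: 9 }));
  writeFileSync(`${path}.br`, brotliCompressSync(data));
  Bun.spawnSync(["zstd", "-19", "-q", "-f", path, "-o", `${path}.zst`]);
}
```

Negotiating which variant to serve against the request's `Accept-Encoding` header is then the server's job, per the changelog's "encoding negotiation" entry.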
Justfile

@@ -1,51 +1,373 @@
set dotenv-load
default_services := "bot,web,scraper"

default:
    just --list

# Run all checks (format, clippy, tests, lint)
check:
    cargo fmt --all -- --check
    cargo clippy --all-features -- --deny warnings
    cargo nextest run -E 'not test(export_bindings)'
    bun run --cwd web check
    bun run --cwd web test
# Run all checks in parallel. Pass -f/--fix to auto-format and fix first.
[script("bun")]
check *flags:
    const args = "{{flags}}".split(/\s+/).filter(Boolean);
    let fix = false;
    for (const arg of args) {
      if (arg === "-f" || arg === "--fix") fix = true;
      else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
    }

# Generate TypeScript bindings from Rust types (ts-rs)
bindings:
    cargo test export_bindings
    const run = (cmd) => {
      const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
      if (proc.exitCode !== 0) process.exit(proc.exitCode);
    };

# Run all tests (Rust + frontend)
test: test-rust test-web
    if (fix) {
      console.log("\x1b[1;36m→ Fixing...\x1b[0m");
      run(["cargo", "fmt", "--all"]);
      run(["bun", "run", "--cwd", "web", "format"]);
      run(["cargo", "clippy", "--all-features", "--fix", "--allow-dirty", "--allow-staged",
        "--", "--deny", "warnings"]);
      console.log("\x1b[1;36m→ Verifying...\x1b[0m");
    }

# Run only Rust tests (excludes ts-rs bindings generation)
test-rust *ARGS:
    cargo nextest run -E 'not test(export_bindings)' {{ARGS}}
    // Domain groups: format check name → { peers (other checks), formatter, sanity re-check }
    const domains = {
      rustfmt: {
        peers: ["clippy", "rust-test"],
        format: () => run(["cargo", "fmt", "--all"]),
        recheck: [
          { name: "rustfmt", cmd: ["cargo", "fmt", "--all", "--", "--check"] },
          { name: "cargo-check", cmd: ["cargo", "check", "--all-features"] },
        ],
      },
      biome: {
        peers: ["svelte-check", "web-test"],
        format: () => run(["bun", "run", "--cwd", "web", "format"]),
        recheck: [
          { name: "biome", cmd: ["bun", "run", "--cwd", "web", "format:check"] },
          { name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
        ],
      },
    };

# Run only frontend tests
test-web:
    bun run --cwd web test
    const checks = [
      { name: "rustfmt", cmd: ["cargo", "fmt", "--all", "--", "--check"], terse: true },
      { name: "clippy", cmd: ["cargo", "clippy", "--all-features", "--", "--deny", "warnings"] },
      { name: "rust-test", cmd: ["cargo", "nextest", "run", "-E", "not test(export_bindings)"] },
      { name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
      { name: "biome", cmd: ["bun", "run", "--cwd", "web", "format:check"] },
      { name: "web-test", cmd: ["bun", "run", "--cwd", "web", "test"] },
      // { name: "sqlx-prepare", cmd: ["cargo", "sqlx", "prepare", "--check"] },
    ];

# Quick check: clippy + tests + typecheck (skips formatting)
check-quick:
    cargo clippy --all-features -- --deny warnings
    cargo nextest run -E 'not test(export_bindings)'
    bun run --cwd web check
    const isTTY = process.stderr.isTTY;
    const start = Date.now();
    const remaining = new Set(checks.map(c => c.name));

# Run the Banner API search demo (hits live UTSA API, ~20s)
search *ARGS:
    cargo run -q --bin search -- {{ARGS}}
    const promises = checks.map(async (check) => {
      const proc = Bun.spawn(check.cmd, {
        env: { ...process.env, FORCE_COLOR: "1" },
        stdout: "pipe", stderr: "pipe",
      });
      const [stdout, stderr] = await Promise.all([
        new Response(proc.stdout).text(),
        new Response(proc.stderr).text(),
      ]);
      await proc.exited;
      return { ...check, stdout, stderr, exitCode: proc.exitCode,
        elapsed: ((Date.now() - start) / 1000).toFixed(1) };
    });

    const interval = isTTY ? setInterval(() => {
      const elapsed = ((Date.now() - start) / 1000).toFixed(1);
      process.stderr.write(`\r\x1b[K${elapsed}s [${Array.from(remaining).join(", ")}]`);
    }, 100) : null;

    // Phase 1: collect all results, eagerly displaying whichever finishes first
    const results = {};
    let anyFailed = false;
    const tagged = promises.map((p, i) => p.then(r => ({ i, r })));
    for (let n = 0; n < checks.length; n++) {
      const { i, r } = await Promise.race(tagged);
      tagged[i] = new Promise(() => {}); // sentinel: never resolves
      results[r.name] = r;
      remaining.delete(r.name);
      if (isTTY) process.stderr.write(`\r\x1b[K`);
      if (r.exitCode !== 0) {
        anyFailed = true;
        process.stdout.write(`\x1b[31m✗ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
        if (!r.terse) {
          if (r.stdout) process.stdout.write(r.stdout);
          if (r.stderr) process.stderr.write(r.stderr);
        }
      } else {
        process.stdout.write(`\x1b[32m✓ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
      }
    }

    if (interval) clearInterval(interval);
    if (isTTY) process.stderr.write(`\r\x1b[K`);

    // Phase 2: auto-fix formatting if it's the only failure in a domain
    let autoFixed = false;
    for (const [fmtName, domain] of Object.entries(domains)) {
      const fmtResult = results[fmtName];
      if (!fmtResult || fmtResult.exitCode === 0) continue;
      const peersAllPassed = domain.peers.every(p => results[p]?.exitCode === 0);
      if (!peersAllPassed) continue;

      process.stdout.write(`\n\x1b[1;36m→ Auto-formatting ${fmtName} (peers passed, only formatting failed)...\x1b[0m\n`);
      domain.format();

      // Re-verify format + sanity check in parallel
      const recheckStart = Date.now();
      const recheckPromises = domain.recheck.map(async (check) => {
        const proc = Bun.spawn(check.cmd, {
          env: { ...process.env, FORCE_COLOR: "1" },
          stdout: "pipe", stderr: "pipe",
        });
        const [stdout, stderr] = await Promise.all([
          new Response(proc.stdout).text(),
          new Response(proc.stderr).text(),
        ]);
        await proc.exited;
        return { ...check, stdout, stderr, exitCode: proc.exitCode,
          elapsed: ((Date.now() - recheckStart) / 1000).toFixed(1) };
      });

      let recheckFailed = false;
      for (const p of recheckPromises) {
        const r = await p;
        if (r.exitCode !== 0) {
          recheckFailed = true;
          process.stdout.write(`\x1b[31m  ✗ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
          if (r.stdout) process.stdout.write(r.stdout);
          if (r.stderr) process.stderr.write(r.stderr);
        } else {
          process.stdout.write(`\x1b[32m  ✓ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
        }
      }

      if (!recheckFailed) {
        process.stdout.write(`\x1b[32m  ✓ ${fmtName} auto-fix succeeded\x1b[0m\n`);
        results[fmtName].exitCode = 0;
        autoFixed = true;
      } else {
        process.stdout.write(`\x1b[31m  ✗ ${fmtName} auto-fix failed sanity check\x1b[0m\n`);
      }
    }

    const finalFailed = Object.values(results).some(r => r.exitCode !== 0);
    if (autoFixed && !finalFailed) {
      process.stdout.write(`\n\x1b[1;32m✓ All checks passed (formatting was auto-fixed)\x1b[0m\n`);
    }
    process.exit(finalFailed ? 1 : 0);

# Format all Rust and TypeScript code
format:
    cargo fmt --all
    bun run --cwd web format

# Check formatting without modifying (CI-friendly)
format-check:
    cargo fmt --all -- --check
    bun run --cwd web format:check
# Run tests. Usage: just test [rust|web|<nextest filter args>]
[script("bun")]
test *args:
    const input = "{{args}}".trim();
    const run = (cmd) => {
      const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
      if (proc.exitCode !== 0) process.exit(proc.exitCode);
    };
    if (input === "web") {
      run(["bun", "run", "--cwd", "web", "test"]);
    } else if (input === "rust") {
      run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
    } else if (input === "") {
      run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
      run(["bun", "run", "--cwd", "web", "test"]);
    } else {
      run(["cargo", "nextest", "run", ...input.split(/\s+/)]);
    }

# Generate TypeScript bindings from Rust types (ts-rs)
bindings:
    cargo test export_bindings

# Run the Banner API search demo (hits live UTSA API, ~20s)
search *ARGS:
    cargo run -q --bin search -- {{ARGS}}

# Pass args to binary after --: just dev -n -- --some-flag
# Dev server. Flags: -f(rontend) -b(ackend) -W(no-watch) -n(o-build) -r(elease) -e(mbed) --tracing <fmt>
[script("bun")]
dev *flags:
    const argv = "{{flags}}".split(/\s+/).filter(Boolean);

    let frontendOnly = false, backendOnly = false;
    let noWatch = false, noBuild = false, release = false, embed = false;
    let tracing = "pretty";
    const passthrough = [];

    let i = 0;
    let seenDashDash = false;
    while (i < argv.length) {
      const arg = argv[i];
      if (seenDashDash) { passthrough.push(arg); i++; continue; }
      if (arg === "--") { seenDashDash = true; i++; continue; }
      if (arg.startsWith("--")) {
        if (arg === "--frontend-only") frontendOnly = true;
        else if (arg === "--backend-only") backendOnly = true;
        else if (arg === "--no-watch") noWatch = true;
        else if (arg === "--no-build") noBuild = true;
        else if (arg === "--release") release = true;
        else if (arg === "--embed") embed = true;
        else if (arg === "--tracing") { tracing = argv[++i] || "pretty"; }
        else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
      } else if (arg.startsWith("-") && arg.length > 1) {
        for (const c of arg.slice(1)) {
          if (c === "f") frontendOnly = true;
          else if (c === "b") backendOnly = true;
          else if (c === "W") noWatch = true;
          else if (c === "n") noBuild = true;
          else if (c === "r") release = true;
          else if (c === "e") embed = true;
          else { console.error(`Unknown flag: -${c}`); process.exit(1); }
        }
      } else { console.error(`Unknown argument: ${arg}`); process.exit(1); }
      i++;
    }

    // -e implies -b (no point running Vite if assets are embedded)
    if (embed) backendOnly = true;
    // -n implies -W (no build means no watch)
    if (noBuild) noWatch = true;

    // Validate conflicting flags
    if (frontendOnly && backendOnly) {
      console.error("Cannot use -f and -b together (or -e implies -b)");
      process.exit(1);
    }

    const runFrontend = !backendOnly;
    const runBackend = !frontendOnly;
    const profile = release ? "release" : "dev";
    const profileDir = release ? "release" : "debug";

    const procs = [];
    const cleanup = async () => {
      for (const p of procs) p.kill();
      await Promise.all(procs.map(p => p.exited));
    };
    process.on("SIGINT", async () => { await cleanup(); process.exit(0); });
    process.on("SIGTERM", async () => { await cleanup(); process.exit(0); });

    // Build frontend first when embedding assets (backend will bake them in)
    if (embed && !noBuild) {
      console.log(`\x1b[1;36m→ Building frontend (for embedding)...\x1b[0m`);
      const fb = Bun.spawnSync(["bun", "run", "--cwd", "web", "build"], {
        stdio: ["inherit", "inherit", "inherit"],
      });
      if (fb.exitCode !== 0) process.exit(fb.exitCode);
    }

    // Frontend: Vite dev server
    if (runFrontend) {
      const proc = Bun.spawn(["bun", "run", "--cwd", "web", "dev"], {
        stdio: ["inherit", "inherit", "inherit"],
      });
      procs.push(proc);
    }

    // Backend
    if (runBackend) {
      const backendArgs = [`--tracing`, tracing, ...passthrough];
      const bin = `target/${profileDir}/banner`;

      if (noWatch) {
        // Build first unless -n (skip build)
        if (!noBuild) {
          console.log(`\x1b[1;36m→ Building backend (${profile})...\x1b[0m`);
          const cargoArgs = ["cargo", "build", "--bin", "banner"];
          if (!embed) cargoArgs.push("--no-default-features");
          if (release) cargoArgs.push("--release");
          const build = Bun.spawnSync(cargoArgs, { stdio: ["inherit", "inherit", "inherit"] });
          if (build.exitCode !== 0) { cleanup(); process.exit(build.exitCode); }
        }

        // Run the binary directly (no watch)
        const { existsSync } = await import("fs");
        if (!existsSync(bin)) {
          console.error(`Binary not found: ${bin}`);
          console.error(`Run 'just build${release ? "" : " -d"}' first, or remove -n to use bacon.`);
          cleanup();
          process.exit(1);
        }
        console.log(`\x1b[1;36m→ Running ${bin} (no watch)\x1b[0m`);
        const proc = Bun.spawn([bin, ...backendArgs], {
          stdio: ["inherit", "inherit", "inherit"],
        });
        procs.push(proc);
      } else {
        // Bacon watch mode
        const baconArgs = ["bacon", "--headless", "run", "--"];
        if (!embed) baconArgs.push("--no-default-features");
        if (release) baconArgs.push("--profile", "release");
        baconArgs.push("--", ...backendArgs);
        const proc = Bun.spawn(baconArgs, {
          stdio: ["inherit", "inherit", "inherit"],
        });
        procs.push(proc);
      }
    }

    // Wait for any process to exit, then kill the rest
    const results = procs.map((p, i) => p.exited.then(code => ({ i, code })));
    const first = await Promise.race(results);
    cleanup();
    process.exit(first.code);

# Production build. Flags: -d(ebug) -f(rontend-only) -b(ackend-only)
[script("bun")]
build *flags:
    const argv = "{{flags}}".split(/\s+/).filter(Boolean);

    let debug = false, frontendOnly = false, backendOnly = false;
    for (const arg of argv) {
      if (arg.startsWith("--")) {
        if (arg === "--debug") debug = true;
        else if (arg === "--frontend-only") frontendOnly = true;
        else if (arg === "--backend-only") backendOnly = true;
        else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
      } else if (arg.startsWith("-") && arg.length > 1) {
        for (const c of arg.slice(1)) {
          if (c === "d") debug = true;
          else if (c === "f") frontendOnly = true;
          else if (c === "b") backendOnly = true;
          else { console.error(`Unknown flag: -${c}`); process.exit(1); }
        }
      } else { console.error(`Unknown argument: ${arg}`); process.exit(1); }
    }

    if (frontendOnly && backendOnly) {
      console.error("Cannot use -f and -b together");
      process.exit(1);
    }

    const run = (cmd) => {
      const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
      if (proc.exitCode !== 0) process.exit(proc.exitCode);
    };

    const buildFrontend = !backendOnly;
    const buildBackend = !frontendOnly;
    const profile = debug ? "debug" : "release";

    if (buildFrontend) {
      console.log("\x1b[1;36m→ Building frontend...\x1b[0m");
      run(["bun", "run", "--cwd", "web", "build"]);
    }

    if (buildBackend) {
      console.log(`\x1b[1;36m→ Building backend (${profile})...\x1b[0m`);
      const cmd = ["cargo", "build", "--bin", "banner"];
      if (!debug) cmd.push("--release");
      run(cmd);
    }

# Start PostgreSQL in Docker and update .env with connection string
# Commands: start (default), reset, rm
@@ -115,86 +437,6 @@ db cmd="start":
        await updateEnv();
    }

# Auto-reloading frontend server
frontend:
    bun run --cwd web dev

# Production build of frontend
build-frontend:
    bun run --cwd web build

# Auto-reloading backend server (with embedded assets)
backend *ARGS:
    bacon --headless run -- -- {{ARGS}}

# Auto-reloading backend server (no embedded assets, for dev proxy mode)
backend-dev *ARGS:
    bacon --headless run -- --no-default-features -- {{ARGS}}

# Production build
build:
    bun run --cwd web build
    cargo build --release --bin banner

# Run auto-reloading development build with release characteristics
dev-build *ARGS='--services web --tracing pretty': build-frontend
    bacon --headless run -- --profile dev-release -- {{ARGS}}

# Auto-reloading development build: Vite frontend + backend (no embedded assets, proxies to Vite)
[parallel]
dev *ARGS='--services web,bot': frontend (backend-dev ARGS)

# Smoke test: start web server, hit API endpoints, verify responses
[script("bash")]
test-smoke port="18080":
    set -euo pipefail
    PORT={{port}}

    cleanup() { kill "$SERVER_PID" 2>/dev/null; wait "$SERVER_PID" 2>/dev/null; }

    # Start server in background
    PORT=$PORT cargo run -q --no-default-features -- --services web --tracing json &
    SERVER_PID=$!
    trap cleanup EXIT

    # Wait for server to be ready (up to 15s)
    for i in $(seq 1 30); do
      if curl -sf "http://localhost:$PORT/api/health" >/dev/null 2>&1; then break; fi
      if ! kill -0 "$SERVER_PID" 2>/dev/null; then echo "FAIL: server exited early"; exit 1; fi
      sleep 0.5
    done

    PASS=0; FAIL=0
    check() {
      local label="$1" url="$2" expected="$3"
      body=$(curl -sf "$url") || { echo "FAIL: $label - request failed"; FAIL=$((FAIL+1)); return; }
      if echo "$body" | grep -q "$expected"; then
        echo "PASS: $label"
        PASS=$((PASS+1))
      else
        echo "FAIL: $label - expected '$expected' in: $body"
        FAIL=$((FAIL+1))
      fi
    }

    check "GET /api/health" "http://localhost:$PORT/api/health" '"status":"healthy"'
    check "GET /api/status" "http://localhost:$PORT/api/status" '"version"'
    check "GET /api/metrics" "http://localhost:$PORT/api/metrics" '"banner_api"'

    # Test 404
    STATUS=$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:$PORT/api/nonexistent")
    if [ "$STATUS" = "404" ]; then
      echo "PASS: 404 on unknown route"
      PASS=$((PASS+1))
    else
      echo "FAIL: expected 404, got $STATUS"
      FAIL=$((FAIL+1))
    fi

    echo ""
    echo "Results: $PASS passed, $FAIL failed"
    [ "$FAIL" -eq 0 ]

alias b := bun
bun *ARGS:
    cd web && bun {{ ARGS }}
README.md

@@ -29,8 +29,7 @@ The application consists of three modular services that can be run independently
bun install --cwd web # Install frontend dependencies
cargo build # Build the backend

just dev # Runs auto-reloading dev build
just dev --services bot,web # Runs auto-reloading dev build, running only the bot and web services
just dev # Runs auto-reloading dev build with all services
just dev-build # Development build with release characteristics (frontend is embedded, non-auto-reloading)

just build # Production build that embeds assets
docs/ARCHITECTURE.md +27 -4

@@ -4,10 +4,33 @@

The Banner project is built as a multi-service application with the following components:

- **Discord Bot Service**: Handles Discord interactions and commands
- **Web Service**: Serves the React frontend and provides API endpoints
- **Scraper Service**: Background data collection and synchronization
- **Database Layer**: PostgreSQL for persistent storage
- **Discord Bot Service**: Handles Discord interactions and commands (Serenity/Poise)
- **Web Service**: Axum HTTP server serving the SvelteKit frontend and REST API endpoints
- **Scraper Service**: Background data collection and synchronization with job queue
- **Database Layer**: PostgreSQL 17 for persistent storage (SQLx with compile-time verification)
- **RateMyProfessors Client**: GraphQL-based bulk sync of professor ratings

### Frontend Stack

- **SvelteKit** with Svelte 5 runes (`$state`, `$derived`, `$effect`)
- **Tailwind CSS v4** via `@tailwindcss/vite`
- **bits-ui** for headless UI primitives (comboboxes, tooltips, dropdowns)
- **TanStack Table** for interactive data tables with sorting and column control
- **OverlayScrollbars** for styled, theme-aware scrollable areas
- **ts-rs** generates TypeScript type bindings from Rust structs

### API Endpoints

| Endpoint | Description |
|---|---|
| `GET /api/health` | Health check |
| `GET /api/status` | Service status, version, and commit hash |
| `GET /api/metrics` | Basic metrics |
| `GET /api/courses/search` | Paginated course search with filters (term, subject, query, open-only, sort) |
| `GET /api/courses/:term/:crn` | Single course detail with instructors and RMP ratings |
| `GET /api/terms` | Available terms from reference cache |
| `GET /api/subjects?term=` | Subjects for a term, ordered by enrollment |
| `GET /api/reference/:category` | Reference data lookups (campuses, instructional methods, etc.) |

## Technical Analysis
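As a quick illustration of consuming these endpoints from the frontend, a hedged sketch: the real response types live in the ts-rs-generated bindings under `web/src/lib/bindings/`, so the shape below is only a stand-in.

```typescript
// Stand-in types: the actual bindings are generated by ts-rs into
// web/src/lib/bindings/ and will differ in detail.
interface SearchResponse {
  courses: unknown[];
  total: number;
}

async function searchCourses(
  term: string,
  subject: string,
  query = "",
): Promise<SearchResponse> {
  // Parameter names mirror the filters listed in the endpoint table above.
  const params = new URLSearchParams({ term, subject, query });
  const res = await fetch(`/api/courses/search?${params}`);
  if (!res.ok) throw new Error(`search failed: ${res.status}`);
  return (await res.json()) as SearchResponse;
}
```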
docs/CHANGELOG.md +78 -1

@@ -6,7 +6,84 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).

## [Unreleased]

## [0.1.0] - 2026-01
## [0.6.0] - 2026-01-30

### Added

- User authentication system with Discord OAuth, sessions, admin roles, and login page with FAQ.
- Interactive timeline visualization with D3 canvas, pan/zoom, touch gestures, and enrollment aggregation API.
- Scraper analytics dashboard with timeseries charts, subject monitoring, and per-subject detail views.
- Adaptive scraper scheduling with admin endpoints for monitoring and configuration.
- Scrape job result persistence for effectiveness tracking.
- WebSocket support for real-time scrape job monitoring with connection status indicators.
- Course change auditing with field-level tracking and time-series metrics endpoint.
- Audit log UI with smart JSON diffing, conditional request caching, and auto-refresh.
- Calendar export web endpoints for ICS download and Google Calendar redirect.
- Confidence-based RMP matching with manual review workflow and admin instructor UI.
- RMP profile links and confidence-aware rating display.
- Name parsing and normalization for improved instructor-RMP matching.
- Mobile touch controls with gesture detection for timeline.
- Worker timeout protection and crash recovery for job queue.
- Build-time asset compression with encoding negotiation (gzip, brotli, zstd).
- Smart page transitions with theme-aware element transitions.
- Search duration and result count feedback.
- Root error page handling.
- Login page with FAQ section and improved styling.

### Changed

- Consolidated navigation with top nav bar and route groups.
- Centralized number formatting with locale-aware utility.
- Modernized Justfile commands and simplified service management.
- Persisted audit log state in module scope for cross-navigation caching.
- Relative time feedback and improved tooltip customization.

### Fixed

- Instructor/course mismatching via build-order-independent map for association.
- Page content clipping.
- Backend startup delays with retry logic in auth.
- Banner API timeouts increased to handle slow responses.
- i64 serialization for JavaScript compatibility, fixing avatar URL display.
- Frontend build ordering with `-e` embed flag in Justfile.
- Login page centering and unnecessary scrollbar.
- ts-rs serde warnings.

## [0.5.0] - 2026-01-29

### Added

- Multi-select subject filtering with searchable comboboxes.
- Smart instructor name abbreviation for compact table display.
- Delivery mode indicators and tooltips in location column.
- Page selector dropdown with animated pagination controls.
- FLIP animations for smooth table row transitions during pagination.
- Time tooltip with detailed meeting schedule and day abbreviations.
- Reusable SimpleTooltip component for consistent UI hints.

### Changed

- Consolidated query logic and eliminated N+1 instructor loads via batch fetching.
- Consolidated menu snippets and strengthened component type safety.
- Enhanced table scrolling with OverlayScrollbars and theme-aware styling.
- Eliminated initial theme flash on page load.

## [0.4.0] - 2026-01-28

### Added

- Web-based course search UI with interactive data table, multi-column sorting, and column visibility controls.
- TypeScript type bindings generated from Rust types via ts-rs.
- RateMyProfessors integration: bulk professor sync via GraphQL and inline rating display in search results.
- Course detail expansion panel with enrollment, meeting times, and instructor info.
- OverlayScrollbars integration for styled, theme-aware scrollable areas.
- Pagination component for navigating large search result sets.
- Footer component with version display.
- API endpoints: `/api/courses/search`, `/api/courses/:term/:crn`, `/api/terms`, `/api/subjects`, `/api/reference/:category`.
- Frontend API client with typed request/response handling and test coverage.
- Course formatting utilities with comprehensive unit tests.

## [0.3.4] - 2026-01

### Added
docs/README.md +2 -1

@@ -4,7 +4,8 @@ This folder contains detailed documentation for the Banner project. This file ac

## Files

- [`FEATURES.md`](FEATURES.md) - Current features, implemented functionality, and future roadmap
- [`CHANGELOG.md`](CHANGELOG.md) - Notable changes by version
- [`ROADMAP.md`](ROADMAP.md) - Planned features and priorities
- [`BANNER.md`](BANNER.md) - General API documentation on the Banner system
- [`ARCHITECTURE.md`](ARCHITECTURE.md) - Technical implementation details, system design, and analysis
docs/ROADMAP.md +28 -15

@@ -2,30 +2,43 @@

## Now

- **Notification and subscription system** - Subscribe to courses and get alerts on seat availability, waitlist movement, and detail changes (time, location, professor, seats). DB schema exists.
- **RateMyProfessor integration** - Show professor ratings inline with search results and course details.
- **Discord bot revival** - Audit and fix all existing commands (search, terms, ics, gcal) against the current data model. Add test coverage. Bot has been untouched since ~0.3.4 and commands may be broken.
- **Notification and subscription system** - Subscribe to courses and get alerts on seat availability, waitlist movement, and detail changes (time, location, professor, seats). Deliver via Discord bot and web dashboard.
- **Mobile/responsive redesign** - Hamburger nav for sidebar, responsive table column hiding, mobile-friendly admin pages. Timeline is the only area with solid mobile support; most pages need work.
- **Professor name search filter** - Filter search results by instructor. Backend code exists but is commented out.
- **Subject/major search filter** - Search by department code (e.g. CS, MAT). Also partially implemented.
- **Autocomplete for search fields** - Typeahead for course titles, course numbers, professors, and terms.
- **Test coverage expansion** - Broaden coverage with pure function tests (term parsing, search parsing, job types), session/rate-limiter tests, and more DB integration tests.
- **Web course search UI** - Add a browser-based course search interface to the dashboard, supplementing the Discord bot.
- **Search field autocomplete** - Typeahead for course titles, course numbers, professors, and terms.
- **Large component extraction** - Break down CourseTable, Instructors page, and TimelineCanvas into smaller, testable subcomponents.

## Soon

- **Smart time-of-day search parsing** - Support natural queries like "2 PM", "2-3 PM", "ends by 2 PM", "after 2 PM", "before 2 PM" mapped to time ranges.
- **Section-based lookup** - Search by full section identifier, e.g. "CS 4393 001".
- **Search result pagination** - Paginated embeds for large result sets in Discord.
- **Bot slash command parity** - Keep Discord bot commands in sync with web features: timeline summaries, RMP lookups, audit log highlights, notification management via bot.
- **E2E test suite** - Playwright tests for critical user flows: search, login, admin pages, timeline interaction.
- **Settings page** - Replace placeholder with theme preferences, notification settings, default term/subject selection.
- **Profile enhancements** - Expand from read-only stub to subscription management, saved searches, and course watchlists.
- **Smart time-of-day search parsing** - Support natural queries like "2 PM", "ends by 2 PM", "after 2 PM" mapped to time ranges.
- **Multi-term querying** - Query across multiple terms in a single search instead of one at a time.
- **Historical analytics** - Track seat availability over time and visualize fill-rate trends per course or professor.
- **Schedule builder** - Visual weekly schedule tool for assembling a conflict-free course lineup.
- **Professor stats** - Aggregate data views: average class size, typical waitlist length, schedule patterns across semesters.
- **Historical analytics visualization** - Build trend UI on top of existing course metrics and timeline API. Fill-rate charts per course or professor.
- **Schedule builder** - Visual weekly schedule tool for assembling a conflict-free course lineup. Timeline visualization serves as a foundation.

## Eventually

- **API rate limiting** - Rate limiter on public API endpoints. Needed before any public or external exposure.
- **Bulk admin operations** - Batch RMP match/reject, bulk user management, data export from admin pages.
- **Degree audit helper** - Map available courses to degree requirements and suggest what to take next.
- **Dynamic scraper scheduling** - Adjust scrape intervals based on change frequency and course count (e.g. 2 hours per 500 courses, shorter intervals when changes are detected).
- **DM support** - Allow the Discord bot to respond in direct messages, not just guild channels.
- **"Classes Now" command** - Find classes currently in session based on the current day and time.
- **CRN direct lookup** - Look up a course by its CRN without going through search.
- **Metrics dashboard** - Surface scraper and service metrics visually on the web dashboard.
- **Privileged error feedback** - Detailed error information surfaced to bot admins when commands fail.

## Done

- **Interactive timeline visualization** - D3 canvas with pan/zoom, touch gestures, and enrollment aggregation API. *(0.6.0)*
- **Scraper analytics dashboard** - Timeseries charts, subject monitoring, adaptive scheduling, and admin endpoints. *(0.6.0)*
- **WebSocket job monitoring** - Real-time scrape job queue with live connection status indicators. *(0.6.0)*
- **Course change audit log** - Field-level change tracking with smart diffing, conditional caching, and auto-refresh. *(0.6.0)*
- **User authentication system** - Discord OAuth, sessions, admin roles, and login page. *(0.6.0)*
- **Dynamic scraper scheduling** - Adaptive scrape intervals based on change frequency and course volume. *(0.6.0)*
- **Metrics dashboard** - Scraper and service metrics surfaced on the web dashboard. *(0.6.0)*
- **Subject/major search filter** - Multi-select subject filtering with searchable comboboxes. *(0.5.0)*
- **Web course search UI** - Browser-based course search with interactive data table, sorting, pagination, and column controls. *(0.4.0)*
- **RateMyProfessor integration** - Bulk professor sync via GraphQL with inline ratings in search results. *(0.4.0)*
- **Test coverage expansion** - Unit tests for course formatting, API client, query builder, CLI args, and config parsing. *(0.3.4–0.4.0)*
New SQL migration: scrape_jobs.queued_at +7

@@ -0,0 +1,7 @@
-- Add queued_at column to track when a job last entered the "ready to pick up" state.
-- For fresh jobs this equals execute_at; for retried jobs it is updated to NOW().
ALTER TABLE scrape_jobs
ADD COLUMN queued_at TIMESTAMPTZ NOT NULL DEFAULT NOW();

-- Backfill existing rows: set queued_at = execute_at (best approximation)
UPDATE scrape_jobs SET queued_at = execute_at;
New SQL migration: users and user_sessions +19

@@ -0,0 +1,19 @@
CREATE TABLE users (
    discord_id BIGINT PRIMARY KEY,
    discord_username TEXT NOT NULL,
    discord_avatar_hash TEXT,
    is_admin BOOLEAN NOT NULL DEFAULT false,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE TABLE user_sessions (
    id TEXT PRIMARY KEY,
    user_id BIGINT NOT NULL REFERENCES users(discord_id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    expires_at TIMESTAMPTZ NOT NULL,
    last_active_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE INDEX idx_user_sessions_user_id ON user_sessions(user_id);
CREATE INDEX idx_user_sessions_expires_at ON user_sessions(expires_at);
New SQL migration: instructor dedup and RMP match candidates +80

@@ -0,0 +1,80 @@
-- Collapse instructors from per-banner-id rows to per-person rows (deduped by lowercased email).
-- All existing RMP matches are deliberately dropped; the new auto-matcher will re-score from scratch.

-- 1. Create the new instructors table (1 row per person, keyed by email)
CREATE TABLE instructors_new (
    id SERIAL PRIMARY KEY,
    display_name VARCHAR NOT NULL,
    email VARCHAR NOT NULL,
    rmp_professor_id INTEGER UNIQUE REFERENCES rmp_professors(legacy_id),
    rmp_match_status VARCHAR NOT NULL DEFAULT 'unmatched',
    CONSTRAINT instructors_email_unique UNIQUE (email)
);

-- 2. Populate from existing data, deduplicating by lowercased email.
-- For each email, pick the display_name from the row with the highest banner_id
-- (deterministic tiebreaker). All rmp fields start fresh (NULL / 'unmatched').
INSERT INTO instructors_new (display_name, email)
SELECT DISTINCT ON (LOWER(email))
    display_name,
    LOWER(email)
FROM instructors
ORDER BY LOWER(email), banner_id DESC;

-- 3. Create the new course_instructors table with integer FK and banner_id column
CREATE TABLE course_instructors_new (
    course_id INTEGER NOT NULL REFERENCES courses(id) ON DELETE CASCADE,
    instructor_id INTEGER NOT NULL REFERENCES instructors_new(id) ON DELETE CASCADE,
    banner_id VARCHAR NOT NULL,
    is_primary BOOLEAN NOT NULL DEFAULT false,
    PRIMARY KEY (course_id, instructor_id)
);

-- 4. Populate from old data, mapping old banner_id → new instructor id via lowercased email.
-- Use DISTINCT ON to handle cases where multiple old banner_ids for the same person
-- taught the same course (would cause duplicate (course_id, instructor_id) pairs).
INSERT INTO course_instructors_new (course_id, instructor_id, banner_id, is_primary)
SELECT DISTINCT ON (ci.course_id, inew.id)
    ci.course_id,
    inew.id,
    ci.instructor_id, -- old banner_id
    ci.is_primary
FROM course_instructors ci
JOIN instructors iold ON iold.banner_id = ci.instructor_id
JOIN instructors_new inew ON inew.email = LOWER(iold.email)
ORDER BY ci.course_id, inew.id, ci.is_primary DESC;

-- 5. Drop old tables (course_instructors first due to FK dependency)
DROP TABLE course_instructors;
DROP TABLE instructors;

-- 6. Rename new tables into place
ALTER TABLE instructors_new RENAME TO instructors;
ALTER TABLE course_instructors_new RENAME TO course_instructors;

-- 7. Rename constraints to match the final table names
ALTER TABLE instructors RENAME CONSTRAINT instructors_new_pkey TO instructors_pkey;
ALTER TABLE instructors RENAME CONSTRAINT instructors_new_rmp_professor_id_key TO instructors_rmp_professor_id_key;
ALTER TABLE course_instructors RENAME CONSTRAINT course_instructors_new_pkey TO course_instructors_pkey;

-- 8. Recreate indexes
CREATE INDEX idx_course_instructors_instructor ON course_instructors (instructor_id);
CREATE INDEX idx_instructors_rmp_status ON instructors (rmp_match_status);
CREATE INDEX idx_instructors_email ON instructors (email);

-- 9. Create rmp_match_candidates table
CREATE TABLE rmp_match_candidates (
    id SERIAL PRIMARY KEY,
    instructor_id INTEGER NOT NULL REFERENCES instructors(id) ON DELETE CASCADE,
    rmp_legacy_id INTEGER NOT NULL REFERENCES rmp_professors(legacy_id),
    score REAL NOT NULL,
    score_breakdown JSONB NOT NULL DEFAULT '{}',
    status VARCHAR NOT NULL DEFAULT 'pending',
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    resolved_at TIMESTAMPTZ,
    resolved_by BIGINT REFERENCES users(discord_id),
    CONSTRAINT uq_candidate_pair UNIQUE (instructor_id, rmp_legacy_id)
);

CREATE INDEX idx_match_candidates_instructor ON rmp_match_candidates (instructor_id);
CREATE INDEX idx_match_candidates_status ON rmp_match_candidates (status);
@@ -0,0 +1,24 @@
-- Multi-RMP profile support: allow many RMP profiles per instructor.
-- Each RMP profile still links to at most one instructor (rmp_legacy_id UNIQUE).

-- 1. Create junction table
CREATE TABLE instructor_rmp_links (
    id SERIAL PRIMARY KEY,
    instructor_id INTEGER NOT NULL REFERENCES instructors(id) ON DELETE CASCADE,
    rmp_legacy_id INTEGER NOT NULL UNIQUE REFERENCES rmp_professors(legacy_id),
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_by BIGINT REFERENCES users(discord_id),
    source VARCHAR NOT NULL DEFAULT 'manual' -- 'auto' | 'manual'
);

CREATE INDEX idx_instructor_rmp_links_instructor ON instructor_rmp_links (instructor_id);

-- 2. Migrate existing matches
INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, source)
SELECT id, rmp_professor_id,
    CASE rmp_match_status WHEN 'auto' THEN 'auto' ELSE 'manual' END
FROM instructors
WHERE rmp_professor_id IS NOT NULL;

-- 3. Drop old column (and its unique constraint)
ALTER TABLE instructors DROP COLUMN rmp_professor_id;
@@ -0,0 +1,31 @@
-- Scrape job results log: one row per completed (or failed) job for effectiveness tracking.
CREATE TABLE scrape_job_results (
    id BIGSERIAL PRIMARY KEY,
    target_type target_type NOT NULL,
    payload JSONB NOT NULL,
    priority scrape_priority NOT NULL,

    -- Timing
    queued_at TIMESTAMPTZ NOT NULL,
    started_at TIMESTAMPTZ NOT NULL,
    completed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    duration_ms INT NOT NULL,

    -- Outcome
    success BOOLEAN NOT NULL,
    error_message TEXT,
    retry_count INT NOT NULL DEFAULT 0,

    -- Effectiveness (NULL when success = false)
    courses_fetched INT,
    courses_changed INT,
    courses_unchanged INT,
    audits_generated INT,
    metrics_generated INT
);

CREATE INDEX idx_scrape_job_results_target_time
    ON scrape_job_results (target_type, completed_at);

CREATE INDEX idx_scrape_job_results_completed
    ON scrape_job_results (completed_at);
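For illustration, the scraper-side writer for this table might look like the sketch below. The function, the enum labels, and the decision to bind the Postgres enums as casted text are all assumptions; the actual recording code is not shown in this diff.

// Hypothetical writer for scrape_job_results; enum binds are simplified to
// text with casts, and queued_at/started_at are collapsed to NOW() here.
// Requires sqlx's `json` feature for the serde_json::Value bind.
async fn record_job_result(
    pool: &sqlx::PgPool,
    payload: serde_json::Value,
    success: bool,
    duration_ms: i32,
) -> Result<(), sqlx::Error> {
    sqlx::query(
        "INSERT INTO scrape_job_results \
         (target_type, payload, priority, queued_at, started_at, duration_ms, success) \
         VALUES ($1::target_type, $2, $3::scrape_priority, NOW(), NOW(), $4, $5)",
    )
    .bind("subject") // illustrative target_type label
    .bind(payload)
    .bind("normal") // illustrative scrape_priority label
    .bind(duration_ms)
    .bind(success)
    .execute(pool)
    .await?;
    Ok(())
}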
@@ -0,0 +1,13 @@
-- Indexes for the timeline aggregation endpoint.
-- The query buckets course_metrics by 15-minute intervals, joins to courses
-- for subject, and aggregates enrollment. These indexes support efficient
-- time-range scans and the join.

-- Primary access pattern: scan course_metrics by timestamp range
CREATE INDEX IF NOT EXISTS idx_course_metrics_timestamp
    ON course_metrics (timestamp);

-- Composite index for the DISTINCT ON (bucket, course_id) ordered by timestamp DESC
-- to efficiently pick the latest metric per course per bucket.
CREATE INDEX IF NOT EXISTS idx_course_metrics_course_timestamp
    ON course_metrics (course_id, timestamp DESC);
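The aggregation query itself is not part of this migration. Based on the comments above, a plausible shape for it is sketched below; it assumes Postgres 14+ `date_bin`, sqlx's `chrono` feature, and column names taken from these migrations (everything else is a guess):

// Hypothetical timeline query served by the two indexes above: latest metric
// per (15-minute bucket, course), summed by subject.
async fn enrollment_timeline(
    pool: &sqlx::PgPool,
    start: chrono::DateTime<chrono::Utc>,
    end: chrono::DateTime<chrono::Utc>,
) -> Result<Vec<(chrono::DateTime<chrono::Utc>, String, i64)>, sqlx::Error> {
    sqlx::query_as(
        r#"
        SELECT bucket, subject, SUM(enrollment)::int8 AS enrollment
        FROM (
            SELECT DISTINCT ON (date_bin('15 minutes', cm.timestamp, TIMESTAMPTZ 'epoch'), cm.course_id)
                date_bin('15 minutes', cm.timestamp, TIMESTAMPTZ 'epoch') AS bucket,
                c.subject,
                cm.enrollment
            FROM course_metrics cm
            JOIN courses c ON c.id = cm.course_id
            WHERE cm.timestamp >= $1 AND cm.timestamp < $2
            ORDER BY date_bin('15 minutes', cm.timestamp, TIMESTAMPTZ 'epoch'), cm.course_id, cm.timestamp DESC
        ) latest
        GROUP BY bucket, subject
        ORDER BY bucket
        "#,
    )
    .bind(start)
    .bind(end)
    .fetch_all(pool)
    .await
}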
@@ -0,0 +1,5 @@
-- Add structured first/last name columns to instructors.
-- Populated by Rust-side backfill (parse_banner_name) since we need
-- HTML entity decoding and suffix extraction that SQL can't handle well.
ALTER TABLE instructors ADD COLUMN first_name VARCHAR;
ALTER TABLE instructors ADD COLUMN last_name VARCHAR;
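The backfill itself is application code, not this migration. As a rough illustration of the parsing problem it solves — "Last, First Suffix" strings with HTML entities — here is a simplified stand-in; this is not the actual `parse_banner_name`, and the entity and suffix rules are assumptions:

// Simplified stand-in for parse_banner_name; returns (first, last).
fn parse_name_sketch(raw: &str) -> (Option<String>, Option<String>) {
    // Minimal entity decoding; the real code likely uses a proper decoder.
    let decoded = raw.replace("&amp;", "&").replace("&#39;", "'");
    let Some((last, first)) = decoded.split_once(',') else {
        return (None, None); // no comma: give up rather than guess
    };
    // Strip a trailing generational suffix from the first-name side.
    let first = ["Jr.", "Sr.", "II", "III", "IV"]
        .iter()
        .fold(first.trim().to_string(), |f, s| {
            f.trim_end_matches(*s).trim_end().to_string()
        });
    (Some(first), Some(last.trim().to_string()))
}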
+31 -2
@@ -6,6 +6,7 @@ use crate::services::bot::BotService;
 use crate::services::manager::ServiceManager;
 use crate::services::web::WebService;
 use crate::state::AppState;
+use crate::web::auth::AuthConfig;
 use anyhow::Context;
 use figment::value::UncasedStr;
 use figment::{Figment, providers::Env};
@@ -13,7 +14,7 @@ use sqlx::postgres::PgPoolOptions;
 use std::process::ExitCode;
 use std::sync::Arc;
 use std::time::Duration;
-use tracing::{error, info};
+use tracing::{error, info, warn};
 
 /// Main application struct containing all necessary components
 pub struct App {
@@ -69,6 +70,11 @@ impl App {
             .context("Failed to run database migrations")?;
         info!("Database migrations completed successfully");
 
+        // Backfill structured name columns for existing instructors
+        if let Err(e) = crate::data::names::backfill_instructor_names(&db_pool).await {
+            warn!(error = ?e, "Failed to backfill instructor names (non-fatal)");
+        }
+
         // Create BannerApi and AppState
         let banner_api = BannerApi::new_with_config(
            config.banner_base_url.clone(),
@@ -84,6 +90,19 @@ impl App {
             info!(error = ?e, "Could not load reference cache on startup (may be empty)");
         }
 
+        // Load schedule cache for timeline enrollment queries
+        if let Err(e) = app_state.schedule_cache.load().await {
+            info!(error = ?e, "Could not load schedule cache on startup (may be empty)");
+        }
+
+        // Seed the initial admin user if configured
+        if let Some(admin_id) = config.admin_discord_id {
+            let user = crate::data::users::ensure_seed_admin(&db_pool, admin_id as i64)
+                .await
+                .context("Failed to seed admin user")?;
+            info!(discord_id = admin_id, username = %user.discord_username, "Seed admin ensured");
+        }
+
         Ok(App {
             config,
             db_pool,
@@ -97,7 +116,16 @@ impl App {
     pub fn setup_services(&mut self, services: &[ServiceName]) -> Result<(), anyhow::Error> {
         // Register enabled services with the manager
         if services.contains(&ServiceName::Web) {
-            let web_service = Box::new(WebService::new(self.config.port, self.app_state.clone()));
+            let auth_config = AuthConfig {
+                client_id: self.config.discord_client_id.clone(),
+                client_secret: self.config.discord_client_secret.clone(),
+                redirect_base: self.config.discord_redirect_uri.clone(),
+            };
+            let web_service = Box::new(WebService::new(
+                self.config.port,
+                self.app_state.clone(),
+                auth_config,
+            ));
             self.service_manager
                 .register_service(ServiceName::Web.as_str(), web_service);
         }
@@ -108,6 +136,7 @@ impl App {
             self.banner_api.clone(),
             self.app_state.reference_cache.clone(),
             self.app_state.service_statuses.clone(),
+            self.app_state.scrape_job_tx.clone(),
         ));
         self.service_manager
             .register_service(ServiceName::Scraper.as_str(), scraper_service);
+3 -3
@@ -40,9 +40,9 @@ impl BannerApi {
             .cookie_store(false)
             .user_agent(user_agent())
             .tcp_keepalive(Some(std::time::Duration::from_secs(60 * 5)))
-            .read_timeout(std::time::Duration::from_secs(10))
-            .connect_timeout(std::time::Duration::from_secs(10))
-            .timeout(std::time::Duration::from_secs(30))
+            .read_timeout(std::time::Duration::from_secs(20))
+            .connect_timeout(std::time::Duration::from_secs(15))
+            .timeout(std::time::Duration::from_secs(40))
             .build()
             .context("Failed to create HTTP client")?,
     )
@@ -325,6 +325,7 @@ mod tests {
     fn test_parse_json_with_context_null_value() {
         #[derive(Debug, Deserialize)]
         struct TestStruct {
+            #[allow(dead_code)]
            name: String,
         }
 
@@ -363,12 +364,14 @@ mod tests {
         #[allow(dead_code)]
         #[serde(rename = "courseTitle")]
         course_title: String,
+        #[allow(dead_code)]
         faculty: Vec<Faculty>,
     }
 
     #[derive(Debug, Deserialize)]
     struct Faculty {
         #[serde(rename = "displayName")]
+        #[allow(dead_code)]
         display_name: String,
         #[allow(dead_code)]
         email: String,
@@ -376,6 +379,7 @@ mod tests {
 
     #[derive(Debug, Deserialize)]
     struct SearchResult {
+        #[allow(dead_code)]
         data: Vec<Course>,
     }
 
@@ -320,10 +320,11 @@ pub enum MeetingType {
     Unknown(String),
 }
 
-impl MeetingType {
-    /// Parse from the meeting type string
-    pub fn from_string(s: &str) -> Self {
-        match s {
+impl std::str::FromStr for MeetingType {
+    type Err = std::convert::Infallible;
+
+    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
+        Ok(match s {
             "HB" | "H2" | "H1" => MeetingType::HybridBlended,
             "OS" => MeetingType::OnlineSynchronous,
             "OA" => MeetingType::OnlineAsynchronous,
@@ -331,9 +332,11 @@ impl MeetingType {
             "ID" => MeetingType::IndependentStudy,
             "FF" => MeetingType::FaceToFace,
             other => MeetingType::Unknown(other.to_string()),
-        }
+        })
     }
+}
 
+impl MeetingType {
     /// Get description for the meeting type
     pub fn description(&self) -> &'static str {
         match self {
@@ -424,7 +427,7 @@ impl MeetingScheduleInfo {
                 end: now,
             }
         });
-        let meeting_type = MeetingType::from_string(&meeting_time.meeting_type);
+        let meeting_type: MeetingType = meeting_time.meeting_type.parse().unwrap();
         let location = MeetingLocation::from_meeting_time(meeting_time);
         let duration_weeks = date_range.weeks_duration();
 
+3 -14
@@ -10,8 +10,9 @@ pub struct Range {
     pub high: i32,
 }
 
-/// Builder for constructing Banner API search queries
+/// Builder for constructing Banner API search queries.
 #[derive(Debug, Clone, Default)]
+#[allow(dead_code)]
 pub struct SearchQuery {
     subject: Option<String>,
     title: Option<String>,
@@ -32,6 +33,7 @@ pub struct SearchQuery {
     course_number_range: Option<Range>,
 }
 
+#[allow(dead_code)]
 impl SearchQuery {
     /// Creates a new SearchQuery with default values
     pub fn new() -> Self {
@@ -67,7 +69,6 @@ impl SearchQuery {
     }
 
     /// Adds a keyword to the query
-    #[allow(dead_code)]
     pub fn keyword<S: Into<String>>(mut self, keyword: S) -> Self {
         match &mut self.keywords {
             Some(keywords) => keywords.push(keyword.into()),
@@ -77,63 +78,54 @@ impl SearchQuery {
     }
 
     /// Sets whether to search for open courses only
-    #[allow(dead_code)]
     pub fn open_only(mut self, open_only: bool) -> Self {
         self.open_only = Some(open_only);
         self
     }
 
     /// Sets the term part for the query
-    #[allow(dead_code)]
     pub fn term_part(mut self, term_part: Vec<String>) -> Self {
         self.term_part = Some(term_part);
         self
     }
 
     /// Sets the campuses for the query
-    #[allow(dead_code)]
     pub fn campus(mut self, campus: Vec<String>) -> Self {
         self.campus = Some(campus);
         self
     }
 
     /// Sets the instructional methods for the query
-    #[allow(dead_code)]
     pub fn instructional_method(mut self, instructional_method: Vec<String>) -> Self {
         self.instructional_method = Some(instructional_method);
         self
     }
 
     /// Sets the attributes for the query
-    #[allow(dead_code)]
     pub fn attributes(mut self, attributes: Vec<String>) -> Self {
         self.attributes = Some(attributes);
         self
     }
 
     /// Sets the instructors for the query
-    #[allow(dead_code)]
     pub fn instructor(mut self, instructor: Vec<u64>) -> Self {
         self.instructor = Some(instructor);
         self
     }
 
     /// Sets the start time for the query
-    #[allow(dead_code)]
     pub fn start_time(mut self, start_time: Duration) -> Self {
         self.start_time = Some(start_time);
         self
     }
 
     /// Sets the end time for the query
-    #[allow(dead_code)]
     pub fn end_time(mut self, end_time: Duration) -> Self {
         self.end_time = Some(end_time);
         self
     }
 
     /// Sets the credit range for the query
-    #[allow(dead_code)]
     pub fn credits(mut self, low: i32, high: i32) -> Self {
         self.min_credits = Some(low);
         self.max_credits = Some(high);
@@ -141,14 +133,12 @@ impl SearchQuery {
     }
 
     /// Sets the minimum credits for the query
-    #[allow(dead_code)]
     pub fn min_credits(mut self, value: i32) -> Self {
         self.min_credits = Some(value);
         self
     }
 
     /// Sets the maximum credits for the query
-    #[allow(dead_code)]
     pub fn max_credits(mut self, value: i32) -> Self {
         self.max_credits = Some(value);
         self
@@ -161,7 +151,6 @@ impl SearchQuery {
     }
 
     /// Sets the offset for pagination
-    #[allow(dead_code)]
     pub fn offset(mut self, offset: i32) -> Self {
         self.offset = offset;
         self
+112 -64
@@ -11,7 +11,9 @@ use rand::distr::{Alphanumeric, SampleString};
 use reqwest_middleware::ClientWithMiddleware;
 use std::collections::{HashMap, VecDeque};
 
+use std::mem::ManuallyDrop;
 use std::ops::{Deref, DerefMut};
+use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::{Arc, LazyLock};
 use std::time::{Duration, Instant};
 use tokio::sync::{Mutex, Notify};
@@ -121,6 +123,64 @@
 #[cfg(test)]
 mod tests {
     use super::*;
     use std::time::Duration;
 
+    /// Verifies that cancelling `acquire()` mid-session-creation resets `is_creating`,
+    /// allowing subsequent callers to proceed rather than deadlocking.
+    #[tokio::test]
+    async fn test_acquire_not_deadlocked_after_cancellation() {
+        use tokio::sync::mpsc;
+
+        let (tx, mut rx) = mpsc::channel::<()>(10);
+
+        // Local server: /registration signals arrival via `tx`, then hangs forever.
+        let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap();
+        let addr = listener.local_addr().unwrap();
+
+        let app = axum::Router::new().route(
+            "/StudentRegistrationSsb/registration",
+            axum::routing::get(move || {
+                let tx = tx.clone();
+                async move {
+                    let _ = tx.send(()).await;
+                    std::future::pending::<&str>().await
+                }
+            }),
+        );
+        tokio::spawn(async move {
+            axum::serve(listener, app).await.unwrap();
+        });
+
+        let base_url = format!("http://{}/StudentRegistrationSsb", addr);
+        let client = reqwest_middleware::ClientBuilder::new(
+            reqwest::Client::builder()
+                .timeout(Duration::from_secs(300))
+                .build()
+                .unwrap(),
+        )
+        .build();
+
+        let pool = SessionPool::new(client, base_url);
+        let term: Term = "202620".parse().unwrap();
+
+        // First acquire: cancel once the request reaches the server.
+        tokio::select! {
+            _ = pool.acquire(term) => panic!("server hangs — acquire should never complete"),
+            _ = rx.recv() => {} // Request arrived; dropping the future simulates timeout cancellation.
+        }
+
+        // Second acquire: verify it reaches the server (i.e., is_creating was reset).
+        // The global rate limiter has a 10s period, so allow 15s for the second attempt.
+        tokio::select! {
+            _ = pool.acquire(term) => {}
+            result = tokio::time::timeout(Duration::from_secs(15), rx.recv()) => {
+                assert!(
+                    result.is_ok(),
+                    "acquire() deadlocked — is_creating was not reset after cancellation"
+                );
+            }
+        }
+    }
+
     #[test]
     fn test_new_session_creates_session() {
@@ -200,50 +260,53 @@
     }
 }
 
-/// A smart pointer that returns a BannerSession to the pool when dropped.
+/// A smart pointer that returns a `BannerSession` to the pool when dropped.
 pub struct PooledSession {
-    session: Option<BannerSession>,
-    // This Arc points directly to the term-specific pool.
+    session: ManuallyDrop<BannerSession>,
     pool: Arc<TermPool>,
 }
 
 impl PooledSession {
     pub fn been_used(&self) -> bool {
         self.session.as_ref().unwrap().been_used()
     }
 }
 
 impl Deref for PooledSession {
     type Target = BannerSession;
     fn deref(&self) -> &Self::Target {
-        // The option is only ever None after drop is called, so this is safe.
-        self.session.as_ref().unwrap()
+        &self.session
     }
 }
 
 impl DerefMut for PooledSession {
     fn deref_mut(&mut self) -> &mut Self::Target {
-        self.session.as_mut().unwrap()
+        &mut self.session
     }
 }
 
 /// The magic happens here: when the guard goes out of scope, this is called.
 impl Drop for PooledSession {
     fn drop(&mut self) {
-        if let Some(session) = self.session.take() {
-            let pool = self.pool.clone();
-            // Since drop() cannot be async, we spawn a task to return the session.
-            tokio::spawn(async move {
-                pool.release(session).await;
-            });
-        }
+        // SAFETY: `drop` is called exactly once by Rust's drop semantics,
+        // so `ManuallyDrop::take` is guaranteed to see a valid value.
+        let session = unsafe { ManuallyDrop::take(&mut self.session) };
+        let pool = self.pool.clone();
+        tokio::spawn(async move {
+            pool.release(session).await;
+        });
     }
 }
 
 pub struct TermPool {
     sessions: Mutex<VecDeque<BannerSession>>,
     notifier: Notify,
-    is_creating: Mutex<bool>,
+    is_creating: AtomicBool,
 }
 
+/// RAII guard ensuring `is_creating` is reset on drop for cancellation safety.
+/// Without this, a cancelled `acquire()` future would leave the flag set permanently,
+/// deadlocking all subsequent callers.
+struct CreatingGuard(Arc<TermPool>);
+
+impl Drop for CreatingGuard {
+    fn drop(&mut self) {
+        self.0.is_creating.store(false, Ordering::Release);
+        self.0.notifier.notify_waiters();
+    }
+}
+
 impl TermPool {
@@ -251,7 +314,7 @@
         Self {
             sessions: Mutex::new(VecDeque::new()),
             notifier: Notify::new(),
-            is_creating: Mutex::new(false),
+            is_creating: AtomicBool::new(false),
         }
     }
 
@@ -308,7 +371,7 @@ impl SessionPool {
         if let Some(session) = queue.pop_front() {
             if !session.is_expired() {
                 return Ok(PooledSession {
-                    session: Some(session),
+                    session: ManuallyDrop::new(session),
                     pool: Arc::clone(&term_pool),
                 });
             } else {
@@ -317,45 +380,38 @@
            }
        } // MutexGuard is dropped, lock is released.
 
-        // Slow path: No sessions available. We must either wait or become the creator.
-        let mut is_creating_guard = term_pool.is_creating.lock().await;
-        if *is_creating_guard {
-            // Another task is already creating a session. Release the lock and wait.
-            drop(is_creating_guard);
+        // Slow path: wait for an in-progress creation, or become the creator.
+        if term_pool.is_creating.load(Ordering::Acquire) {
             if !waited_for_creation {
                 trace!("Waiting for another task to create session");
                 waited_for_creation = true;
             }
             term_pool.notifier.notified().await;
-            // Loop back to the top to try the fast path again.
             continue;
         }
 
-        // This task is now the designated creator.
-        *is_creating_guard = true;
-        drop(is_creating_guard);
+        // CAS to become the designated creator.
+        if term_pool
+            .is_creating
+            .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
+            .is_err()
+        {
+            continue; // Lost the race — loop back and wait.
+        }
+
+        // Guard resets is_creating on drop (including cancellation).
+        let creating_guard = CreatingGuard(Arc::clone(&term_pool));
 
         // Race: wait for a session to be returned OR for the rate limiter to allow a new one.
         trace!("Pool empty, creating new session");
         tokio::select! {
            _ = term_pool.notifier.notified() => {
-                // A session was returned while we were waiting!
-                // We are no longer the creator. Reset the flag and loop to race for the new session.
-                let mut guard = term_pool.is_creating.lock().await;
-                *guard = false;
-                drop(guard);
+                // A session was returned — release creator role and race for it.
+                drop(creating_guard);
                continue;
            }
            _ = SESSION_CREATION_RATE_LIMITER.until_ready() => {
                // The rate limit has elapsed. It's our job to create the session.
                let new_session_result = self.create_session(&term).await;
 
-                // After creation, we are no longer the creator. Reset the flag
-                // and notify all other waiting tasks.
-                let mut guard = term_pool.is_creating.lock().await;
-                *guard = false;
-                drop(guard);
-                term_pool.notifier.notify_waiters();
+                drop(creating_guard);
 
                match new_session_result {
                    Ok(new_session) => {
@@ -366,12 +422,11 @@
                            "Created new session"
                        );
                        return Ok(PooledSession {
-                            session: Some(new_session),
+                            session: ManuallyDrop::new(new_session),
                            pool: term_pool,
                        });
                    }
                    Err(e) => {
-                        // Propagate the error if session creation failed.
                        return Err(e.context("Failed to create new session in pool"));
                    }
                }
@@ -380,8 +435,8 @@
        }
    }
 
-    /// Sets up initial session cookies by making required Banner API requests
-    pub async fn create_session(&self, term: &Term) -> Result<BannerSession> {
+    /// Sets up initial session cookies by making required Banner API requests.
+    async fn create_session(&self, term: &Term) -> Result<BannerSession> {
        info!(term = %term, "setting up banner session");
 
        // The 'register' or 'search' registration page
@@ -392,22 +447,15 @@
            .await?;
        // TODO: Validate success
 
-        let cookies = initial_registration
+        let cookies: HashMap<String, String> = initial_registration
            .headers()
            .get_all("Set-Cookie")
            .iter()
-            .filter_map(|header_value| {
-                if let Ok(cookie_str) = header_value.to_str() {
-                    if let Ok(cookie) = Cookie::parse(cookie_str) {
-                        Some((cookie.name().to_string(), cookie.value().to_string()))
-                    } else {
-                        None
-                    }
-                } else {
-                    None
-                }
+            .filter_map(|v| {
+                let c = Cookie::parse(v.to_str().ok()?).ok()?;
+                Some((c.name().to_string(), c.value().to_string()))
            })
-            .collect::<HashMap<String, String>>();
+            .collect();
 
        let jsessionid = cookies
            .get("JSESSIONID")
@@ -494,8 +542,8 @@
        Ok(terms)
    }
 
-    /// Selects a term for the current session
-    pub async fn select_term(
+    /// Selects a term for the current session.
+    async fn select_term(
        &self,
        term: &str,
        unique_session_id: &str,
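A minimal usage sketch of the pool's RAII contract, for orientation (the caller function is hypothetical; `acquire`, `been_used`, and the drop behavior all come from the diff above):

// Hypothetical caller: the PooledSession guard returns the session on drop.
async fn with_session(pool: &SessionPool, term: Term) -> anyhow::Result<()> {
    let session = pool.acquire(term).await?;
    // Deref/DerefMut expose the underlying BannerSession here.
    let _ = session.been_used();
    Ok(())
} // dropped here: Drop spawns a task that releases the session back to its TermPool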
+462
@@ -0,0 +1,462 @@
//! Shared calendar generation logic for ICS files and Google Calendar URLs.
//!
//! Used by both the Discord bot commands and the web API endpoints.

use crate::data::models::DbMeetingTime;
use chrono::{Datelike, Duration, NaiveDate, NaiveTime, Weekday};

/// Course metadata needed for calendar generation (shared interface between bot and web).
pub struct CalendarCourse {
    pub crn: String,
    pub subject: String,
    pub course_number: String,
    pub title: String,
    pub sequence_number: Option<String>,
    pub primary_instructor: Option<String>,
}

impl CalendarCourse {
    /// Display title like "CS 1083 - Introduction to Computer Science"
    pub fn display_title(&self) -> String {
        format!("{} {} - {}", self.subject, self.course_number, self.title)
    }

    /// Filename-safe identifier: "CS_1083_001"
    pub fn filename_stem(&self) -> String {
        format!(
            "{}_{}{}",
            self.subject.replace(' ', "_"),
            self.course_number,
            self.sequence_number
                .as_deref()
                .map(|s| format!("_{s}"))
                .unwrap_or_default()
        )
    }
}

// ---------------------------------------------------------------------------
// Date parsing helpers
// ---------------------------------------------------------------------------

/// Parse a date string in either MM/DD/YYYY or YYYY-MM-DD format.
fn parse_date(s: &str) -> Option<NaiveDate> {
    NaiveDate::parse_from_str(s, "%m/%d/%Y")
        .or_else(|_| NaiveDate::parse_from_str(s, "%Y-%m-%d"))
        .ok()
}

/// Parse an HHMM time string into `NaiveTime`.
fn parse_hhmm(s: &str) -> Option<NaiveTime> {
    if s.len() != 4 {
        return None;
    }
    let hours = s[..2].parse::<u32>().ok()?;
    let minutes = s[2..].parse::<u32>().ok()?;
    NaiveTime::from_hms_opt(hours, minutes, 0)
}

/// Active weekdays for a meeting time.
fn active_weekdays(mt: &DbMeetingTime) -> Vec<Weekday> {
    let mapping: [(bool, Weekday); 7] = [
        (mt.monday, Weekday::Mon),
        (mt.tuesday, Weekday::Tue),
        (mt.wednesday, Weekday::Wed),
        (mt.thursday, Weekday::Thu),
        (mt.friday, Weekday::Fri),
        (mt.saturday, Weekday::Sat),
        (mt.sunday, Weekday::Sun),
    ];
    mapping
        .iter()
        .filter(|(active, _)| *active)
        .map(|(_, day)| *day)
        .collect()
}

/// ICS two-letter day code for RRULE BYDAY.
fn ics_day_code(day: Weekday) -> &'static str {
    match day {
        Weekday::Mon => "MO",
        Weekday::Tue => "TU",
        Weekday::Wed => "WE",
        Weekday::Thu => "TH",
        Weekday::Fri => "FR",
        Weekday::Sat => "SA",
        Weekday::Sun => "SU",
    }
}

/// Location string from a `DbMeetingTime`.
fn location_string(mt: &DbMeetingTime) -> String {
    let building = mt
        .building_description
        .as_deref()
        .or(mt.building.as_deref())
        .unwrap_or("");
    let room = mt.room.as_deref().unwrap_or("");
    let combined = format!("{building} {room}").trim().to_string();
    if combined.is_empty() {
        "Online".to_string()
    } else {
        combined
    }
}

/// Days display string (e.g. "MWF", "TTh").
fn days_display(mt: &DbMeetingTime) -> String {
    let weekdays = active_weekdays(mt);
    if weekdays.is_empty() {
        return "TBA".to_string();
    }
    weekdays
        .iter()
        .map(|d| ics_day_code(*d))
        .collect::<Vec<_>>()
        .join("")
}

/// Escape text for ICS property values.
fn escape_ics(text: &str) -> String {
    text.replace('\\', "\\\\")
        .replace(';', "\\;")
        .replace(',', "\\,")
        .replace('\n', "\\n")
        .replace('\r', "")
}

// ---------------------------------------------------------------------------
// University holidays (ported from bot/commands/ics.rs)
// ---------------------------------------------------------------------------

/// Find the nth occurrence of a weekday in a given month/year (1-based).
fn nth_weekday_of_month(year: i32, month: u32, weekday: Weekday, n: u32) -> Option<NaiveDate> {
    let first = NaiveDate::from_ymd_opt(year, month, 1)?;
    let days_ahead = (weekday.num_days_from_monday() as i64
        - first.weekday().num_days_from_monday() as i64)
        .rem_euclid(7) as u32;
    let day = 1 + days_ahead + 7 * (n - 1);
    NaiveDate::from_ymd_opt(year, month, day)
}

/// Compute a consecutive range of dates starting from `start` for `count` days.
fn date_range(start: NaiveDate, count: i64) -> Vec<NaiveDate> {
    (0..count)
        .filter_map(|i| start.checked_add_signed(Duration::days(i)))
        .collect()
}

/// Compute university holidays for a given year.
fn compute_holidays_for_year(year: i32) -> Vec<(&'static str, Vec<NaiveDate>)> {
    let mut holidays = Vec::new();

    // Labor Day: 1st Monday of September
    if let Some(d) = nth_weekday_of_month(year, 9, Weekday::Mon, 1) {
        holidays.push(("Labor Day", vec![d]));
    }

    // Fall Break: Mon-Tue of Columbus Day week
    if let Some(mon) = nth_weekday_of_month(year, 10, Weekday::Mon, 2) {
        holidays.push(("Fall Break", date_range(mon, 2)));
    }

    // Day before Thanksgiving
    if let Some(thu) = nth_weekday_of_month(year, 11, Weekday::Thu, 4)
        && let Some(wed) = thu.checked_sub_signed(Duration::days(1))
    {
        holidays.push(("Day Before Thanksgiving", vec![wed]));
    }

    // Thanksgiving: 4th Thursday + Friday
    if let Some(thu) = nth_weekday_of_month(year, 11, Weekday::Thu, 4) {
        holidays.push(("Thanksgiving", date_range(thu, 2)));
    }

    // Winter Holiday: Dec 23-31
    if let Some(start) = NaiveDate::from_ymd_opt(year, 12, 23) {
        holidays.push(("Winter Holiday", date_range(start, 9)));
    }

    // New Year's Day
    if let Some(d) = NaiveDate::from_ymd_opt(year, 1, 1) {
        holidays.push(("New Year's Day", vec![d]));
    }

    // MLK Day: 3rd Monday of January
    if let Some(d) = nth_weekday_of_month(year, 1, Weekday::Mon, 3) {
        holidays.push(("MLK Day", vec![d]));
    }

    // Spring Break: full week starting 2nd Monday of March
    if let Some(mon) = nth_weekday_of_month(year, 3, Weekday::Mon, 2) {
        holidays.push(("Spring Break", date_range(mon, 6)));
    }

    holidays
}

/// Get holiday dates within a date range that fall on specific weekdays.
fn holiday_exceptions(start: NaiveDate, end: NaiveDate, weekdays: &[Weekday]) -> Vec<NaiveDate> {
    let start_year = start.year();
    let end_year = end.year();

    (start_year..=end_year)
        .flat_map(compute_holidays_for_year)
        .flat_map(|(_, dates)| dates)
        .filter(|&date| date >= start && date <= end && weekdays.contains(&date.weekday()))
        .collect()
}

/// Names of excluded holidays (for user-facing messages).
fn excluded_holiday_names(
    start: NaiveDate,
    end: NaiveDate,
    exceptions: &[NaiveDate],
) -> Vec<String> {
    let start_year = start.year();
    let end_year = end.year();
    let all_holidays: Vec<_> = (start_year..=end_year)
        .flat_map(compute_holidays_for_year)
        .collect();

    let mut names = Vec::new();
    for (holiday_name, holiday_dates) in &all_holidays {
        for &exc in exceptions {
            if holiday_dates.contains(&exc) {
                names.push(format!("{} ({})", holiday_name, exc.format("%a, %b %d")));
            }
        }
    }
    names.sort();
    names.dedup();
    names
}

// ---------------------------------------------------------------------------
// ICS generation
// ---------------------------------------------------------------------------

/// Result from ICS generation, including the file content and excluded holiday names.
pub struct IcsResult {
    pub content: String,
    pub filename: String,
    /// Holiday dates excluded via EXDATE rules, for user-facing messages.
    #[allow(dead_code)]
    pub excluded_holidays: Vec<String>,
}

/// Generate an ICS calendar file for a course.
pub fn generate_ics(
    course: &CalendarCourse,
    meeting_times: &[DbMeetingTime],
) -> Result<IcsResult, anyhow::Error> {
    let mut ics = String::new();
    let mut all_excluded = Vec::new();

    // Header
    ics.push_str("BEGIN:VCALENDAR\r\n");
    ics.push_str("VERSION:2.0\r\n");
    ics.push_str("PRODID:-//Banner Bot//Course Calendar//EN\r\n");
    ics.push_str("CALSCALE:GREGORIAN\r\n");
    ics.push_str("METHOD:PUBLISH\r\n");
    ics.push_str(&format!(
        "X-WR-CALNAME:{}\r\n",
        escape_ics(&course.display_title())
    ));

    for (index, mt) in meeting_times.iter().enumerate() {
        let (event, holidays) = generate_ics_event(course, mt, index)?;
        ics.push_str(&event);
        all_excluded.extend(holidays);
    }

    ics.push_str("END:VCALENDAR\r\n");

    Ok(IcsResult {
        content: ics,
        filename: format!("{}.ics", course.filename_stem()),
        excluded_holidays: all_excluded,
    })
}

/// Generate a single VEVENT for one meeting time.
fn generate_ics_event(
    course: &CalendarCourse,
    mt: &DbMeetingTime,
    index: usize,
) -> Result<(String, Vec<String>), anyhow::Error> {
    let start_date = parse_date(&mt.start_date)
        .ok_or_else(|| anyhow::anyhow!("Invalid start_date: {}", mt.start_date))?;
    let end_date = parse_date(&mt.end_date)
        .ok_or_else(|| anyhow::anyhow!("Invalid end_date: {}", mt.end_date))?;

    let start_time = mt.begin_time.as_deref().and_then(parse_hhmm);
    let end_time = mt.end_time.as_deref().and_then(parse_hhmm);

    // DTSTART/DTEND: first occurrence with time, or all-day on start_date
    let (dtstart, dtend) = match (start_time, end_time) {
        (Some(st), Some(et)) => {
            let s = start_date.and_time(st).and_utc();
            let e = start_date.and_time(et).and_utc();
            (
                s.format("%Y%m%dT%H%M%SZ").to_string(),
                e.format("%Y%m%dT%H%M%SZ").to_string(),
            )
        }
        _ => {
            let s = start_date.and_hms_opt(0, 0, 0).unwrap().and_utc();
            let e = start_date.and_hms_opt(0, 0, 0).unwrap().and_utc();
            (
                s.format("%Y%m%dT%H%M%SZ").to_string(),
                e.format("%Y%m%dT%H%M%SZ").to_string(),
            )
        }
    };

    let event_title = if index > 0 {
        format!("{} (Meeting {})", course.display_title(), index + 1)
    } else {
        course.display_title()
    };

    let instructor = course.primary_instructor.as_deref().unwrap_or("Staff");

    let description = format!(
        "CRN: {}\\nInstructor: {}\\nDays: {}\\nMeeting Type: {}",
        course.crn,
        instructor,
        days_display(mt),
        mt.meeting_type,
    );

    let location = location_string(mt);

    let uid = format!(
        "{}-{}-{}@banner-bot.local",
        course.crn,
        index,
        start_date
            .and_hms_opt(0, 0, 0)
            .unwrap()
            .and_utc()
            .timestamp()
    );

    let mut event = String::new();
    event.push_str("BEGIN:VEVENT\r\n");
    event.push_str(&format!("UID:{uid}\r\n"));
    event.push_str(&format!("DTSTART:{dtstart}\r\n"));
    event.push_str(&format!("DTEND:{dtend}\r\n"));
    event.push_str(&format!("SUMMARY:{}\r\n", escape_ics(&event_title)));
    event.push_str(&format!("DESCRIPTION:{}\r\n", escape_ics(&description)));
    event.push_str(&format!("LOCATION:{}\r\n", escape_ics(&location)));

    let weekdays = active_weekdays(mt);
    let mut holiday_names = Vec::new();

    if let (false, Some(st)) = (weekdays.is_empty(), start_time) {
        let by_day: Vec<&str> = weekdays.iter().map(|d| ics_day_code(*d)).collect();
        let until = end_date.format("%Y%m%dT000000Z").to_string();

        event.push_str(&format!(
            "RRULE:FREQ=WEEKLY;BYDAY={};UNTIL={}\r\n",
            by_day.join(","),
            until,
        ));

        // Holiday exceptions
        let exceptions = holiday_exceptions(start_date, end_date, &weekdays);
        if !exceptions.is_empty() {
            let start_utc = start_date.and_time(st).and_utc();
            let exdates: Vec<String> = exceptions
                .iter()
                .map(|&d| {
                    d.and_time(start_utc.time())
                        .and_utc()
                        .format("%Y%m%dT%H%M%SZ")
                        .to_string()
                })
                .collect();
            event.push_str(&format!("EXDATE:{}\r\n", exdates.join(",")));
        }

        holiday_names = excluded_holiday_names(start_date, end_date, &exceptions);
    }

    event.push_str("END:VEVENT\r\n");
    Ok((event, holiday_names))
}

// ---------------------------------------------------------------------------
// Google Calendar URL generation
// ---------------------------------------------------------------------------

/// Generate a Google Calendar "add event" URL for a single meeting time.
pub fn generate_gcal_url(
    course: &CalendarCourse,
    mt: &DbMeetingTime,
) -> Result<String, anyhow::Error> {
    let start_date = parse_date(&mt.start_date)
        .ok_or_else(|| anyhow::anyhow!("Invalid start_date: {}", mt.start_date))?;
    let end_date = parse_date(&mt.end_date)
        .ok_or_else(|| anyhow::anyhow!("Invalid end_date: {}", mt.end_date))?;

    let start_time = mt.begin_time.as_deref().and_then(parse_hhmm);
    let end_time = mt.end_time.as_deref().and_then(parse_hhmm);

    let dates_text = match (start_time, end_time) {
        (Some(st), Some(et)) => {
            let s = start_date.and_time(st);
            let e = start_date.and_time(et);
            format!(
                "{}/{}",
                s.format("%Y%m%dT%H%M%S"),
                e.format("%Y%m%dT%H%M%S")
            )
        }
        _ => {
            let s = start_date.format("%Y%m%d").to_string();
            format!("{s}/{s}")
        }
    };

    let instructor = course.primary_instructor.as_deref().unwrap_or("Staff");

    let details = format!(
        "CRN: {}\nInstructor: {}\nDays: {}",
        course.crn,
        instructor,
        days_display(mt),
    );

    let location = location_string(mt);

    let weekdays = active_weekdays(mt);
    let recur = if !weekdays.is_empty() && start_time.is_some() {
        let by_day: Vec<&str> = weekdays.iter().map(|d| ics_day_code(*d)).collect();
        let until = end_date.format("%Y%m%dT000000Z").to_string();
        format!(
            "RRULE:FREQ=WEEKLY;BYDAY={};UNTIL={}",
            by_day.join(","),
            until
        )
    } else {
        String::new()
    };

    let course_text = course.display_title();

    let params: Vec<(&str, &str)> = vec![
        ("action", "TEMPLATE"),
        ("text", &course_text),
        ("dates", &dates_text),
        ("details", &details),
        ("location", &location),
        ("trp", "true"),
        ("ctz", "America/Chicago"),
        ("recur", &recur),
    ];

    let url = url::Url::parse_with_params("https://calendar.google.com/calendar/render", &params)?;
    Ok(url.to_string())
}
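For orientation, a minimal caller of this module might look like the following. The module path and the wrapping function are hypothetical; only `CalendarCourse`, `IcsResult`, and `generate_ics` come from the file above, and the field values are illustrative:

use crate::calendar::{generate_ics, CalendarCourse}; // hypothetical module path
use crate::data::models::DbMeetingTime;

/// Hypothetical caller: builds a CalendarCourse and writes the ICS to disk.
fn write_course_ics(meeting_times: &[DbMeetingTime]) -> Result<(), anyhow::Error> {
    let course = CalendarCourse {
        crn: "26173".to_string(), // illustrative values
        subject: "CS".to_string(),
        course_number: "1083".to_string(),
        title: "Introduction to Computer Science".to_string(),
        sequence_number: Some("001".to_string()),
        primary_instructor: Some("Jane Doe".to_string()),
    };
    let ics = generate_ics(&course, meeting_times)?;
    std::fs::write(&ics.filename, &ics.content)?;
    Ok(())
}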
+1 -105
@@ -2,34 +2,16 @@ use clap::Parser;
 
 /// Banner Discord Bot - Course availability monitoring
 ///
-/// This application runs multiple services that can be controlled via CLI arguments:
+/// This application runs all services:
 /// - bot: Discord bot for course monitoring commands
 /// - web: HTTP server for web interface and API
 /// - scraper: Background service for scraping course data
-///
-/// Use --services to specify which services to run, or --disable-services to exclude specific services.
 #[derive(Parser, Debug)]
 #[command(author, version, about, long_about = None)]
 pub struct Args {
     /// Log formatter to use
     #[arg(long, value_enum, default_value_t = default_tracing_format())]
     pub tracing: TracingFormat,
-
-    /// Services to run (comma-separated). Default: all services
-    ///
-    /// Examples:
-    ///   --services bot,web   # Run only bot and web services
-    ///   --services scraper   # Run only the scraper service
-    #[arg(long, value_delimiter = ',', conflicts_with = "disable_services")]
-    pub services: Option<Vec<ServiceName>>,
-
-    /// Services to disable (comma-separated)
-    ///
-    /// Examples:
-    ///   --disable-services bot      # Run web and scraper only
-    ///   --disable-services bot,web  # Run only the scraper service
-    #[arg(long, value_delimiter = ',', conflicts_with = "services")]
-    pub disable_services: Option<Vec<ServiceName>>,
 }
 
 #[derive(clap::ValueEnum, Clone, Debug)]
@@ -66,34 +48,6 @@ impl ServiceName {
     }
 }
 
-/// Determine which services should be enabled based on CLI arguments
-pub fn determine_enabled_services(args: &Args) -> Result<Vec<ServiceName>, anyhow::Error> {
-    match (&args.services, &args.disable_services) {
-        (Some(services), None) => {
-            // User specified which services to run
-            Ok(services.clone())
-        }
-        (None, Some(disabled)) => {
-            // User specified which services to disable
-            let enabled: Vec<ServiceName> = ServiceName::all()
-                .into_iter()
-                .filter(|s| !disabled.contains(s))
-                .collect();
-            Ok(enabled)
-        }
-        (None, None) => {
-            // Default: run all services
-            Ok(ServiceName::all())
-        }
-        (Some(_), Some(_)) => {
-            // This should be prevented by clap's conflicts_with, but just in case
-            Err(anyhow::anyhow!(
-                "Cannot specify both --services and --disable-services"
-            ))
-        }
-    }
-}
-
 #[cfg(debug_assertions)]
 const DEFAULT_TRACING_FORMAT: TracingFormat = TracingFormat::Pretty;
 #[cfg(not(debug_assertions))]
@@ -107,64 +61,6 @@ fn default_tracing_format() -> TracingFormat {
 mod tests {
     use super::*;
 
-    fn args_with_services(
-        services: Option<Vec<ServiceName>>,
-        disable: Option<Vec<ServiceName>>,
-    ) -> Args {
-        Args {
-            tracing: TracingFormat::Pretty,
-            services,
-            disable_services: disable,
-        }
-    }
-
-    #[test]
-    fn test_default_enables_all_services() {
-        let result = determine_enabled_services(&args_with_services(None, None)).unwrap();
-        assert_eq!(result.len(), 3);
-    }
-
-    #[test]
-    fn test_explicit_services_only_those() {
-        let result =
-            determine_enabled_services(&args_with_services(Some(vec![ServiceName::Web]), None))
-                .unwrap();
-        assert_eq!(result.len(), 1);
-        assert_eq!(result[0].as_str(), "web");
-    }
-
-    #[test]
-    fn test_disable_bot_leaves_web_and_scraper() {
-        let result =
-            determine_enabled_services(&args_with_services(None, Some(vec![ServiceName::Bot])))
-                .unwrap();
-        assert_eq!(result.len(), 2);
-        assert!(result.iter().all(|s| s.as_str() != "bot"));
-    }
-
-    #[test]
-    fn test_disable_all_leaves_empty() {
-        let result = determine_enabled_services(&args_with_services(
-            None,
-            Some(vec![
-                ServiceName::Bot,
-                ServiceName::Web,
-                ServiceName::Scraper,
-            ]),
-        ))
-        .unwrap();
-        assert!(result.is_empty());
-    }
-
-    #[test]
-    fn test_both_specified_returns_error() {
-        let result = determine_enabled_services(&args_with_services(
-            Some(vec![ServiceName::Web]),
-            Some(vec![ServiceName::Bot]),
-        ));
-        assert!(result.is_err());
-    }
-
     #[test]
     fn test_service_name_as_str() {
         assert_eq!(ServiceName::Bot.as_str(), "bot");
@@ -47,6 +47,19 @@ pub struct Config {
     /// Rate limiting configuration for Banner API requests
     #[serde(default = "default_rate_limiting")]
     pub rate_limiting: RateLimitingConfig,
+
+    /// Discord OAuth2 client ID for web authentication
+    #[serde(deserialize_with = "deserialize_string_or_uint")]
+    pub discord_client_id: String,
+    /// Discord OAuth2 client secret for web authentication
+    pub discord_client_secret: String,
+    /// Optional base URL override for OAuth2 redirect (e.g. "https://banner.xevion.dev").
+    /// When unset, the redirect URI is derived from the incoming request's Origin/Host.
+    #[serde(default)]
+    pub discord_redirect_uri: Option<String>,
+    /// Discord user ID to seed as initial admin on startup (optional)
+    #[serde(default)]
+    pub admin_discord_id: Option<u64>,
 }
 
 /// Default log level of "info"
@@ -216,6 +229,43 @@
 deserializer.deserialize_any(DurationVisitor)
 }
 
+/// Deserializes a value that may arrive as either a string or unsigned integer.
+///
+/// Figment's env provider infers types from raw values, so numeric-looking strings
+/// like Discord client IDs get parsed as integers. This accepts both forms.
+fn deserialize_string_or_uint<'de, D>(deserializer: D) -> Result<String, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    use serde::de::Visitor;
+
+    struct StringOrUintVisitor;
+
+    impl<'de> Visitor<'de> for StringOrUintVisitor {
+        type Value = String;
+
+        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
+            formatter.write_str("a string or unsigned integer")
+        }
+
+        fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
+        where
+            E: serde::de::Error,
+        {
+            Ok(value.to_owned())
+        }
+
+        fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
+        where
+            E: serde::de::Error,
+        {
+            Ok(value.to_string())
+        }
+    }
+
+    deserializer.deserialize_any(StringOrUintVisitor)
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
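A quick property of the visitor above, written as a hedged test sketch (serde_json stands in for Figment here; this test is not part of the diff):

#[cfg(test)]
mod string_or_uint_sketch {
    use super::*;

    #[derive(serde::Deserialize)]
    struct IdOnly {
        #[serde(deserialize_with = "deserialize_string_or_uint")]
        discord_client_id: String,
    }

    // Both JSON encodings normalize to the same String.
    #[test]
    fn accepts_string_or_integer_ids() {
        let a: IdOnly = serde_json::from_str(r#"{"discord_client_id":"123456"}"#).unwrap();
        let b: IdOnly = serde_json::from_str(r#"{"discord_client_id":123456}"#).unwrap();
        assert_eq!(a.discord_client_id, b.discord_client_id);
    }
}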
+547 -111
@@ -1,10 +1,12 @@
 //! Batch database operations for improved performance.
 
 use crate::banner::Course;
-use crate::data::models::DbMeetingTime;
+use crate::data::models::{DbMeetingTime, UpsertCounts};
+use crate::data::names::parse_banner_name;
 use crate::error::Result;
+use sqlx::PgConnection;
 use sqlx::PgPool;
-use std::collections::HashSet;
+use std::collections::{HashMap, HashSet};
 use std::time::Instant;
 use tracing::info;
 
@@ -57,46 +59,389 @@ fn extract_campus_code(course: &Course) -> Option<String> {
        .and_then(|mf| mf.meeting_time.campus.clone())
}

// ---------------------------------------------------------------------------
// Task 1: UpsertDiffRow — captures pre- and post-upsert state for diffing
// ---------------------------------------------------------------------------

/// Row returned by the CTE-based upsert query, carrying both old and new values
/// for every auditable field. `old_id` is `None` for fresh inserts.
#[derive(sqlx::FromRow, Debug)]
struct UpsertDiffRow {
    id: i32,
    old_id: Option<i32>,
    crn: String,
    term_code: String,

    // enrollment fields
    old_enrollment: Option<i32>,
    new_enrollment: i32,
    old_max_enrollment: Option<i32>,
    new_max_enrollment: i32,
    old_wait_count: Option<i32>,
    new_wait_count: i32,
    old_wait_capacity: Option<i32>,
    new_wait_capacity: i32,

    // text fields (non-nullable in DB)
    old_subject: Option<String>,
    new_subject: String,
    old_course_number: Option<String>,
    new_course_number: String,
    old_title: Option<String>,
    new_title: String,

    // nullable text fields
    old_sequence_number: Option<String>,
    new_sequence_number: Option<String>,
    old_part_of_term: Option<String>,
    new_part_of_term: Option<String>,
    old_instructional_method: Option<String>,
    new_instructional_method: Option<String>,
    old_campus: Option<String>,
    new_campus: Option<String>,

    // nullable int fields
    old_credit_hours: Option<i32>,
    new_credit_hours: Option<i32>,
    old_credit_hour_low: Option<i32>,
    new_credit_hour_low: Option<i32>,
    old_credit_hour_high: Option<i32>,
    new_credit_hour_high: Option<i32>,

    // cross-list fields
    old_cross_list: Option<String>,
    new_cross_list: Option<String>,
    old_cross_list_capacity: Option<i32>,
    new_cross_list_capacity: Option<i32>,
    old_cross_list_count: Option<i32>,
    new_cross_list_count: Option<i32>,

    // link fields
    old_link_identifier: Option<String>,
    new_link_identifier: Option<String>,
    old_is_section_linked: Option<bool>,
    new_is_section_linked: Option<bool>,

    // JSONB fields
    old_meeting_times: Option<serde_json::Value>,
    new_meeting_times: serde_json::Value,
    old_attributes: Option<serde_json::Value>,
    new_attributes: serde_json::Value,
}

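// Editorial note (not part of this diff): the CTE-based upsert that produces
// UpsertDiffRow is not shown here. Under the column names above, its shape is
// presumably something like:
//
//     WITH old AS (
//         SELECT id, enrollment, ... FROM courses
//         WHERE (crn, term_code) IN (SELECT * FROM UNNEST($1::text[], $2::text[]))
//     ), upserted AS (
//         INSERT INTO courses (crn, term_code, enrollment, ...)
//         SELECT * FROM UNNEST(...)
//         ON CONFLICT (crn, term_code) DO UPDATE SET enrollment = EXCLUDED.enrollment, ...
//         RETURNING id, crn, term_code, enrollment AS new_enrollment, ...
//     )
//     SELECT u.*, o.id AS old_id, o.enrollment AS old_enrollment, ...
//     FROM upserted u
//     LEFT JOIN old o ON o.id = u.id;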
// ---------------------------------------------------------------------------
|
||||
// Task 3: Entry types and diff logic
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
struct AuditEntry {
|
||||
course_id: i32,
|
||||
field_changed: &'static str,
|
||||
old_value: String,
|
||||
new_value: String,
|
||||
}
|
||||
|
||||
struct MetricEntry {
|
||||
course_id: i32,
|
||||
enrollment: i32,
|
||||
wait_count: i32,
|
||||
seats_available: i32,
|
||||
}
|
||||
|
||||
/// Compare old vs new for a single field, pushing an `AuditEntry` when they differ.
|
||||
///
|
||||
/// Three variants:
|
||||
/// - `diff_field!(audits, row, field_name, old_field, new_field)` — `Option<T>` old vs `T` new
|
||||
/// - `diff_field!(opt audits, row, field_name, old_field, new_field)` — `Option<T>` old vs `Option<T>` new
|
||||
/// - `diff_field!(json audits, row, field_name, old_field, new_field)` — `Option<Value>` old vs `Value` new
|
||||
///
|
||||
/// All variants skip when `old_id` is None (fresh insert).
|
||||
macro_rules! diff_field {
|
||||
// Standard: Option<T> old vs T new (non-nullable columns)
|
||||
($audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
|
||||
if $row.old_id.is_some() {
|
||||
let old_str = $row
|
||||
.$old
|
||||
.as_ref()
|
||||
.map(|v| v.to_string())
|
||||
.unwrap_or_default();
|
||||
let new_str = $row.$new.to_string();
|
||||
if old_str != new_str {
|
||||
$audits.push(AuditEntry {
|
||||
course_id: $row.id,
|
||||
field_changed: $field,
|
||||
old_value: old_str,
|
||||
new_value: new_str,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
// Nullable: Option<T> old vs Option<T> new
|
||||
(opt $audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
|
||||
if $row.old_id.is_some() {
|
||||
let old_str = $row
|
||||
.$old
|
||||
.as_ref()
|
||||
.map(|v| v.to_string())
|
||||
.unwrap_or_default();
|
||||
let new_str = $row
|
||||
.$new
|
||||
.as_ref()
|
||||
.map(|v| v.to_string())
|
||||
.unwrap_or_default();
|
||||
if old_str != new_str {
|
||||
$audits.push(AuditEntry {
|
||||
course_id: $row.id,
|
||||
field_changed: $field,
|
||||
old_value: old_str,
|
||||
new_value: new_str,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
// JSONB: Option<Value> old vs Value new
|
||||
(json $audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
|
||||
if $row.old_id.is_some() {
|
||||
let old_val = $row
|
||||
.$old
|
||||
.as_ref()
|
||||
.cloned()
|
||||
.unwrap_or(serde_json::Value::Null);
|
||||
let new_val = &$row.$new;
|
||||
if old_val != *new_val {
|
||||
$audits.push(AuditEntry {
|
||||
course_id: $row.id,
|
||||
field_changed: $field,
|
||||
old_value: old_val.to_string(),
|
||||
new_value: new_val.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}

/// Compute audit entries (field-level diffs) and metric entries from upsert diff rows.
fn compute_diffs(rows: &[UpsertDiffRow]) -> (Vec<AuditEntry>, Vec<MetricEntry>) {
    let mut audits = Vec::new();
    let mut metrics = Vec::new();

    for row in rows {
        // Non-nullable fields
        diff_field!(audits, row, "enrollment", old_enrollment, new_enrollment);
        diff_field!(
            audits,
            row,
            "max_enrollment",
            old_max_enrollment,
            new_max_enrollment
        );
        diff_field!(audits, row, "wait_count", old_wait_count, new_wait_count);
        diff_field!(
            audits,
            row,
            "wait_capacity",
            old_wait_capacity,
            new_wait_capacity
        );
        diff_field!(audits, row, "subject", old_subject, new_subject);
        diff_field!(
            audits,
            row,
            "course_number",
            old_course_number,
            new_course_number
        );
        diff_field!(audits, row, "title", old_title, new_title);

        // Nullable text fields
        diff_field!(opt audits, row, "sequence_number", old_sequence_number, new_sequence_number);
        diff_field!(opt audits, row, "part_of_term", old_part_of_term, new_part_of_term);
        diff_field!(opt audits, row, "instructional_method", old_instructional_method, new_instructional_method);
        diff_field!(opt audits, row, "campus", old_campus, new_campus);

        // Nullable int fields
        diff_field!(opt audits, row, "credit_hours", old_credit_hours, new_credit_hours);
        diff_field!(opt audits, row, "credit_hour_low", old_credit_hour_low, new_credit_hour_low);
        diff_field!(opt audits, row, "credit_hour_high", old_credit_hour_high, new_credit_hour_high);

        // Cross-list fields
        diff_field!(opt audits, row, "cross_list", old_cross_list, new_cross_list);
        diff_field!(opt audits, row, "cross_list_capacity", old_cross_list_capacity, new_cross_list_capacity);
        diff_field!(opt audits, row, "cross_list_count", old_cross_list_count, new_cross_list_count);

        // Link fields
        diff_field!(opt audits, row, "link_identifier", old_link_identifier, new_link_identifier);
        diff_field!(opt audits, row, "is_section_linked", old_is_section_linked, new_is_section_linked);

        // JSONB fields
        diff_field!(json audits, row, "meeting_times", old_meeting_times, new_meeting_times);
        diff_field!(json audits, row, "attributes", old_attributes, new_attributes);

        // Emit a metric entry when enrollment/wait_count/max_enrollment changed
        // Skip fresh inserts (no old data to compare against)
        let enrollment_changed = row.old_id.is_some()
            && (row.old_enrollment != Some(row.new_enrollment)
                || row.old_wait_count != Some(row.new_wait_count)
                || row.old_max_enrollment != Some(row.new_max_enrollment));

        if enrollment_changed {
            metrics.push(MetricEntry {
                course_id: row.id,
                enrollment: row.new_enrollment,
                wait_count: row.new_wait_count,
                seats_available: row.new_max_enrollment - row.new_enrollment,
            });
        }
    }

    (audits, metrics)
}

// ---------------------------------------------------------------------------
// Task 4: Batch insert functions for audits and metrics
// ---------------------------------------------------------------------------

async fn insert_audits(audits: &[AuditEntry], conn: &mut PgConnection) -> Result<()> {
    if audits.is_empty() {
        return Ok(());
    }

    let course_ids: Vec<i32> = audits.iter().map(|a| a.course_id).collect();
    let fields: Vec<&str> = audits.iter().map(|a| a.field_changed).collect();
    let old_values: Vec<&str> = audits.iter().map(|a| a.old_value.as_str()).collect();
    let new_values: Vec<&str> = audits.iter().map(|a| a.new_value.as_str()).collect();

    sqlx::query(
        r#"
        INSERT INTO course_audits (course_id, timestamp, field_changed, old_value, new_value)
        SELECT v.course_id, NOW(), v.field_changed, v.old_value, v.new_value
        FROM UNNEST($1::int4[], $2::text[], $3::text[], $4::text[])
            AS v(course_id, field_changed, old_value, new_value)
        "#,
    )
    .bind(&course_ids)
    .bind(&fields)
    .bind(&old_values)
    .bind(&new_values)
    .execute(&mut *conn)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to batch insert course_audits: {}", e))?;

    Ok(())
}

async fn insert_metrics(metrics: &[MetricEntry], conn: &mut PgConnection) -> Result<()> {
    if metrics.is_empty() {
        return Ok(());
    }

    let course_ids: Vec<i32> = metrics.iter().map(|m| m.course_id).collect();
    let enrollments: Vec<i32> = metrics.iter().map(|m| m.enrollment).collect();
    let wait_counts: Vec<i32> = metrics.iter().map(|m| m.wait_count).collect();
    let seats_available: Vec<i32> = metrics.iter().map(|m| m.seats_available).collect();

    sqlx::query(
        r#"
        INSERT INTO course_metrics (course_id, timestamp, enrollment, wait_count, seats_available)
        SELECT v.course_id, NOW(), v.enrollment, v.wait_count, v.seats_available
        FROM UNNEST($1::int4[], $2::int4[], $3::int4[], $4::int4[])
            AS v(course_id, enrollment, wait_count, seats_available)
        "#,
    )
    .bind(&course_ids)
    .bind(&enrollments)
    .bind(&wait_counts)
    .bind(&seats_available)
    .execute(&mut *conn)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to batch insert course_metrics: {}", e))?;

    Ok(())
}
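
// The two functions above share one shape — the UNNEST batch-insert pattern:
// decompose N structs into parallel column vectors, bind each vector once, and
// let Postgres zip them back into rows, so a batch of any size costs a single
// round trip. A minimal sketch of the pattern against a hypothetical `tags`
// table (the table and its columns are assumptions for illustration only):
async fn unnest_insert_sketch(rows: &[(String, i32)], conn: &mut PgConnection) -> Result<()> {
    let names: Vec<&str> = rows.iter().map(|(name, _)| name.as_str()).collect();
    let weights: Vec<i32> = rows.iter().map(|(_, weight)| *weight).collect();

    // $1 and $2 must have equal lengths; UNNEST pairs their elements positionally.
    sqlx::query("INSERT INTO tags (name, weight) SELECT * FROM UNNEST($1::text[], $2::int4[])")
        .bind(&names)
        .bind(&weights)
        .execute(&mut *conn)
        .await?;

    Ok(())
}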

// ---------------------------------------------------------------------------
// Core upsert functions (updated to use &mut PgConnection)
// ---------------------------------------------------------------------------

/// Batch upsert courses in a single database query.
///
/// Performs a bulk INSERT...ON CONFLICT DO UPDATE for all courses, including
/// new fields (meeting times, attributes, instructor data). Returns the
/// database IDs for all upserted courses (in input order) so instructors
/// can be linked.
/// new fields (meeting times, attributes, instructor data). Captures pre-update
/// state for audit/metric tracking, all within a single transaction.
///
/// # Performance
/// - Reduces N database round-trips to 3 (courses, instructors, junction)
/// - Reduces N database round-trips to 5 (old-data CTE + upsert, audits, metrics, instructors, junction)
/// - Typical usage: 50-200 courses per batch
pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<()> {
pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<UpsertCounts> {
    if courses.is_empty() {
        info!("No courses to upsert, skipping batch operation");
        return Ok(());
        return Ok(UpsertCounts::default());
    }

    let start = Instant::now();
    let course_count = courses.len();

    // Step 1: Upsert courses with all fields, returning IDs
    let course_ids = upsert_courses(courses, db_pool).await?;
    let mut tx = db_pool.begin().await?;

    // Step 2: Upsert instructors (deduplicated across batch)
    upsert_instructors(courses, db_pool).await?;
    // Step 1: Upsert courses with CTE, returning diff rows
    let diff_rows = upsert_courses(courses, &mut tx).await?;

    // Step 3: Link courses to instructors via junction table
    upsert_course_instructors(courses, &course_ids, db_pool).await?;
    // Step 2: Build (crn, term_code) → course_id map for instructor linking.
    // RETURNING order from INSERT ... ON CONFLICT is not guaranteed to match
    // the input array order, so we must key by (crn, term_code) rather than
    // relying on positional correspondence.
    let crn_term_to_id: HashMap<(&str, &str), i32> = diff_rows
        .iter()
        .map(|r| ((r.crn.as_str(), r.term_code.as_str()), r.id))
        .collect();

    // Step 3: Compute audit/metric diffs
    let (audits, metrics) = compute_diffs(&diff_rows);

    // Count courses that had at least one field change (existing rows only)
    let changed_ids: HashSet<i32> = audits.iter().map(|a| a.course_id).collect();
    let existing_count = diff_rows.iter().filter(|r| r.old_id.is_some()).count() as i32;
    let courses_changed = changed_ids.len() as i32;

    let counts = UpsertCounts {
        courses_fetched: course_count as i32,
        courses_changed,
        courses_unchanged: existing_count - courses_changed,
        audits_generated: audits.len() as i32,
        metrics_generated: metrics.len() as i32,
    };

    // Step 4: Insert audits and metrics
    insert_audits(&audits, &mut tx).await?;
    insert_metrics(&metrics, &mut tx).await?;

    // Step 5: Upsert instructors (returns email -> id map)
    let email_to_id = upsert_instructors(courses, &mut tx).await?;

    // Step 6: Link courses to instructors via junction table
    upsert_course_instructors(courses, &crn_term_to_id, &email_to_id, &mut tx).await?;

    tx.commit().await?;

    let duration = start.elapsed();
    info!(
        courses_count = course_count,
        courses_changed = counts.courses_changed,
        courses_unchanged = counts.courses_unchanged,
        audit_entries = counts.audits_generated,
        metric_entries = counts.metrics_generated,
        duration_ms = duration.as_millis(),
        "Batch upserted courses with instructors"
        "Batch upserted courses with instructors, audits, and metrics"
    );

    Ok(())
    Ok(counts)
}
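
// Hedged usage sketch (the caller shown here is illustrative, not part of this
// commit): the scrape path can forward the new `UpsertCounts` return value
// straight into its job-result logging.
async fn persist_batch_sketch(courses: &[Course], db_pool: &PgPool) -> Result<()> {
    let counts = batch_upsert_courses(courses, db_pool).await?;
    info!(
        fetched = counts.courses_fetched,
        changed = counts.courses_changed,
        unchanged = counts.courses_unchanged,
        audits = counts.audits_generated,
        metrics = counts.metrics_generated,
        "Scrape batch persisted"
    );
    Ok(())
}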

/// Upsert all courses and return their database IDs in input order.
async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>> {
// ---------------------------------------------------------------------------
// Task 2: CTE-based upsert returning old+new values
// ---------------------------------------------------------------------------

/// Upsert all courses and return diff rows with old and new values for auditing.
async fn upsert_courses(courses: &[Course], conn: &mut PgConnection) -> Result<Vec<UpsertDiffRow>> {
    let crns: Vec<&str> = courses
        .iter()
        .map(|c| c.course_reference_number.as_str())
@@ -143,67 +488,107 @@ async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>
        courses.iter().map(to_db_meeting_times).collect();
    let attributes_json: Vec<serde_json::Value> = courses.iter().map(to_db_attributes).collect();

    let rows = sqlx::query_scalar::<_, i32>(
    let rows = sqlx::query_as::<_, UpsertDiffRow>(
        r#"
        INSERT INTO courses (
            crn, subject, course_number, title, term_code,
            enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at,
            sequence_number, part_of_term, instructional_method, campus,
            credit_hours, credit_hour_low, credit_hour_high,
            cross_list, cross_list_capacity, cross_list_count,
            link_identifier, is_section_linked,
            meeting_times, attributes
        WITH old_data AS (
            SELECT id, enrollment, max_enrollment, wait_count, wait_capacity,
                   subject, course_number, title,
                   sequence_number, part_of_term, instructional_method, campus,
                   credit_hours, credit_hour_low, credit_hour_high,
                   cross_list, cross_list_capacity, cross_list_count,
                   link_identifier, is_section_linked,
                   meeting_times, attributes,
                   crn, term_code
            FROM courses
            WHERE (crn, term_code) IN (SELECT * FROM UNNEST($1::text[], $5::text[]))
        ),
        upserted AS (
            INSERT INTO courses (
                crn, subject, course_number, title, term_code,
                enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at,
                sequence_number, part_of_term, instructional_method, campus,
                credit_hours, credit_hour_low, credit_hour_high,
                cross_list, cross_list_capacity, cross_list_count,
                link_identifier, is_section_linked,
                meeting_times, attributes
            )
            SELECT
                v.crn, v.subject, v.course_number, v.title, v.term_code,
                v.enrollment, v.max_enrollment, v.wait_count, v.wait_capacity, NOW(),
                v.sequence_number, v.part_of_term, v.instructional_method, v.campus,
                v.credit_hours, v.credit_hour_low, v.credit_hour_high,
                v.cross_list, v.cross_list_capacity, v.cross_list_count,
                v.link_identifier, v.is_section_linked,
                v.meeting_times, v.attributes
            FROM UNNEST(
                $1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
                $6::int4[], $7::int4[], $8::int4[], $9::int4[],
                $10::text[], $11::text[], $12::text[], $13::text[],
                $14::int4[], $15::int4[], $16::int4[],
                $17::text[], $18::int4[], $19::int4[],
                $20::text[], $21::bool[],
                $22::jsonb[], $23::jsonb[]
            ) AS v(
                crn, subject, course_number, title, term_code,
                enrollment, max_enrollment, wait_count, wait_capacity,
                sequence_number, part_of_term, instructional_method, campus,
                credit_hours, credit_hour_low, credit_hour_high,
                cross_list, cross_list_capacity, cross_list_count,
                link_identifier, is_section_linked,
                meeting_times, attributes
            )
            ON CONFLICT (crn, term_code)
            DO UPDATE SET
                subject = EXCLUDED.subject,
                course_number = EXCLUDED.course_number,
                title = EXCLUDED.title,
                enrollment = EXCLUDED.enrollment,
                max_enrollment = EXCLUDED.max_enrollment,
                wait_count = EXCLUDED.wait_count,
                wait_capacity = EXCLUDED.wait_capacity,
                last_scraped_at = EXCLUDED.last_scraped_at,
                sequence_number = EXCLUDED.sequence_number,
                part_of_term = EXCLUDED.part_of_term,
                instructional_method = EXCLUDED.instructional_method,
                campus = EXCLUDED.campus,
                credit_hours = EXCLUDED.credit_hours,
                credit_hour_low = EXCLUDED.credit_hour_low,
                credit_hour_high = EXCLUDED.credit_hour_high,
                cross_list = EXCLUDED.cross_list,
                cross_list_capacity = EXCLUDED.cross_list_capacity,
                cross_list_count = EXCLUDED.cross_list_count,
                link_identifier = EXCLUDED.link_identifier,
                is_section_linked = EXCLUDED.is_section_linked,
                meeting_times = EXCLUDED.meeting_times,
                attributes = EXCLUDED.attributes
            RETURNING *
        )
        SELECT
            v.crn, v.subject, v.course_number, v.title, v.term_code,
            v.enrollment, v.max_enrollment, v.wait_count, v.wait_capacity, NOW(),
            v.sequence_number, v.part_of_term, v.instructional_method, v.campus,
            v.credit_hours, v.credit_hour_low, v.credit_hour_high,
            v.cross_list, v.cross_list_capacity, v.cross_list_count,
            v.link_identifier, v.is_section_linked,
            v.meeting_times, v.attributes
        FROM UNNEST(
            $1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
            $6::int4[], $7::int4[], $8::int4[], $9::int4[],
            $10::text[], $11::text[], $12::text[], $13::text[],
            $14::int4[], $15::int4[], $16::int4[],
            $17::text[], $18::int4[], $19::int4[],
            $20::text[], $21::bool[],
            $22::jsonb[], $23::jsonb[]
        ) AS v(
            crn, subject, course_number, title, term_code,
            enrollment, max_enrollment, wait_count, wait_capacity,
            sequence_number, part_of_term, instructional_method, campus,
            credit_hours, credit_hour_low, credit_hour_high,
            cross_list, cross_list_capacity, cross_list_count,
            link_identifier, is_section_linked,
            meeting_times, attributes
        )
        ON CONFLICT (crn, term_code)
        DO UPDATE SET
            subject = EXCLUDED.subject,
            course_number = EXCLUDED.course_number,
            title = EXCLUDED.title,
            enrollment = EXCLUDED.enrollment,
            max_enrollment = EXCLUDED.max_enrollment,
            wait_count = EXCLUDED.wait_count,
            wait_capacity = EXCLUDED.wait_capacity,
            last_scraped_at = EXCLUDED.last_scraped_at,
            sequence_number = EXCLUDED.sequence_number,
            part_of_term = EXCLUDED.part_of_term,
            instructional_method = EXCLUDED.instructional_method,
            campus = EXCLUDED.campus,
            credit_hours = EXCLUDED.credit_hours,
            credit_hour_low = EXCLUDED.credit_hour_low,
            credit_hour_high = EXCLUDED.credit_hour_high,
            cross_list = EXCLUDED.cross_list,
            cross_list_capacity = EXCLUDED.cross_list_capacity,
            cross_list_count = EXCLUDED.cross_list_count,
            link_identifier = EXCLUDED.link_identifier,
            is_section_linked = EXCLUDED.is_section_linked,
            meeting_times = EXCLUDED.meeting_times,
            attributes = EXCLUDED.attributes
        RETURNING id
        SELECT u.id,
               o.id AS old_id,
               u.crn, u.term_code,
               o.enrollment AS old_enrollment, u.enrollment AS new_enrollment,
               o.max_enrollment AS old_max_enrollment, u.max_enrollment AS new_max_enrollment,
               o.wait_count AS old_wait_count, u.wait_count AS new_wait_count,
               o.wait_capacity AS old_wait_capacity, u.wait_capacity AS new_wait_capacity,
               o.subject AS old_subject, u.subject AS new_subject,
               o.course_number AS old_course_number, u.course_number AS new_course_number,
               o.title AS old_title, u.title AS new_title,
               o.sequence_number AS old_sequence_number, u.sequence_number AS new_sequence_number,
               o.part_of_term AS old_part_of_term, u.part_of_term AS new_part_of_term,
               o.instructional_method AS old_instructional_method, u.instructional_method AS new_instructional_method,
               o.campus AS old_campus, u.campus AS new_campus,
               o.credit_hours AS old_credit_hours, u.credit_hours AS new_credit_hours,
               o.credit_hour_low AS old_credit_hour_low, u.credit_hour_low AS new_credit_hour_low,
               o.credit_hour_high AS old_credit_hour_high, u.credit_hour_high AS new_credit_hour_high,
               o.cross_list AS old_cross_list, u.cross_list AS new_cross_list,
               o.cross_list_capacity AS old_cross_list_capacity, u.cross_list_capacity AS new_cross_list_capacity,
               o.cross_list_count AS old_cross_list_count, u.cross_list_count AS new_cross_list_count,
               o.link_identifier AS old_link_identifier, u.link_identifier AS new_link_identifier,
               o.is_section_linked AS old_is_section_linked, u.is_section_linked AS new_is_section_linked,
               o.meeting_times AS old_meeting_times, u.meeting_times AS new_meeting_times,
               o.attributes AS old_attributes, u.attributes AS new_attributes
        FROM upserted u
        LEFT JOIN old_data o ON u.crn = o.crn AND u.term_code = o.term_code
        "#,
    )
    .bind(&crns)
@@ -229,69 +614,117 @@ async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>
    .bind(&is_section_linkeds)
    .bind(&meeting_times_json)
    .bind(&attributes_json)
    .fetch_all(db_pool)
    .fetch_all(&mut *conn)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to batch upsert courses: {}", e))?;

    Ok(rows)
}

/// Deduplicate and upsert all instructors from the batch.
async fn upsert_instructors(courses: &[Course], db_pool: &PgPool) -> Result<()> {
/// Deduplicate and upsert all instructors from the batch by email.
/// Returns a map of lowercased_email -> instructor id for junction linking.
async fn upsert_instructors(
    courses: &[Course],
    conn: &mut PgConnection,
) -> Result<HashMap<String, i32>> {
    let mut seen = HashSet::new();
    let mut banner_ids = Vec::new();
    let mut display_names = Vec::new();
    let mut emails: Vec<Option<&str>> = Vec::new();
    let mut display_names: Vec<&str> = Vec::new();
    let mut first_names: Vec<Option<String>> = Vec::new();
    let mut last_names: Vec<Option<String>> = Vec::new();
    let mut emails_lower: Vec<String> = Vec::new();
    let mut skipped_no_email = 0u32;

    for course in courses {
        for faculty in &course.faculty {
            if seen.insert(faculty.banner_id.as_str()) {
                banner_ids.push(faculty.banner_id.as_str());
                display_names.push(faculty.display_name.as_str());
                emails.push(faculty.email_address.as_deref());
            if let Some(email) = &faculty.email_address {
                let email_lower = email.to_lowercase();
                if seen.insert(email_lower.clone()) {
                    let parts = parse_banner_name(&faculty.display_name);
                    display_names.push(faculty.display_name.as_str());
                    first_names.push(parts.as_ref().map(|p| p.first.clone()));
                    last_names.push(parts.as_ref().map(|p| p.last.clone()));
                    emails_lower.push(email_lower);
                }
            } else {
                skipped_no_email += 1;
            }
        }
    }

    if banner_ids.is_empty() {
        return Ok(());
    if skipped_no_email > 0 {
        tracing::warn!(
            count = skipped_no_email,
            "Skipped instructors with no email address"
        );
    }

    sqlx::query(
    if display_names.is_empty() {
        return Ok(HashMap::new());
    }

    let email_refs: Vec<&str> = emails_lower.iter().map(|s| s.as_str()).collect();
    let first_name_refs: Vec<Option<&str>> = first_names.iter().map(|s| s.as_deref()).collect();
    let last_name_refs: Vec<Option<&str>> = last_names.iter().map(|s| s.as_deref()).collect();

    let rows: Vec<(i32, String)> = sqlx::query_as(
        r#"
        INSERT INTO instructors (banner_id, display_name, email)
        SELECT * FROM UNNEST($1::text[], $2::text[], $3::text[])
        ON CONFLICT (banner_id)
        INSERT INTO instructors (display_name, email, first_name, last_name)
        SELECT * FROM UNNEST($1::text[], $2::text[], $3::text[], $4::text[])
        ON CONFLICT (email)
        DO UPDATE SET
            display_name = EXCLUDED.display_name,
            email = COALESCE(EXCLUDED.email, instructors.email)
            first_name = EXCLUDED.first_name,
            last_name = EXCLUDED.last_name
        RETURNING id, email
        "#,
    )
    .bind(&banner_ids)
    .bind(&display_names)
    .bind(&emails)
    .execute(db_pool)
    .bind(&email_refs)
    .bind(&first_name_refs)
    .bind(&last_name_refs)
    .fetch_all(&mut *conn)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to batch upsert instructors: {}", e))?;

    Ok(())
    Ok(rows.into_iter().map(|(id, email)| (email, id)).collect())
}

/// Link courses to their instructors via the junction table.
async fn upsert_course_instructors(
    courses: &[Course],
    course_ids: &[i32],
    db_pool: &PgPool,
    crn_term_to_id: &HashMap<(&str, &str), i32>,
    email_to_id: &HashMap<String, i32>,
    conn: &mut PgConnection,
) -> Result<()> {
    let mut cids = Vec::new();
    let mut iids = Vec::new();
    let mut instructor_ids: Vec<i32> = Vec::new();
    let mut banner_ids: Vec<&str> = Vec::new();
    let mut primaries = Vec::new();

    for (course, &course_id) in courses.iter().zip(course_ids) {
    for course in courses {
        let key = (
            course.course_reference_number.as_str(),
            course.term.as_str(),
        );
        let Some(&course_id) = crn_term_to_id.get(&key) else {
            tracing::warn!(
                crn = %course.course_reference_number,
                term = %course.term,
                "No course_id found for CRN/term pair during instructor linking"
            );
            continue;
        };

        for faculty in &course.faculty {
            cids.push(course_id);
            iids.push(faculty.banner_id.as_str());
            primaries.push(faculty.primary_indicator);
            if let Some(email) = &faculty.email_address {
                let email_lower = email.to_lowercase();
                if let Some(&instructor_id) = email_to_id.get(&email_lower) {
                    cids.push(course_id);
                    instructor_ids.push(instructor_id);
                    banner_ids.push(faculty.banner_id.as_str());
                    primaries.push(faculty.primary_indicator);
                }
            }
        }
    }

@@ -303,21 +736,24 @@ async fn upsert_course_instructors(
    // This handles instructor changes cleanly.
    sqlx::query("DELETE FROM course_instructors WHERE course_id = ANY($1)")
        .bind(&cids)
        .execute(db_pool)
        .execute(&mut *conn)
        .await?;

    sqlx::query(
        r#"
        INSERT INTO course_instructors (course_id, instructor_id, is_primary)
        SELECT * FROM UNNEST($1::int4[], $2::text[], $3::bool[])
        INSERT INTO course_instructors (course_id, instructor_id, banner_id, is_primary)
        SELECT * FROM UNNEST($1::int4[], $2::int4[], $3::text[], $4::bool[])
        ON CONFLICT (course_id, instructor_id)
        DO UPDATE SET is_primary = EXCLUDED.is_primary
        DO UPDATE SET
            banner_id = EXCLUDED.banner_id,
            is_primary = EXCLUDED.is_primary
        "#,
    )
    .bind(&cids)
    .bind(&iids)
    .bind(&instructor_ids)
    .bind(&banner_ids)
    .bind(&primaries)
    .execute(db_pool)
    .execute(&mut *conn)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to batch upsert course_instructors: {}", e))?;

+171
-75
@@ -1,8 +1,74 @@
//! Database query functions for courses, used by the web API.

use crate::data::models::Course;
use crate::data::models::{Course, CourseInstructorDetail};
use crate::error::Result;
use sqlx::PgPool;
use std::collections::HashMap;

/// Column to sort search results by.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SortColumn {
    CourseCode,
    Title,
    Instructor,
    Time,
    Seats,
}

/// Sort direction.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SortDirection {
    Asc,
    Desc,
}

/// Shared WHERE clause for course search filters.
///
/// Parameters $1-$8 match the bind order in `search_courses`.
const SEARCH_WHERE: &str = r#"
    WHERE term_code = $1
      AND ($2::text[] IS NULL OR subject = ANY($2))
      AND ($3::text IS NULL OR title_search @@ plainto_tsquery('simple', $3) OR title ILIKE '%' || $3 || '%')
      AND ($4::int IS NULL OR course_number::int >= $4)
      AND ($5::int IS NULL OR course_number::int <= $5)
      AND ($6::bool = false OR max_enrollment > enrollment)
      AND ($7::text IS NULL OR instructional_method = $7)
      AND ($8::text IS NULL OR campus = $8)
"#;

/// Build a safe ORDER BY clause from typed sort parameters.
///
/// All column names are hardcoded string literals — no caller input is interpolated.
fn sort_clause(column: Option<SortColumn>, direction: Option<SortDirection>) -> String {
    let dir = match direction.unwrap_or(SortDirection::Asc) {
        SortDirection::Asc => "ASC",
        SortDirection::Desc => "DESC",
    };

    match column {
        Some(SortColumn::CourseCode) => {
            format!("subject {dir}, course_number {dir}, sequence_number {dir}")
        }
        Some(SortColumn::Title) => format!("title {dir}"),
        Some(SortColumn::Instructor) => {
            format!(
                "(SELECT i.display_name FROM course_instructors ci \
                 JOIN instructors i ON i.id = ci.instructor_id \
                 WHERE ci.course_id = courses.id AND ci.is_primary = true \
                 LIMIT 1) {dir} NULLS LAST"
            )
        }
        Some(SortColumn::Time) => {
            format!("(meeting_times->0->>'begin_time') {dir} NULLS LAST")
        }
        Some(SortColumn::Seats) => {
            format!("(max_enrollment - enrollment) {dir}")
        }
        None => "subject ASC, course_number ASC, sequence_number ASC".to_string(),
    }
}
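
// The clause builder is total over its inputs; a quick test sketch of the
// strings it yields, derived directly from the match arms above:
#[cfg(test)]
mod sort_clause_sketch {
    use super::*;

    #[test]
    fn seats_descending() {
        assert_eq!(
            sort_clause(Some(SortColumn::Seats), Some(SortDirection::Desc)),
            "(max_enrollment - enrollment) DESC"
        );
    }

    #[test]
    fn defaults_when_unspecified() {
        assert_eq!(
            sort_clause(None, None),
            "subject ASC, course_number ASC, sequence_number ASC"
        );
    }
}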

/// Search courses by term with optional filters.
///
@@ -12,7 +78,7 @@ use sqlx::PgPool;
pub async fn search_courses(
    db_pool: &PgPool,
    term_code: &str,
    subject: Option<&str>,
    subject: Option<&[String]>,
    title_query: Option<&str>,
    course_number_low: Option<i32>,
    course_number_high: Option<i32>,
@@ -21,32 +87,16 @@ pub async fn search_courses(
    campus: Option<&str>,
    limit: i32,
    offset: i32,
    order_by: &str,
    sort_by: Option<SortColumn>,
    sort_dir: Option<SortDirection>,
) -> Result<(Vec<Course>, i64)> {
    // Build WHERE clauses dynamically via parameter binding + COALESCE trick:
    // each optional filter uses ($N IS NULL OR column = $N) so NULL means "no filter".
    //
    // ORDER BY is interpolated as a string since column names can't be bound as
    // parameters. The caller must provide a safe, pre-validated clause (see
    // `sort_clause` in routes.rs).
    let query = format!(
        r#"
        SELECT *
        FROM courses
        WHERE term_code = $1
          AND ($2::text IS NULL OR subject = $2)
          AND ($3::text IS NULL OR title_search @@ plainto_tsquery('simple', $3) OR title ILIKE '%' || $3 || '%')
          AND ($4::int IS NULL OR course_number::int >= $4)
          AND ($5::int IS NULL OR course_number::int <= $5)
          AND ($6::bool = false OR max_enrollment > enrollment)
          AND ($7::text IS NULL OR instructional_method = $7)
          AND ($8::text IS NULL OR campus = $8)
        ORDER BY {order_by}
        LIMIT $9 OFFSET $10
        "#
    );
    let order_by = sort_clause(sort_by, sort_dir);

    let courses = sqlx::query_as::<_, Course>(&query)
    let data_query =
        format!("SELECT * FROM courses {SEARCH_WHERE} ORDER BY {order_by} LIMIT $9 OFFSET $10");
    let count_query = format!("SELECT COUNT(*) FROM courses {SEARCH_WHERE}");

    let courses = sqlx::query_as::<_, Course>(&data_query)
        .bind(term_code)
        .bind(subject)
        .bind(title_query)
@@ -60,30 +110,17 @@ pub async fn search_courses(
        .fetch_all(db_pool)
        .await?;

    let total: (i64,) = sqlx::query_as(
        r#"
        SELECT COUNT(*)
        FROM courses
        WHERE term_code = $1
          AND ($2::text IS NULL OR subject = $2)
          AND ($3::text IS NULL OR title_search @@ plainto_tsquery('simple', $3) OR title ILIKE '%' || $3 || '%')
          AND ($4::int IS NULL OR course_number::int >= $4)
          AND ($5::int IS NULL OR course_number::int <= $5)
          AND ($6::bool = false OR max_enrollment > enrollment)
          AND ($7::text IS NULL OR instructional_method = $7)
          AND ($8::text IS NULL OR campus = $8)
        "#,
    )
    .bind(term_code)
    .bind(subject)
    .bind(title_query)
    .bind(course_number_low)
    .bind(course_number_high)
    .bind(open_only)
    .bind(instructional_method)
    .bind(campus)
    .fetch_one(db_pool)
    .await?;
    let total: (i64,) = sqlx::query_as(&count_query)
        .bind(term_code)
        .bind(subject)
        .bind(title_query)
        .bind(course_number_low)
        .bind(course_number_high)
        .bind(open_only)
        .bind(instructional_method)
        .bind(campus)
        .fetch_one(db_pool)
        .await?;

    Ok((courses, total.0))
}
@@ -103,36 +140,26 @@ pub async fn get_course_by_crn(
    Ok(course)
}

/// Get instructors for a course by course ID.
///
/// Returns `(banner_id, display_name, email, is_primary, rmp_avg_rating, rmp_num_ratings)` tuples.
/// Get instructors for a single course by course ID.
pub async fn get_course_instructors(
    db_pool: &PgPool,
    course_id: i32,
) -> Result<
    Vec<(
        String,
        String,
        Option<String>,
        bool,
        Option<f32>,
        Option<i32>,
    )>,
> {
    let rows: Vec<(
        String,
        String,
        Option<String>,
        bool,
        Option<f32>,
        Option<i32>,
    )> = sqlx::query_as(
) -> Result<Vec<CourseInstructorDetail>> {
    let rows = sqlx::query_as::<_, CourseInstructorDetail>(
        r#"
        SELECT i.banner_id, i.display_name, i.email, ci.is_primary,
               rp.avg_rating, rp.num_ratings
        SELECT i.id as instructor_id, ci.banner_id, i.display_name, i.email, ci.is_primary,
               rmp.avg_rating, rmp.num_ratings, rmp.rmp_legacy_id,
               ci.course_id
        FROM course_instructors ci
        JOIN instructors i ON i.banner_id = ci.instructor_id
        LEFT JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
        JOIN instructors i ON i.id = ci.instructor_id
        LEFT JOIN LATERAL (
            SELECT rp.avg_rating, rp.num_ratings, rp.legacy_id as rmp_legacy_id
            FROM instructor_rmp_links irl
            JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
            WHERE irl.instructor_id = i.id
            ORDER BY rp.num_ratings DESC NULLS LAST, rp.legacy_id ASC
            LIMIT 1
        ) rmp ON true
        WHERE ci.course_id = $1
        ORDER BY ci.is_primary DESC, i.display_name
        "#,
@@ -143,6 +170,75 @@ pub async fn get_course_instructors(
    Ok(rows)
}

/// Batch-fetch instructors for multiple courses in a single query.
///
/// Returns a map of `course_id → Vec<CourseInstructorDetail>`.
pub async fn get_instructors_for_courses(
    db_pool: &PgPool,
    course_ids: &[i32],
) -> Result<HashMap<i32, Vec<CourseInstructorDetail>>> {
    if course_ids.is_empty() {
        return Ok(HashMap::new());
    }

    let rows = sqlx::query_as::<_, CourseInstructorDetail>(
        r#"
        SELECT i.id as instructor_id, ci.banner_id, i.display_name, i.email, ci.is_primary,
               rmp.avg_rating, rmp.num_ratings, rmp.rmp_legacy_id,
               ci.course_id
        FROM course_instructors ci
        JOIN instructors i ON i.id = ci.instructor_id
        LEFT JOIN LATERAL (
            SELECT rp.avg_rating, rp.num_ratings, rp.legacy_id as rmp_legacy_id
            FROM instructor_rmp_links irl
            JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
            WHERE irl.instructor_id = i.id
            ORDER BY rp.num_ratings DESC NULLS LAST, rp.legacy_id ASC
            LIMIT 1
        ) rmp ON true
        WHERE ci.course_id = ANY($1)
        ORDER BY ci.course_id, ci.is_primary DESC, i.display_name
        "#,
    )
    .bind(course_ids)
    .fetch_all(db_pool)
    .await?;

    let mut map: HashMap<i32, Vec<CourseInstructorDetail>> = HashMap::new();
    for row in rows {
        // course_id is always present in the batch query
        let cid = row.course_id.unwrap_or_default();
        map.entry(cid).or_default().push(row);
    }
    Ok(map)
}
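
// Hedged usage sketch (assumes the `Course` model exposes its database `id`;
// the handler shown here is illustrative, not part of this commit): one query
// hydrates instructors for a whole page of courses instead of N queries.
async fn hydrate_instructors_sketch(db_pool: &PgPool, courses: &[Course]) -> Result<()> {
    let ids: Vec<i32> = courses.iter().map(|c| c.id).collect();
    let by_course = get_instructors_for_courses(db_pool, &ids).await?;
    for course in courses {
        // A missing key simply means the course has no linked instructors.
        let instructors = by_course.get(&course.id).map(Vec::as_slice).unwrap_or(&[]);
        let _ = instructors; // ...serialize `course` together with `instructors`...
    }
    Ok(())
}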

/// Get subjects for a term, sorted by total enrollment (descending).
///
/// Returns only subjects that have courses in the given term, with their
/// descriptions from reference_data and enrollment totals for ranking.
pub async fn get_subjects_by_enrollment(
    db_pool: &PgPool,
    term_code: &str,
) -> Result<Vec<(String, String, i64)>> {
    let rows: Vec<(String, String, i64)> = sqlx::query_as(
        r#"
        SELECT c.subject,
               COALESCE(rd.description, c.subject),
               COALESCE(SUM(c.enrollment), 0) as total_enrollment
        FROM courses c
        LEFT JOIN reference_data rd ON rd.category = 'subject' AND rd.code = c.subject
        WHERE c.term_code = $1
        GROUP BY c.subject, rd.description
        ORDER BY total_enrollment DESC
        "#,
    )
    .bind(term_code)
    .fetch_all(db_pool)
    .await?;
    Ok(rows)
}

/// Get all distinct term codes that have courses in the DB.
pub async fn get_available_terms(db_pool: &PgPool) -> Result<Vec<String>> {
    let rows: Vec<(String,)> =

@@ -3,6 +3,10 @@
pub mod batch;
pub mod courses;
pub mod models;
pub mod names;
pub mod reference;
pub mod rmp;
pub mod rmp_matching;
pub mod scrape_jobs;
pub mod sessions;
pub mod users;

+133
-4
@@ -1,10 +1,46 @@
//! `sqlx` models for the database schema.

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use ts_rs::TS;

/// Serialize an `i64` as a string to avoid JavaScript precision loss for values exceeding 2^53.
fn serialize_i64_as_string<S: Serializer>(value: &i64, serializer: S) -> Result<S::Ok, S::Error> {
    serializer.serialize_str(&value.to_string())
}

/// Deserialize an `i64` from either a number or a string.
fn deserialize_i64_from_string<'de, D: Deserializer<'de>>(
    deserializer: D,
) -> Result<i64, D::Error> {
    use serde::de;

    struct I64OrStringVisitor;

    impl<'de> de::Visitor<'de> for I64OrStringVisitor {
        type Value = i64;

        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
            formatter.write_str("an integer or a string containing an integer")
        }

        fn visit_i64<E: de::Error>(self, value: i64) -> Result<i64, E> {
            Ok(value)
        }

        fn visit_u64<E: de::Error>(self, value: u64) -> Result<i64, E> {
            i64::try_from(value).map_err(|_| E::custom(format!("u64 {value} out of i64 range")))
        }

        fn visit_str<E: de::Error>(self, value: &str) -> Result<i64, E> {
            value.parse().map_err(de::Error::custom)
        }
    }

    deserializer.deserialize_any(I64OrStringVisitor)
}
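
// Why the detour through strings: JavaScript numbers lose integer precision
// above 2^53, and Discord snowflakes exceed that. A self-contained sketch of
// the failure mode the custom (de)serializers above avoid:
#[cfg(test)]
mod i64_string_roundtrip_sketch {
    #[test]
    fn snowflake_survives_as_string_but_not_as_f64() {
        let id: i64 = 175_928_847_299_117_063; // an example snowflake, well past 2^53

        // The lossy path: a bare JSON number becomes an f64 in a JS client.
        assert_ne!((id as f64) as i64, id);

        // The lossless path used here: serialize as a string, parse it back.
        assert_eq!("175928847299117063".parse::<i64>().unwrap(), id);
    }
}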

/// Represents a meeting time stored as JSONB in the courses table.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
@@ -63,19 +99,38 @@ pub struct Course {
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct Instructor {
    pub banner_id: String,
    pub id: i32,
    pub display_name: String,
    pub email: Option<String>,
    pub email: String,
    pub rmp_match_status: String,
    pub first_name: Option<String>,
    pub last_name: Option<String>,
}

#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct CourseInstructor {
    pub course_id: i32,
    pub instructor_id: String,
    pub instructor_id: i32,
    pub banner_id: String,
    pub is_primary: bool,
}

/// Joined instructor data for a course (from course_instructors + instructors + rmp_professors).
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct CourseInstructorDetail {
    pub instructor_id: i32,
    pub banner_id: String,
    pub display_name: String,
    pub email: String,
    pub is_primary: bool,
    pub avg_rating: Option<f32>,
    pub num_ratings: Option<i32>,
    pub rmp_legacy_id: Option<i32>,
    /// Present when fetched via batch query; `None` for single-course queries.
    pub course_id: Option<i32>,
}

#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct ReferenceData {
@@ -106,6 +161,16 @@ pub struct CourseAudit {
    pub new_value: String,
}

/// Aggregate counts returned by batch upsert, used for scrape job result logging.
#[derive(Debug, Clone, Default)]
pub struct UpsertCounts {
    pub courses_fetched: i32,
    pub courses_changed: i32,
    pub courses_unchanged: i32,
    pub audits_generated: i32,
    pub metrics_generated: i32,
}

/// The priority level of a scrape job.
#[derive(sqlx::Type, Copy, Debug, Clone)]
#[sqlx(type_name = "scrape_priority", rename_all = "PascalCase")]
@@ -126,6 +191,20 @@ pub enum TargetType {
    SingleCrn,
}

/// Computed status for a scrape job, derived from existing fields.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum ScrapeJobStatus {
    Processing,
    StaleLock,
    Exhausted,
    Scheduled,
    Pending,
}

/// How long a lock can be held before it is considered stale (mirrors `scrape_jobs::LOCK_EXPIRY`).
const LOCK_EXPIRY_SECS: i64 = 10 * 60;

/// Represents a queryable job from the database.
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
@@ -141,4 +220,54 @@ pub struct ScrapeJob {
    pub retry_count: i32,
    /// Maximum number of retry attempts allowed (non-negative, enforced by CHECK constraint)
    pub max_retries: i32,
    /// When the job last entered the "ready to pick up" state.
    /// Set to NOW() on creation; updated to NOW() on retry.
    pub queued_at: DateTime<Utc>,
}

impl ScrapeJob {
    /// Compute the current status of this job from its fields.
    pub fn status(&self) -> ScrapeJobStatus {
        let now = Utc::now();
        match self.locked_at {
            Some(locked) if (now - locked).num_seconds() < LOCK_EXPIRY_SECS => {
                ScrapeJobStatus::Processing
            }
            Some(_) => ScrapeJobStatus::StaleLock,
            None if self.retry_count >= self.max_retries && self.max_retries > 0 => {
                ScrapeJobStatus::Exhausted
            }
            None if self.execute_at > now => ScrapeJobStatus::Scheduled,
            None => ScrapeJobStatus::Pending,
        }
    }
}

/// A user authenticated via Discord OAuth.
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct User {
    #[serde(
        serialize_with = "serialize_i64_as_string",
        deserialize_with = "deserialize_i64_from_string"
    )]
    #[ts(type = "string")]
    pub discord_id: i64,
    pub discord_username: String,
    pub discord_avatar_hash: Option<String>,
    pub is_admin: bool,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

/// A server-side session for an authenticated user.
#[allow(dead_code)] // Fields read via sqlx::FromRow; some only used in DB queries
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct UserSession {
    pub id: String,
    pub user_id: i64,
    pub created_at: DateTime<Utc>,
    pub expires_at: DateTime<Utc>,
    pub last_active_at: DateTime<Utc>,
}

@@ -0,0 +1,728 @@
//! Name parsing, normalization, and matching utilities.
//!
//! Handles the mismatch between Banner's single `display_name` ("Last, First Middle")
//! and RMP's separate `first_name`/`last_name` fields, plus data quality issues
//! from both sources (HTML entities, accents, nicknames, suffixes, junk).

use sqlx::PgPool;
use tracing::{info, warn};
use unicode_normalization::UnicodeNormalization;

/// Known name suffixes to extract from the last-name portion.
const SUFFIXES: &[&str] = &["iv", "iii", "ii", "jr", "sr"];

/// Parsed, cleaned name components.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct NameParts {
    /// Cleaned display-quality first name(s): "H. Paul", "María"
    pub first: String,
    /// Cleaned display-quality last name: "O'Brien", "LeBlanc"
    pub last: String,
    /// Middle name/initial if detected: "Manuel", "L."
    pub middle: Option<String>,
    /// Suffix if detected: "III", "Jr"
    pub suffix: Option<String>,
    /// Nicknames extracted from parentheses: ["Ken"], ["Qian"]
    pub nicknames: Vec<String>,
}

/// Decode common HTML entities found in Banner data.
///
/// Handles both named entities (`&amp;`, `&uuml;`) and numeric references
/// (`&#39;`, `&#8217;`).
fn decode_html_entities(s: &str) -> String {
    if !s.contains('&') {
        return s.to_string();
    }
    htmlize::unescape(s).to_string()
}

/// Extract parenthesized nicknames from a name string.
///
/// `"William (Ken)"` → `("William", vec!["Ken"])`
/// `"Guenevere (Qian)"` → `("Guenevere", vec!["Qian"])`
/// `"John (jack) C."` → `("John C.", vec!["jack"])`
fn extract_nicknames(s: &str) -> (String, Vec<String>) {
    let mut nicknames = Vec::new();
    let mut cleaned = String::with_capacity(s.len());
    let mut chars = s.chars().peekable();

    while let Some(ch) = chars.next() {
        if ch == '(' {
            let mut nick = String::new();
            for inner in chars.by_ref() {
                if inner == ')' {
                    break;
                }
                nick.push(inner);
            }
            let nick = nick.trim().to_string();
            if !nick.is_empty() {
                nicknames.push(nick);
            }
        } else if ch == '"' || ch == '\u{201C}' || ch == '\u{201D}' {
            // Extract quoted nicknames: Thomas "Butch" → nickname "Butch"
            let mut nick = String::new();
            for inner in chars.by_ref() {
                if inner == '"' || inner == '\u{201C}' || inner == '\u{201D}' {
                    break;
                }
                nick.push(inner);
            }
            let nick = nick.trim().to_string();
            if !nick.is_empty() {
                nicknames.push(nick);
            }
        } else {
            cleaned.push(ch);
        }
    }

    // Collapse multiple spaces left by extraction
    let cleaned = collapse_whitespace(&cleaned);
    (cleaned, nicknames)
}

/// Extract a suffix (Jr, Sr, II, III, IV) from the last-name portion.
///
/// `"LeBlanc III"` → `("LeBlanc", Some("III"))`
/// `"Smith Jr."` → `("Smith", Some("Jr."))`
fn extract_suffix(last: &str) -> (String, Option<String>) {
    // Try to match the last token as a suffix
    let tokens: Vec<&str> = last.split_whitespace().collect();
    if tokens.len() < 2 {
        return (last.to_string(), None);
    }

    let candidate = tokens.last().unwrap();
    let candidate_normalized = candidate.to_lowercase().trim_end_matches('.').to_string();

    if SUFFIXES.contains(&candidate_normalized.as_str()) {
        let name_part = tokens[..tokens.len() - 1].join(" ");
        return (name_part, Some(candidate.to_string()));
    }

    (last.to_string(), None)
}

/// Strip junk commonly found in RMP name fields.
///
/// - Trailing commas: `"Cronenberger,"` → `"Cronenberger"`
/// - Email addresses: `"Neel.Baumgardner@utsa.edu"` → `""` (returns empty)
fn strip_junk(s: &str) -> String {
    let s = s.trim();

    // If the string looks like an email, return empty
    if s.contains('@') && s.contains('.') && !s.contains(' ') {
        return String::new();
    }

    // Strip trailing commas
    s.trim_end_matches(',').trim().to_string()
}

/// Collapse runs of whitespace into single spaces and trim.
fn collapse_whitespace(s: &str) -> String {
    s.split_whitespace().collect::<Vec<_>>().join(" ")
}

/// Parse a Banner `display_name` ("Last, First Middle") into structured parts.
///
/// Handles HTML entities, suffixes, and multi-token names.
///
/// # Examples
///
/// ```
/// use banner::data::names::parse_banner_name;
///
/// let parts = parse_banner_name("O'Brien, Erin").unwrap();
/// assert_eq!(parts.first, "Erin");
/// assert_eq!(parts.last, "O'Brien");
/// ```
pub fn parse_banner_name(display_name: &str) -> Option<NameParts> {
    // 1. Decode HTML entities
    let decoded = decode_html_entities(display_name);

    // 2. Split on first comma
    let (last_part, first_part) = decoded.split_once(',')?;
    let last_part = last_part.trim();
    let first_part = first_part.trim();

    if last_part.is_empty() || first_part.is_empty() {
        return None;
    }

    // 3. Extract suffix from last name
    let (last_clean, suffix) = extract_suffix(last_part);

    // 4. Parse first-name portion: first token(s) + optional middle
    //    Banner format is "First Middle", so we keep all tokens as first_name
    //    to support "H. Paul" style names
    let first_clean = collapse_whitespace(first_part);

    Some(NameParts {
        first: first_clean,
        last: last_clean,
        middle: None, // Banner doesn't clearly delineate middle vs first
        suffix,
        nicknames: Vec::new(), // Banner doesn't include nicknames
    })
}

/// Parse RMP professor name fields into structured parts.
///
/// Handles junk data, nicknames in parentheses/quotes, and suffixes.
///
/// # Examples
///
/// ```
/// use banner::data::names::parse_rmp_name;
///
/// let parts = parse_rmp_name("William (Ken)", "Burchenal").unwrap();
/// assert_eq!(parts.first, "William");
/// assert_eq!(parts.nicknames, vec!["Ken"]);
/// ```
pub fn parse_rmp_name(first_name: &str, last_name: &str) -> Option<NameParts> {
    let first_cleaned = strip_junk(first_name);
    let last_cleaned = strip_junk(last_name);

    if first_cleaned.is_empty() || last_cleaned.is_empty() {
        return None;
    }

    // Extract nicknames from parens/quotes in first name
    let (first_no_nicks, nicknames) = extract_nicknames(&first_cleaned);
    let first_final = collapse_whitespace(&first_no_nicks);

    // Extract suffix from last name
    let (last_final, suffix) = extract_suffix(&last_cleaned);

    if first_final.is_empty() || last_final.is_empty() {
        return None;
    }

    Some(NameParts {
        first: first_final,
        last: last_final,
        middle: None,
        suffix,
        nicknames,
    })
}

/// Normalize a name string for matching comparison.
///
/// Pipeline: lowercase → NFD decompose → strip combining marks →
/// strip punctuation/hyphens → collapse whitespace → trim.
///
/// # Examples
///
/// ```
/// use banner::data::names::normalize_for_matching;
///
/// assert_eq!(normalize_for_matching("García"), "garcia");
/// assert_eq!(normalize_for_matching("O'Brien"), "obrien");
/// assert_eq!(normalize_for_matching("Aguirre-Mesa"), "aguirremesa");
/// ```
/// Normalize a name string for matching index keys.
///
/// Pipeline: lowercase → NFD decompose → strip combining marks →
/// strip ALL punctuation, hyphens, and whitespace.
///
/// This produces a compact, space-free string so that "Aguirre Mesa" (Banner)
/// and "Aguirre-Mesa" (RMP) both become "aguirremesa".
///
/// # Examples
///
/// ```
/// use banner::data::names::normalize_for_matching;
///
/// assert_eq!(normalize_for_matching("García"), "garcia");
/// assert_eq!(normalize_for_matching("O'Brien"), "obrien");
/// assert_eq!(normalize_for_matching("Aguirre-Mesa"), "aguirremesa");
/// assert_eq!(normalize_for_matching("Aguirre Mesa"), "aguirremesa");
/// ```
pub fn normalize_for_matching(s: &str) -> String {
    s.to_lowercase()
        .nfd()
        .filter(|c| {
            // Keep only non-combining alphabetic characters — strip everything else
            c.is_alphabetic() && !unicode_normalization::char::is_combining_mark(*c)
        })
        .collect()
}

/// Generate all matching index keys for a parsed name.
///
/// For a name like "H. Paul" / "LeBlanc" with no nicknames, generates:
/// - `("leblanc", "h paul")` — full normalized first
/// - `("leblanc", "paul")` — individual token (if multi-token)
/// - `("leblanc", "h")` — individual token (if multi-token)
///
/// For a name like "William" / "Burchenal" with nickname "Ken":
/// - `("burchenal", "william")` — primary
/// - `("burchenal", "ken")` — nickname variant
pub fn matching_keys(parts: &NameParts) -> Vec<(String, String)> {
    let norm_last = normalize_for_matching(&parts.last);
    if norm_last.is_empty() {
        return Vec::new();
    }

    let mut keys = Vec::new();
    let mut seen = std::collections::HashSet::new();

    // Primary key: full first name (all spaces stripped)
    let norm_first_full = normalize_for_matching(&parts.first);
    if !norm_first_full.is_empty() && seen.insert(norm_first_full.clone()) {
        keys.push((norm_last.clone(), norm_first_full));
    }

    // Individual tokens from the display-form first name
    // (split before full normalization so we can generate per-token keys)
    let first_tokens: Vec<&str> = parts.first.split_whitespace().collect();
    if first_tokens.len() > 1 {
        for token in &first_tokens {
            let norm_token = normalize_for_matching(token);
            if !norm_token.is_empty() && seen.insert(norm_token.clone()) {
                keys.push((norm_last.clone(), norm_token));
            }
        }
    }

    // Nickname variants
    for nick in &parts.nicknames {
        let norm_nick = normalize_for_matching(nick);
        if !norm_nick.is_empty() && seen.insert(norm_nick.clone()) {
            keys.push((norm_last.clone(), norm_nick));
        }
    }

    keys
}
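
// Concrete key fan-out for a multi-token first name, as a test sketch (note
// the full-first key is space-free, matching the normalization above):
#[cfg(test)]
mod matching_keys_sketch {
    use super::*;

    #[test]
    fn multi_token_first_name_fans_out() {
        let parts = NameParts {
            first: "H. Paul".to_string(),
            last: "LeBlanc".to_string(),
            middle: None,
            suffix: Some("III".to_string()),
            nicknames: Vec::new(),
        };
        assert_eq!(
            matching_keys(&parts),
            vec![
                ("leblanc".to_string(), "hpaul".to_string()),
                ("leblanc".to_string(), "h".to_string()),
                ("leblanc".to_string(), "paul".to_string()),
            ]
        );
    }
}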
|
||||
|
||||
/// Backfill `first_name`/`last_name` columns for all instructors that have
|
||||
/// a `display_name` but NULL structured name fields.
|
||||
///
|
||||
/// Parses each `display_name` using [`parse_banner_name`] and updates the row.
|
||||
/// Logs warnings for any names that fail to parse.
|
||||
pub async fn backfill_instructor_names(db_pool: &PgPool) -> crate::error::Result<()> {
|
||||
let rows: Vec<(i32, String)> = sqlx::query_as(
|
||||
"SELECT id, display_name FROM instructors WHERE first_name IS NULL OR last_name IS NULL",
|
||||
)
|
||||
.fetch_all(db_pool)
|
||||
.await?;
|
||||
|
||||
if rows.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let total = rows.len();
|
||||
let mut ids: Vec<i32> = Vec::with_capacity(total);
|
||||
let mut firsts: Vec<String> = Vec::with_capacity(total);
|
||||
let mut lasts: Vec<String> = Vec::with_capacity(total);
|
||||
let mut unparseable = 0usize;
|
||||
|
||||
for (id, display_name) in &rows {
|
||||
match parse_banner_name(display_name) {
|
||||
Some(parts) => {
|
||||
ids.push(*id);
|
||||
firsts.push(parts.first);
|
||||
lasts.push(parts.last);
|
||||
}
|
||||
None => {
|
||||
warn!(
|
||||
id,
|
||||
display_name, "Failed to parse instructor display_name during backfill"
|
||||
);
|
||||
unparseable += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !ids.is_empty() {
|
||||
let first_refs: Vec<&str> = firsts.iter().map(|s| s.as_str()).collect();
|
||||
let last_refs: Vec<&str> = lasts.iter().map(|s| s.as_str()).collect();
|
||||
|
||||
sqlx::query(
|
||||
r#"
|
||||
UPDATE instructors i
|
||||
SET first_name = v.first_name, last_name = v.last_name
|
||||
FROM UNNEST($1::int4[], $2::text[], $3::text[])
|
||||
AS v(id, first_name, last_name)
|
||||
WHERE i.id = v.id
|
||||
"#,
|
||||
)
|
||||
.bind(&ids)
|
||||
.bind(&first_refs)
|
||||
.bind(&last_refs)
|
||||
.execute(db_pool)
|
||||
.await?;
|
||||
}
|
||||
|
||||
info!(
|
||||
total,
|
||||
updated = ids.len(),
|
||||
unparseable,
|
||||
"Instructor name backfill complete"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// HTML entity decoding
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
#[test]
|
||||
fn decode_apostrophe_entity() {
|
||||
assert_eq!(decode_html_entities("O'Brien"), "O'Brien");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn decode_umlaut_entity() {
|
||||
assert_eq!(decode_html_entities("Bülent"), "Bülent");
    }

    #[test]
    fn decode_no_entities() {
        assert_eq!(decode_html_entities("Smith"), "Smith");
    }

    // -----------------------------------------------------------------------
    // Nickname extraction
    // -----------------------------------------------------------------------

    #[test]
    fn extract_paren_nickname() {
        let (cleaned, nicks) = extract_nicknames("William (Ken)");
        assert_eq!(cleaned, "William");
        assert_eq!(nicks, vec!["Ken"]);
    }

    #[test]
    fn extract_quoted_nickname() {
        let (cleaned, nicks) = extract_nicknames("Thomas \"Butch\"");
        assert_eq!(cleaned, "Thomas");
        assert_eq!(nicks, vec!["Butch"]);
    }

    #[test]
    fn extract_paren_with_extra_text() {
        let (cleaned, nicks) = extract_nicknames("John (jack) C.");
        assert_eq!(cleaned, "John C.");
        assert_eq!(nicks, vec!["jack"]);
    }

    #[test]
    fn extract_no_nicknames() {
        let (cleaned, nicks) = extract_nicknames("Maria Elena");
        assert_eq!(cleaned, "Maria Elena");
        assert!(nicks.is_empty());
    }

    // -----------------------------------------------------------------------
    // Suffix extraction
    // -----------------------------------------------------------------------

    #[test]
    fn extract_suffix_iii() {
        let (name, suffix) = extract_suffix("LeBlanc III");
        assert_eq!(name, "LeBlanc");
        assert_eq!(suffix, Some("III".to_string()));
    }

    #[test]
    fn extract_suffix_jr_period() {
        let (name, suffix) = extract_suffix("Smith Jr.");
        assert_eq!(name, "Smith");
        assert_eq!(suffix, Some("Jr.".to_string()));
    }

    #[test]
    fn extract_no_suffix() {
        let (name, suffix) = extract_suffix("García");
        assert_eq!(name, "García");
        assert_eq!(suffix, None);
    }

    // -----------------------------------------------------------------------
    // Junk stripping
    // -----------------------------------------------------------------------

    #[test]
    fn strip_trailing_comma() {
        assert_eq!(strip_junk("Cronenberger,"), "Cronenberger");
    }

    #[test]
    fn strip_email_address() {
        assert_eq!(strip_junk("Neel.Baumgardner@utsa.edu"), "");
    }

    #[test]
    fn strip_clean_name() {
        assert_eq!(strip_junk(" Maria "), "Maria");
    }

    // -----------------------------------------------------------------------
    // normalize_for_matching
    // -----------------------------------------------------------------------

    #[test]
    fn normalize_strips_accents() {
        assert_eq!(normalize_for_matching("García"), "garcia");
    }

    #[test]
    fn normalize_strips_apostrophe() {
        assert_eq!(normalize_for_matching("O'Brien"), "obrien");
    }

    #[test]
    fn normalize_strips_hyphen() {
        assert_eq!(normalize_for_matching("Aguirre-Mesa"), "aguirremesa");
    }

    #[test]
    fn normalize_tilde_n() {
        assert_eq!(normalize_for_matching("Muñoz"), "munoz");
    }

    #[test]
    fn normalize_umlaut() {
        assert_eq!(normalize_for_matching("Müller"), "muller");
    }

    #[test]
    fn normalize_period() {
        assert_eq!(normalize_for_matching("H. Paul"), "hpaul");
    }

    #[test]
    fn normalize_strips_spaces() {
        assert_eq!(normalize_for_matching("Mary Lou"), "marylou");
    }

    // -----------------------------------------------------------------------
    // parse_banner_name
    // -----------------------------------------------------------------------

    #[test]
    fn banner_standard_name() {
        let p = parse_banner_name("Smith, John").unwrap();
        assert_eq!(p.first, "John");
        assert_eq!(p.last, "Smith");
        assert_eq!(p.suffix, None);
    }

    #[test]
    fn banner_html_entity_apostrophe() {
        let p = parse_banner_name("O&#39;Brien, Erin").unwrap();
        assert_eq!(p.first, "Erin");
        assert_eq!(p.last, "O'Brien");
    }

    #[test]
    fn banner_html_entity_umlaut() {
        let p = parse_banner_name("Temel, B&#252;lent").unwrap();
        assert_eq!(p.first, "Bülent");
        assert_eq!(p.last, "Temel");
    }

    #[test]
    fn banner_suffix_iii() {
        let p = parse_banner_name("LeBlanc III, H. Paul").unwrap();
        assert_eq!(p.first, "H. Paul");
        assert_eq!(p.last, "LeBlanc");
        assert_eq!(p.suffix, Some("III".to_string()));
    }

    #[test]
    fn banner_suffix_ii() {
        let p = parse_banner_name("Ellis II, Ronald").unwrap();
        assert_eq!(p.first, "Ronald");
        assert_eq!(p.last, "Ellis");
        assert_eq!(p.suffix, Some("II".to_string()));
    }

    #[test]
    fn banner_multi_word_last() {
        let p = parse_banner_name("Aguirre Mesa, Andres").unwrap();
        assert_eq!(p.first, "Andres");
        assert_eq!(p.last, "Aguirre Mesa");
    }

    #[test]
    fn banner_hyphenated_last() {
        let p = parse_banner_name("Abu-Lail, Nehal").unwrap();
        assert_eq!(p.first, "Nehal");
        assert_eq!(p.last, "Abu-Lail");
    }

    #[test]
    fn banner_with_middle_name() {
        let p = parse_banner_name("Smith, John David").unwrap();
        assert_eq!(p.first, "John David");
        assert_eq!(p.last, "Smith");
    }

    #[test]
    fn banner_no_comma() {
        assert!(parse_banner_name("SingleName").is_none());
    }

    #[test]
    fn banner_empty_first() {
        assert!(parse_banner_name("Smith,").is_none());
    }

    #[test]
    fn banner_empty_last() {
        assert!(parse_banner_name(", John").is_none());
    }

    // -----------------------------------------------------------------------
    // parse_rmp_name
    // -----------------------------------------------------------------------

    #[test]
    fn rmp_standard_name() {
        let p = parse_rmp_name("John", "Smith").unwrap();
        assert_eq!(p.first, "John");
        assert_eq!(p.last, "Smith");
    }

    #[test]
    fn rmp_with_nickname() {
        let p = parse_rmp_name("William (Ken)", "Burchenal").unwrap();
        assert_eq!(p.first, "William");
        assert_eq!(p.nicknames, vec!["Ken"]);
    }

    #[test]
    fn rmp_trailing_comma_last() {
        let p = parse_rmp_name("J.", "Cronenberger,").unwrap();
        assert_eq!(p.last, "Cronenberger");
    }

    #[test]
    fn rmp_email_in_first() {
        assert!(parse_rmp_name("Neel.Baumgardner@utsa.edu", "Baumgardner").is_none());
    }

    #[test]
    fn rmp_suffix_in_last() {
        let p = parse_rmp_name("H. Paul", "LeBlanc III").unwrap();
        assert_eq!(p.first, "H. Paul");
        assert_eq!(p.last, "LeBlanc");
        assert_eq!(p.suffix, Some("III".to_string()));
    }

    #[test]
    fn rmp_quoted_nickname() {
        let p = parse_rmp_name("Thomas \"Butch\"", "Matjeka").unwrap();
        assert_eq!(p.first, "Thomas");
        assert_eq!(p.nicknames, vec!["Butch"]);
    }

    #[test]
    fn rmp_accented_last() {
        let p = parse_rmp_name("Liliana", "Saldaña").unwrap();
        assert_eq!(p.last, "Saldaña");
    }

    // -----------------------------------------------------------------------
    // matching_keys
    // -----------------------------------------------------------------------

    #[test]
    fn keys_simple_name() {
        let parts = NameParts {
            first: "John".into(),
            last: "Smith".into(),
            middle: None,
            suffix: None,
            nicknames: vec![],
        };
        let keys = matching_keys(&parts);
        assert_eq!(keys, vec![("smith".into(), "john".into())]);
    }

    #[test]
    fn keys_multi_token_first() {
        let parts = NameParts {
            first: "H. Paul".into(),
            last: "LeBlanc".into(),
            middle: None,
            suffix: Some("III".into()),
            nicknames: vec![],
        };
        let keys = matching_keys(&parts);
        assert!(keys.contains(&("leblanc".into(), "hpaul".into())));
        assert!(keys.contains(&("leblanc".into(), "paul".into())));
        assert!(keys.contains(&("leblanc".into(), "h".into())));
        assert_eq!(keys.len(), 3);
    }

    #[test]
    fn keys_with_nickname() {
        let parts = NameParts {
            first: "William".into(),
            last: "Burchenal".into(),
            middle: None,
            suffix: None,
            nicknames: vec!["Ken".into()],
        };
        let keys = matching_keys(&parts);
        assert!(keys.contains(&("burchenal".into(), "william".into())));
        assert!(keys.contains(&("burchenal".into(), "ken".into())));
        assert_eq!(keys.len(), 2);
    }

    #[test]
    fn keys_hyphenated_last() {
        let parts = parse_banner_name("Aguirre-Mesa, Andres").unwrap();
        let keys = matching_keys(&parts);
        // Hyphen removed: "aguirremesa"
        assert!(keys.contains(&("aguirremesa".into(), "andres".into())));
    }

    #[test]
    fn keys_accented_name() {
        let parts = parse_rmp_name("Liliana", "Saldaña").unwrap();
        let keys = matching_keys(&parts);
        assert!(keys.contains(&("saldana".into(), "liliana".into())));
    }

    #[test]
    fn keys_cross_source_match() {
        // Banner: "Aguirre Mesa, Andres" → last="Aguirre Mesa"
        let banner = parse_banner_name("Aguirre Mesa, Andres").unwrap();
        let banner_keys = matching_keys(&banner);

        // RMP: "Andres" / "Aguirre-Mesa" → last="Aguirre-Mesa"
        let rmp = parse_rmp_name("Andres", "Aguirre-Mesa").unwrap();
        let rmp_keys = matching_keys(&rmp);

        // Both should normalize to ("aguirremesa", "andres")
        assert!(banner_keys.iter().any(|k| rmp_keys.contains(k)));
    }

    #[test]
    fn keys_accent_cross_match() {
        // Banner: "García, José" (if Banner ever has accents)
        let banner = parse_banner_name("Garcia, Jose").unwrap();
        let banner_keys = matching_keys(&banner);

        // RMP: "José" / "García"
        let rmp = parse_rmp_name("José", "García").unwrap();
        let rmp_keys = matching_keys(&rmp);

        // Both normalize to ("garcia", "jose")
        assert!(banner_keys.iter().any(|k| rmp_keys.contains(k)));
    }
}

+76 -195
@@ -3,8 +3,7 @@
use crate::error::Result;
use crate::rmp::RmpProfessor;
use sqlx::PgPool;
use std::collections::{HashMap, HashSet};
use tracing::{debug, info, warn};
use std::collections::HashSet;

/// Bulk upsert RMP professors using the UNNEST pattern.
///
@@ -92,220 +91,102 @@ pub async fn batch_upsert_rmp_professors(
    Ok(())
}

/// Normalize a name for matching: lowercase, trim, strip trailing periods.
fn normalize(s: &str) -> String {
    s.trim().to_lowercase().trim_end_matches('.').to_string()
}

/// Parse Banner's "Last, First Middle" display name into (last, first) tokens.
/// Retrieve RMP rating data for an instructor by instructor id.
///
/// Returns `None` if the format is unparseable (no comma, empty parts).
fn parse_display_name(display_name: &str) -> Option<(String, String)> {
    let (last_part, first_part) = display_name.split_once(',')?;
    let last = normalize(last_part);
    // Take only the first token of the first-name portion to drop middle names/initials.
    let first = normalize(first_part.split_whitespace().next()?);
    if last.is_empty() || first.is_empty() {
        return None;
    }
    Some((last, first))
}
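// For illustration, using values from the tests below: "Smith, John David"
// parses to ("smith", "john") since only the first token after the comma is
// kept, while "SingleName" (no comma) yields None.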

/// Auto-match instructors to RMP professors by normalized name.
///
/// Loads all pending instructors and all RMP professors, then matches in Rust
/// using normalized name comparison. Only assigns a match when exactly one RMP
/// professor matches a given instructor.
pub async fn auto_match_instructors(db_pool: &PgPool) -> Result<u64> {
    // Load pending instructors
    let instructors: Vec<(String, String)> = sqlx::query_as(
        "SELECT banner_id, display_name FROM instructors WHERE rmp_match_status = 'pending'",
    )
    .fetch_all(db_pool)
    .await?;

    if instructors.is_empty() {
        info!(matched = 0, "No pending instructors to match");
        return Ok(0);
    }

    // Load all RMP professors
    let professors: Vec<(i32, String, String)> =
        sqlx::query_as("SELECT legacy_id, first_name, last_name FROM rmp_professors")
            .fetch_all(db_pool)
            .await?;

    // Build a lookup: (normalized_last, normalized_first) -> list of legacy_ids
    let mut rmp_index: HashMap<(String, String), Vec<i32>> = HashMap::new();
    for (legacy_id, first, last) in &professors {
        let key = (normalize(last), normalize(first));
        rmp_index.entry(key).or_default().push(*legacy_id);
    }

    // Match each instructor
    let mut matches: Vec<(i32, String)> = Vec::new(); // (legacy_id, banner_id)
    let mut no_comma = 0u64;
    let mut no_match = 0u64;
    let mut ambiguous = 0u64;

    for (banner_id, display_name) in &instructors {
        let Some((last, first)) = parse_display_name(display_name) else {
            no_comma += 1;
            continue;
        };

        let key = (last, first);
        match rmp_index.get(&key) {
            Some(ids) if ids.len() == 1 => {
                matches.push((ids[0], banner_id.clone()));
            }
            Some(ids) => {
                ambiguous += 1;
                debug!(
                    banner_id,
                    display_name,
                    candidates = ids.len(),
                    "Ambiguous RMP match, skipping"
                );
            }
            None => {
                no_match += 1;
            }
        }
    }

    if no_comma > 0 || ambiguous > 0 {
        warn!(
            total_pending = instructors.len(),
            no_comma,
            no_match,
            ambiguous,
            matched = matches.len(),
            "RMP matching diagnostics"
        );
    }

    // Batch update matches
    if matches.is_empty() {
        info!(matched = 0, "Auto-matched instructors to RMP professors");
        return Ok(0);
    }

    let legacy_ids: Vec<i32> = matches.iter().map(|(id, _)| *id).collect();
    let banner_ids: Vec<&str> = matches.iter().map(|(_, bid)| bid.as_str()).collect();

    let result = sqlx::query(
        r#"
        UPDATE instructors i
        SET
            rmp_legacy_id = m.legacy_id,
            rmp_match_status = 'auto'
        FROM UNNEST($1::int4[], $2::text[]) AS m(legacy_id, banner_id)
        WHERE i.banner_id = m.banner_id
        "#,
    )
    .bind(&legacy_ids)
    .bind(&banner_ids)
    .execute(db_pool)
    .await
    .map_err(|e| anyhow::anyhow!("Failed to update instructor RMP matches: {}", e))?;

    let matched = result.rows_affected();
    info!(matched, "Auto-matched instructors to RMP professors");
    Ok(matched)
}
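// Cost note: with the HashMap index, matching is roughly O(I + P) over I
// pending instructors and P RMP professors, instead of the O(I * P) of
// comparing every pair directly.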

/// Retrieve RMP rating data for an instructor by banner_id.
///
/// Returns `(avg_rating, num_ratings)` if the instructor has an RMP match.
/// Returns `(avg_rating, num_ratings)` for the best linked RMP profile
/// (most ratings). Returns `None` if no link exists.
#[allow(dead_code)]
pub async fn get_instructor_rmp_data(
    db_pool: &PgPool,
    banner_id: &str,
    instructor_id: i32,
) -> Result<Option<(f32, i32)>> {
    let row: Option<(f32, i32)> = sqlx::query_as(
        r#"
        SELECT rp.avg_rating, rp.num_ratings
        FROM instructors i
        JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
        WHERE i.banner_id = $1
        FROM instructor_rmp_links irl
        JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
        WHERE irl.instructor_id = $1
            AND rp.avg_rating IS NOT NULL
        ORDER BY rp.num_ratings DESC NULLS LAST
        LIMIT 1
        "#,
    )
    .bind(banner_id)
    .bind(instructor_id)
    .fetch_optional(db_pool)
    .await?;
    Ok(row)
}

#[cfg(test)]
mod tests {
    use super::*;
/// Unmatch an instructor from an RMP profile.
///
/// Removes the link from `instructor_rmp_links` and updates the instructor's
/// `rmp_match_status` to 'unmatched' if no links remain.
///
/// If `rmp_legacy_id` is `Some`, removes only that specific link.
/// If `None`, removes all links for the instructor.
pub async fn unmatch_instructor(
    db_pool: &PgPool,
    instructor_id: i32,
    rmp_legacy_id: Option<i32>,
) -> Result<()> {
    let mut tx = db_pool.begin().await?;

    #[test]
    fn parse_standard_name() {
        assert_eq!(
            parse_display_name("Smith, John"),
            Some(("smith".into(), "john".into()))
        );
    // Delete specific link or all links
    if let Some(legacy_id) = rmp_legacy_id {
        sqlx::query(
            "DELETE FROM instructor_rmp_links WHERE instructor_id = $1 AND rmp_legacy_id = $2",
        )
        .bind(instructor_id)
        .bind(legacy_id)
        .execute(&mut *tx)
        .await?;
    } else {
        sqlx::query("DELETE FROM instructor_rmp_links WHERE instructor_id = $1")
            .bind(instructor_id)
            .execute(&mut *tx)
            .await?;
    }

    #[test]
    fn parse_name_with_middle() {
        assert_eq!(
            parse_display_name("Smith, John David"),
            Some(("smith".into(), "john".into()))
        );
    // Check if any links remain
    let (remaining,): (i64,) =
        sqlx::query_as("SELECT COUNT(*) FROM instructor_rmp_links WHERE instructor_id = $1")
            .bind(instructor_id)
            .fetch_one(&mut *tx)
            .await?;

    // Update instructor status if no links remain
    if remaining == 0 {
        sqlx::query("UPDATE instructors SET rmp_match_status = 'unmatched' WHERE id = $1")
            .bind(instructor_id)
            .execute(&mut *tx)
            .await?;
    }

    #[test]
    fn parse_name_with_middle_initial() {
        assert_eq!(
            parse_display_name("Garcia, Maria L."),
            Some(("garcia".into(), "maria".into()))
        );
    // Reset accepted candidates back to pending when unmatching
    // This allows the candidates to be re-matched later
    if let Some(legacy_id) = rmp_legacy_id {
        // Reset only the specific candidate
        sqlx::query(
            "UPDATE rmp_match_candidates
             SET status = 'pending', resolved_at = NULL, resolved_by = NULL
             WHERE instructor_id = $1 AND rmp_legacy_id = $2 AND status = 'accepted'",
        )
        .bind(instructor_id)
        .bind(legacy_id)
        .execute(&mut *tx)
        .await?;
    } else {
        // Reset all accepted candidates for this instructor
        sqlx::query(
            "UPDATE rmp_match_candidates
             SET status = 'pending', resolved_at = NULL, resolved_by = NULL
             WHERE instructor_id = $1 AND status = 'accepted'",
        )
        .bind(instructor_id)
        .execute(&mut *tx)
        .await?;
    }

    #[test]
    fn parse_name_with_suffix_in_last() {
        // Banner may encode "Jr." as part of the last name.
        // normalize() strips trailing periods so "Jr." becomes "jr".
        assert_eq!(
            parse_display_name("Smith Jr., James"),
            Some(("smith jr".into(), "james".into()))
        );
    }

    #[test]
    fn parse_no_comma_returns_none() {
        assert_eq!(parse_display_name("SingleName"), None);
    }

    #[test]
    fn parse_empty_first_returns_none() {
        assert_eq!(parse_display_name("Smith,"), None);
    }

    #[test]
    fn parse_empty_last_returns_none() {
        assert_eq!(parse_display_name(", John"), None);
    }

    #[test]
    fn parse_extra_whitespace() {
        assert_eq!(
            parse_display_name(" Doe , Jane Marie "),
            Some(("doe".into(), "jane".into()))
        );
    }

    #[test]
    fn normalize_trims_and_lowercases() {
        assert_eq!(normalize(" FOO "), "foo");
    }

    #[test]
    fn normalize_strips_trailing_period() {
        assert_eq!(normalize("Jr."), "jr");
    }
    tx.commit().await?;
    Ok(())
}

@@ -0,0 +1,695 @@
//! Confidence scoring and candidate generation for RMP instructor matching.

use crate::data::names::{matching_keys, parse_banner_name, parse_rmp_name};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::collections::{HashMap, HashSet};
use tracing::{debug, info};

// ---------------------------------------------------------------------------
// Scoring types
// ---------------------------------------------------------------------------

/// Breakdown of individual scoring signals.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ScoreBreakdown {
    pub name: f32,
    pub department: f32,
    pub uniqueness: f32,
    pub volume: f32,
}

/// Result of scoring a single instructor–RMP candidate pair.
#[derive(Debug, Clone)]
pub struct MatchScore {
    pub score: f32,
    pub breakdown: ScoreBreakdown,
}

// ---------------------------------------------------------------------------
// Thresholds
// ---------------------------------------------------------------------------

/// Minimum composite score to store a candidate row.
const MIN_CANDIDATE_THRESHOLD: f32 = 0.40;

/// Score at or above which a candidate is auto-accepted.
const AUTO_ACCEPT_THRESHOLD: f32 = 0.85;

// ---------------------------------------------------------------------------
// Weights (must sum to 1.0)
// ---------------------------------------------------------------------------

const WEIGHT_NAME: f32 = 0.50;
const WEIGHT_DEPARTMENT: f32 = 0.25;
const WEIGHT_UNIQUENESS: f32 = 0.15;
const WEIGHT_VOLUME: f32 = 0.10;

// ---------------------------------------------------------------------------
// Pure scoring functions
// ---------------------------------------------------------------------------

/// Check if an instructor's subjects overlap with an RMP department.
///
/// Returns `1.0` for a match, `0.2` for a mismatch, `0.5` when the RMP
/// department is unknown.
fn department_similarity(subjects: &[String], rmp_department: Option<&str>) -> f32 {
    let Some(dept) = rmp_department else {
        return 0.5;
    };
    let dept_lower = dept.to_lowercase();

    // Quick check: does any subject appear directly in the department string
    // or vice-versa?
    for subj in subjects {
        let subj_lower = subj.to_lowercase();
        if dept_lower.contains(&subj_lower) || subj_lower.contains(&dept_lower) {
            return 1.0;
        }

        // Handle common UTSA abbreviation mappings.
        if matches_known_abbreviation(&subj_lower, &dept_lower) {
            return 1.0;
        }
    }

    0.2
}

/// Expand common subject abbreviations used at UTSA and check for overlap.
fn matches_known_abbreviation(subject: &str, department: &str) -> bool {
    const MAPPINGS: &[(&str, &[&str])] = &[
        // Core subjects (original mappings, corrected)
        ("cs", &["computer science"]),
        ("ece", &["early childhood education", "early childhood"]),
        ("ee", &["electrical engineering", "electrical"]),
        ("me", &["mechanical engineering", "mechanical"]),
        ("ce", &["civil engineering", "civil"]),
        ("bio", &["biology", "biological"]),
        ("chem", &["chemistry"]),
        ("phys", &["physics"]),
        ("math", &["mathematics"]),
        ("sta", &["statistics"]),
        ("eng", &["english"]),
        ("his", &["history"]),
        ("pol", &["political science"]),
        ("psy", &["psychology"]),
        ("soc", &["sociology"]),
        ("mus", &["music"]),
        ("art", &["art"]),
        ("phi", &["philosophy"]),
        ("eco", &["economics"]),
        ("acc", &["accounting"]),
        ("fin", &["finance"]),
        ("mgt", &["management"]),
        ("mkt", &["marketing"]),
        ("is", &["information systems"]),
        ("ms", &["management science"]),
        ("kin", &["kinesiology"]),
        ("com", &["communication"]),
        // Architecture & Design
        ("arc", &["architecture"]),
        ("ide", &["interior design", "design"]),
        // Anthropology & Ethnic Studies
        ("ant", &["anthropology"]),
        ("aas", &["african american studies", "ethnic studies"]),
        ("mas", &["mexican american studies", "ethnic studies"]),
        ("regs", &["ethnic studies", "gender"]),
        // Languages
        ("lng", &["linguistics", "applied linguistics"]),
        ("spn", &["spanish"]),
        ("frn", &["french"]),
        ("ger", &["german"]),
        ("chn", &["chinese"]),
        ("jpn", &["japanese"]),
        ("kor", &["korean"]),
        ("itl", &["italian"]),
        ("rus", &["russian"]),
        ("lat", &["latin"]),
        ("grk", &["greek"]),
        ("asl", &["american sign language", "sign language"]),
        (
            "fl",
            &["foreign languages", "languages", "modern languages"],
        ),
        // Education
        ("edu", &["education"]),
        ("ci", &["curriculum", "education"]),
        ("edl", &["educational leadership", "education"]),
        ("edp", &["educational psychology", "education"]),
        ("bbl", &["bilingual education"]),
        ("spe", &["special education", "education"]),
        // Business
        ("ent", &["entrepreneurship"]),
        ("gba", &["general business", "business"]),
        ("blw", &["business law", "law"]),
        ("rfd", &["real estate"]),
        ("mot", &["management of technology", "management"]),
        // Engineering
        ("egr", &["engineering"]),
        ("bme", &["biomedical engineering", "engineering"]),
        ("cme", &["chemical engineering", "engineering"]),
        ("cpe", &["computer engineering", "engineering"]),
        ("ise", &["industrial", "systems engineering", "engineering"]),
        ("mate", &["materials engineering", "engineering"]),
        // Sciences
        ("che", &["chemistry"]),
        ("bch", &["biochemistry", "chemistry"]),
        ("geo", &["geology"]),
        ("phy", &["physics"]),
        ("ast", &["astronomy"]),
        ("es", &["environmental science"]),
        // Social Sciences
        ("crj", &["criminal justice"]),
        ("swk", &["social work"]),
        ("pad", &["public administration"]),
        ("grg", &["geography"]),
        ("ges", &["geography"]),
        // Humanities
        ("cla", &["classics"]),
        ("hum", &["humanities"]),
        ("wgss", &["women's studies"]),
        // Health
        ("hth", &["health"]),
        ("hcp", &["health science", "health"]),
        ("ntr", &["nutrition"]),
        // Military
        ("msc", &["military science"]),
        ("asc", &["aerospace"]),
        // Arts
        ("dan", &["dance"]),
        ("thr", &["theater"]),
        ("ahc", &["art history"]),
        // Other
        ("cou", &["counseling"]),
        ("hon", &["honors"]),
        ("csm", &["construction"]),
        ("wrc", &["writing"]),
        ("set", &["tourism management", "tourism"]),
    ];

    for &(abbr, expansions) in MAPPINGS {
        if subject == abbr {
            return expansions
                .iter()
                .any(|expansion| department.contains(expansion));
        }
    }
    false
}

/// Compute match confidence score (0.0–1.0) for an instructor–RMP pair.
///
/// The name signal is always 1.0 since candidates are only generated for
/// exact normalized name matches. The effective score range is 0.50–1.0.
pub fn compute_match_score(
    instructor_subjects: &[String],
    rmp_department: Option<&str>,
    candidate_count: usize,
    rmp_num_ratings: i32,
) -> MatchScore {
    // --- Name (0.50) — always 1.0, candidates only exist for exact matches ---
    let name_score = 1.0;

    // --- Department (0.25) ---
    let dept_score = department_similarity(instructor_subjects, rmp_department);

    // --- Uniqueness (0.15) ---
    let uniqueness_score = match candidate_count {
        0 | 1 => 1.0,
        2 => 0.5,
        _ => 0.2,
    };

    // --- Volume (0.10) ---
    let volume_score = ((rmp_num_ratings as f32).ln_1p() / 5.0_f32.ln_1p()).clamp(0.0, 1.0);
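    // Equivalently min(1, ln(1 + n) / ln(6)) for n ratings: roughly 0.39 at
    // n = 1, 0.77 at n = 3, saturating at 1.0 once n reaches 5.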

    let composite = name_score * WEIGHT_NAME
        + dept_score * WEIGHT_DEPARTMENT
        + uniqueness_score * WEIGHT_UNIQUENESS
        + volume_score * WEIGHT_VOLUME;

    MatchScore {
        score: composite,
        breakdown: ScoreBreakdown {
            name: name_score,
            department: dept_score,
            uniqueness: uniqueness_score,
            volume: volume_score,
        },
    }
}
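// A worked example of the composite (a sketch, not part of the shipped code):
// two RMP profiles share the name, the department matches, and the profile has
// 5 ratings, so the volume term saturates:
//
//   let ms = compute_match_score(&["CS".to_string()], Some("Computer Science"), 2, 5);
//   // 1.0 * 0.50 + 1.0 * 0.25 + 0.5 * 0.15 + 1.0 * 0.10 = 0.925
//   assert!((ms.score - 0.925).abs() < 1e-3); // clears AUTO_ACCEPT_THRESHOLD (0.85)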

// ---------------------------------------------------------------------------
// Candidate generation (DB)
// ---------------------------------------------------------------------------

/// Statistics returned from candidate generation.
#[derive(Debug)]
pub struct MatchingStats {
    pub total_unmatched: usize,
    pub candidates_created: usize,
    pub candidates_rescored: usize,
    pub auto_matched: usize,
    pub skipped_unparseable: usize,
    pub skipped_no_candidates: usize,
}

/// Lightweight row for building the in-memory RMP name index.
struct RmpProfForMatching {
    legacy_id: i32,
    department: Option<String>,
    num_ratings: i32,
}

/// Generate match candidates for all unmatched instructors.
///
/// For each unmatched instructor:
/// 1. Parse `display_name` into [`NameParts`] and generate matching keys.
/// 2. Find RMP professors with matching normalized name keys.
/// 3. Score each candidate.
/// 4. Store candidates scoring above [`MIN_CANDIDATE_THRESHOLD`].
/// 5. Auto-accept if the top candidate scores ≥ [`AUTO_ACCEPT_THRESHOLD`]
///    and no existing rejected candidate exists for that pair.
///
/// Already-resolved instructor–RMP pairs (accepted or rejected) are skipped;
/// pending candidates are rescored.
pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
    // 1. Load unmatched instructors
    let instructors: Vec<(i32, String)> = sqlx::query_as(
        "SELECT id, display_name FROM instructors WHERE rmp_match_status = 'unmatched'",
    )
    .fetch_all(db_pool)
    .await?;

    if instructors.is_empty() {
        info!("No unmatched instructors to generate candidates for");
        return Ok(MatchingStats {
            total_unmatched: 0,
            candidates_created: 0,
            candidates_rescored: 0,
            auto_matched: 0,
            skipped_unparseable: 0,
            skipped_no_candidates: 0,
        });
    }

    let instructor_ids: Vec<i32> = instructors.iter().map(|(id, _)| *id).collect();
    let total_unmatched = instructors.len();

    // 2. Load instructor subjects
    let subject_rows: Vec<(i32, String)> = sqlx::query_as(
        r#"
        SELECT DISTINCT ci.instructor_id, c.subject
        FROM course_instructors ci
        JOIN courses c ON c.id = ci.course_id
        WHERE ci.instructor_id = ANY($1)
        "#,
    )
    .bind(&instructor_ids)
    .fetch_all(db_pool)
    .await?;

    let mut subject_map: HashMap<i32, Vec<String>> = HashMap::new();
    for (iid, subject) in subject_rows {
        subject_map.entry(iid).or_default().push(subject);
    }

    // 3. Load all RMP professors and build multi-key name index
    let prof_rows: Vec<(i32, String, String, Option<String>, i32)> = sqlx::query_as(
        "SELECT legacy_id, first_name, last_name, department, num_ratings FROM rmp_professors",
    )
    .fetch_all(db_pool)
    .await?;

    // Build name index: (normalized_last, normalized_first) -> Vec<RmpProfForMatching>
    // Each professor may appear under multiple keys (nicknames, token variants).
    let mut name_index: HashMap<(String, String), Vec<RmpProfForMatching>> = HashMap::new();
    let mut rmp_parse_failures = 0usize;
    for (legacy_id, first_name, last_name, department, num_ratings) in &prof_rows {
        match parse_rmp_name(first_name, last_name) {
            Some(parts) => {
                let keys = matching_keys(&parts);
                for key in keys {
                    name_index.entry(key).or_default().push(RmpProfForMatching {
                        legacy_id: *legacy_id,
                        department: department.clone(),
                        num_ratings: *num_ratings,
                    });
                }
            }
            None => {
                rmp_parse_failures += 1;
                debug!(
                    legacy_id,
                    first_name, last_name, "Unparseable RMP professor name, skipping"
                );
            }
        }
    }

    if rmp_parse_failures > 0 {
        debug!(
            count = rmp_parse_failures,
            "RMP professors with unparseable names"
        );
    }

    // 4. Load existing candidate pairs — only skip resolved (accepted/rejected) pairs.
    //    Pending candidates are rescored so updated mappings take effect.
    let candidate_rows: Vec<(i32, i32, String)> =
        sqlx::query_as("SELECT instructor_id, rmp_legacy_id, status FROM rmp_match_candidates")
            .fetch_all(db_pool)
            .await?;

    let mut resolved_pairs: HashSet<(i32, i32)> = HashSet::new();
    let mut pending_pairs: HashSet<(i32, i32)> = HashSet::new();
    let mut rejected_pairs: HashSet<(i32, i32)> = HashSet::new();
    for (iid, lid, status) in candidate_rows {
        match status.as_str() {
            "accepted" | "rejected" => {
                resolved_pairs.insert((iid, lid));
                if status == "rejected" {
                    rejected_pairs.insert((iid, lid));
                }
            }
            _ => {
                pending_pairs.insert((iid, lid));
            }
        }
    }

    // 5. Score and collect candidates (new + rescored pending)
    let empty_subjects: Vec<String> = Vec::new();
    let mut new_candidates: Vec<(i32, i32, f32, serde_json::Value)> = Vec::new();
    let mut rescored_candidates: Vec<(i32, i32, f32, serde_json::Value)> = Vec::new();
    let mut auto_accept: Vec<(i32, i32)> = Vec::new(); // (instructor_id, legacy_id)
    let mut skipped_unparseable = 0usize;
    let mut skipped_no_candidates = 0usize;

    for (instructor_id, display_name) in &instructors {
        let Some(instructor_parts) = parse_banner_name(display_name) else {
            skipped_unparseable += 1;
            debug!(
                instructor_id,
                display_name, "Unparseable display name, skipping"
            );
            continue;
        };

        let subjects = subject_map.get(instructor_id).unwrap_or(&empty_subjects);

        // Generate all matching keys for this instructor and collect candidate
        // RMP professors across all key variants (deduplicated by legacy_id).
        let instructor_keys = matching_keys(&instructor_parts);
        let mut seen_profs: HashSet<i32> = HashSet::new();
        let mut matched_profs: Vec<&RmpProfForMatching> = Vec::new();

        for key in &instructor_keys {
            if let Some(profs) = name_index.get(key) {
                for prof in profs {
                    if seen_profs.insert(prof.legacy_id) {
                        matched_profs.push(prof);
                    }
                }
            }
        }

        if matched_profs.is_empty() {
            skipped_no_candidates += 1;
            continue;
        }

        let candidate_count = matched_profs.len();
        let mut best: Option<(f32, i32)> = None;

        for prof in &matched_profs {
            let pair = (*instructor_id, prof.legacy_id);
            if resolved_pairs.contains(&pair) {
                continue;
            }

            let ms = compute_match_score(
                subjects,
                prof.department.as_deref(),
                candidate_count,
                prof.num_ratings,
            );

            if ms.score < MIN_CANDIDATE_THRESHOLD {
                continue;
            }

            let breakdown_json =
                serde_json::to_value(&ms.breakdown).unwrap_or_else(|_| serde_json::json!({}));

            if pending_pairs.contains(&pair) {
                rescored_candidates.push((
                    *instructor_id,
                    prof.legacy_id,
                    ms.score,
                    breakdown_json,
                ));
            } else {
                new_candidates.push((*instructor_id, prof.legacy_id, ms.score, breakdown_json));
            }

            match best {
                Some((s, _)) if ms.score > s => best = Some((ms.score, prof.legacy_id)),
                None => best = Some((ms.score, prof.legacy_id)),
                _ => {}
            }
        }

        // Auto-accept the top candidate if it meets the threshold and is not
        // previously rejected.
        if let Some((score, legacy_id)) = best
            && score >= AUTO_ACCEPT_THRESHOLD
            && !rejected_pairs.contains(&(*instructor_id, legacy_id))
        {
            auto_accept.push((*instructor_id, legacy_id));
        }
    }

    // 6–7. Write candidates, rescore, and auto-accept within a single transaction
    let candidates_created = new_candidates.len();
    let candidates_rescored = rescored_candidates.len();
    let auto_matched = auto_accept.len();

    let mut tx = db_pool.begin().await?;

    // 6a. Batch-insert new candidates
    if !new_candidates.is_empty() {
        let c_instructor_ids: Vec<i32> = new_candidates.iter().map(|(iid, _, _, _)| *iid).collect();
        let c_legacy_ids: Vec<i32> = new_candidates.iter().map(|(_, lid, _, _)| *lid).collect();
        let c_scores: Vec<f32> = new_candidates.iter().map(|(_, _, s, _)| *s).collect();
        let c_breakdowns: Vec<serde_json::Value> =
            new_candidates.into_iter().map(|(_, _, _, b)| b).collect();

        sqlx::query(
            r#"
            INSERT INTO rmp_match_candidates (instructor_id, rmp_legacy_id, score, score_breakdown)
            SELECT v.instructor_id, v.rmp_legacy_id, v.score, v.score_breakdown
            FROM UNNEST($1::int4[], $2::int4[], $3::real[], $4::jsonb[])
                AS v(instructor_id, rmp_legacy_id, score, score_breakdown)
            ON CONFLICT (instructor_id, rmp_legacy_id) DO NOTHING
            "#,
        )
        .bind(&c_instructor_ids)
        .bind(&c_legacy_ids)
        .bind(&c_scores)
        .bind(&c_breakdowns)
        .execute(&mut *tx)
        .await?;
    }

    // 6b. Batch-update rescored pending candidates
    if !rescored_candidates.is_empty() {
        let r_instructor_ids: Vec<i32> = rescored_candidates
            .iter()
            .map(|(iid, _, _, _)| *iid)
            .collect();
        let r_legacy_ids: Vec<i32> = rescored_candidates
            .iter()
            .map(|(_, lid, _, _)| *lid)
            .collect();
        let r_scores: Vec<f32> = rescored_candidates.iter().map(|(_, _, s, _)| *s).collect();
        let r_breakdowns: Vec<serde_json::Value> = rescored_candidates
            .into_iter()
            .map(|(_, _, _, b)| b)
            .collect();

        sqlx::query(
            r#"
            UPDATE rmp_match_candidates mc
            SET score = v.score, score_breakdown = v.score_breakdown
            FROM UNNEST($1::int4[], $2::int4[], $3::real[], $4::jsonb[])
                AS v(instructor_id, rmp_legacy_id, score, score_breakdown)
            WHERE mc.instructor_id = v.instructor_id
                AND mc.rmp_legacy_id = v.rmp_legacy_id
            "#,
        )
        .bind(&r_instructor_ids)
        .bind(&r_legacy_ids)
        .bind(&r_scores)
        .bind(&r_breakdowns)
        .execute(&mut *tx)
        .await?;
    }

    // 7. Auto-accept top candidates
    if !auto_accept.is_empty() {
        let aa_instructor_ids: Vec<i32> = auto_accept.iter().map(|(iid, _)| *iid).collect();
        let aa_legacy_ids: Vec<i32> = auto_accept.iter().map(|(_, lid)| *lid).collect();

        // Mark the candidate row as accepted
        sqlx::query(
            r#"
            UPDATE rmp_match_candidates mc
            SET status = 'accepted', resolved_at = NOW()
            FROM UNNEST($1::int4[], $2::int4[]) AS v(instructor_id, rmp_legacy_id)
            WHERE mc.instructor_id = v.instructor_id
                AND mc.rmp_legacy_id = v.rmp_legacy_id
            "#,
        )
        .bind(&aa_instructor_ids)
        .bind(&aa_legacy_ids)
        .execute(&mut *tx)
        .await?;

        // Insert links into instructor_rmp_links
        sqlx::query(
            r#"
            INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, source)
            SELECT v.instructor_id, v.rmp_legacy_id, 'auto'
            FROM UNNEST($1::int4[], $2::int4[]) AS v(instructor_id, rmp_legacy_id)
            ON CONFLICT (rmp_legacy_id) DO NOTHING
            "#,
        )
        .bind(&aa_instructor_ids)
        .bind(&aa_legacy_ids)
        .execute(&mut *tx)
        .await?;

        // Update instructor match status
        sqlx::query(
            r#"
            UPDATE instructors i
            SET rmp_match_status = 'auto'
            FROM UNNEST($1::int4[]) AS v(instructor_id)
            WHERE i.id = v.instructor_id
            "#,
        )
        .bind(&aa_instructor_ids)
        .execute(&mut *tx)
        .await?;
    }

    tx.commit().await?;

    let stats = MatchingStats {
        total_unmatched,
        candidates_created,
        candidates_rescored,
        auto_matched,
        skipped_unparseable,
        skipped_no_candidates,
    };

    info!(
        total_unmatched = stats.total_unmatched,
        candidates_created = stats.candidates_created,
        candidates_rescored = stats.candidates_rescored,
        auto_matched = stats.auto_matched,
        skipped_unparseable = stats.skipped_unparseable,
        skipped_no_candidates = stats.skipped_no_candidates,
        "Candidate generation complete"
    );

    Ok(stats)
}

// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_ideal_candidate_high_score() {
        let ms = compute_match_score(
            &["CS".to_string()],
            Some("Computer Science"),
            1, // unique candidate
            50, // decent ratings
        );
        // name 1.0*0.50 + dept 1.0*0.25 + unique 1.0*0.15 + volume 1.0*0.10 = 1.0
        // (50 ratings is well past the saturation point, so volume clamps to 1.0)
        assert!(ms.score >= 0.85, "Expected score >= 0.85, got {}", ms.score);
        assert_eq!(ms.breakdown.name, 1.0);
        assert_eq!(ms.breakdown.uniqueness, 1.0);
        assert_eq!(ms.breakdown.department, 1.0);
    }

    #[test]
    fn test_ambiguous_candidates_lower_score() {
        let unique = compute_match_score(&[], None, 1, 10);
        let ambiguous = compute_match_score(&[], None, 3, 10);
        assert!(
            unique.score > ambiguous.score,
            "Unique ({}) should outscore ambiguous ({})",
            unique.score,
            ambiguous.score
        );
        assert_eq!(unique.breakdown.uniqueness, 1.0);
        assert_eq!(ambiguous.breakdown.uniqueness, 0.2);
    }

    #[test]
    fn test_no_department_neutral() {
        let ms = compute_match_score(&["CS".to_string()], None, 1, 10);
        assert_eq!(ms.breakdown.department, 0.5);
    }

    #[test]
    fn test_department_match() {
        let ms = compute_match_score(&["CS".to_string()], Some("Computer Science"), 1, 10);
        assert_eq!(ms.breakdown.department, 1.0);
    }

    #[test]
    fn test_department_mismatch() {
        let ms = compute_match_score(&["CS".to_string()], Some("History"), 1, 10);
        assert_eq!(ms.breakdown.department, 0.2);
    }

    #[test]
    fn test_department_match_outscores_mismatch() {
        let matched = compute_match_score(&["CS".to_string()], Some("Computer Science"), 1, 10);
        let mismatched = compute_match_score(&["CS".to_string()], Some("History"), 1, 10);
        assert!(
            matched.score > mismatched.score,
            "Department match ({}) should outscore mismatch ({})",
            matched.score,
            mismatched.score
        );
    }

    #[test]
    fn test_volume_scaling() {
        let zero = compute_match_score(&[], None, 1, 0);
        let many = compute_match_score(&[], None, 1, 100);
        assert!(
            many.breakdown.volume > zero.breakdown.volume,
            "100 ratings ({}) should outscore 0 ratings ({})",
            many.breakdown.volume,
            zero.breakdown.volume
        );
        assert_eq!(zero.breakdown.volume, 0.0);
        assert!(
            many.breakdown.volume > 0.9,
            "100 ratings should be near max"
        );
    }
}

+186 -35
@@ -1,15 +1,40 @@
//! Database operations for scrape job queue management.

use crate::data::models::{ScrapeJob, ScrapePriority, TargetType};
use crate::data::models::{ScrapeJob, ScrapePriority, TargetType, UpsertCounts};
use crate::error::Result;
use chrono::{DateTime, Utc};
use sqlx::PgPool;
use std::collections::HashSet;

/// Force-unlock all jobs that have a non-NULL `locked_at`.
///
/// Intended to be called once at startup to recover jobs left locked by
/// a previous unclean shutdown (crash, OOM kill, etc.).
///
/// # Returns
/// The number of jobs that were unlocked.
pub async fn force_unlock_all(db_pool: &PgPool) -> Result<u64> {
    let result = sqlx::query(
        "UPDATE scrape_jobs SET locked_at = NULL, queued_at = NOW() WHERE locked_at IS NOT NULL",
    )
    .execute(db_pool)
    .await?;
    Ok(result.rows_affected())
}

/// How long a lock can be held before it is considered expired and reclaimable.
///
/// This acts as a safety net for cases where a worker dies without unlocking
/// (OOM kill, crash, network partition). Under normal operation, the worker's
/// own job timeout fires well before this threshold.
const LOCK_EXPIRY: std::time::Duration = std::time::Duration::from_secs(10 * 60);

/// Atomically fetch and lock the next available scrape job.
///
/// Uses `FOR UPDATE SKIP LOCKED` to allow multiple workers to poll the queue
/// concurrently without conflicts. Only jobs that are unlocked and ready to
/// execute (based on `execute_at`) are considered.
/// concurrently without conflicts. Considers jobs that are:
/// - Unlocked and ready to execute, OR
/// - Locked but past [`LOCK_EXPIRY`] (abandoned by a dead worker)
///
/// # Arguments
/// * `db_pool` - PostgreSQL connection pool
@@ -20,9 +45,16 @@ use std::collections::HashSet;
pub async fn fetch_and_lock_job(db_pool: &PgPool) -> Result<Option<ScrapeJob>> {
    let mut tx = db_pool.begin().await?;

    let lock_expiry_secs = LOCK_EXPIRY.as_secs() as i32;
    let job = sqlx::query_as::<_, ScrapeJob>(
        "SELECT * FROM scrape_jobs WHERE locked_at IS NULL AND execute_at <= NOW() ORDER BY priority DESC, execute_at ASC LIMIT 1 FOR UPDATE SKIP LOCKED"
        "SELECT * FROM scrape_jobs \
         WHERE (locked_at IS NULL OR locked_at < NOW() - make_interval(secs => $1::double precision)) \
         AND execute_at <= NOW() \
         ORDER BY priority DESC, execute_at ASC \
         LIMIT 1 \
         FOR UPDATE SKIP LOCKED"
    )
    .bind(lock_expiry_secs)
    .fetch_optional(&mut *tx)
    .await?;

@@ -68,10 +100,11 @@ pub async fn unlock_job(job_id: i32, db_pool: &PgPool) -> Result<()> {
    Ok(())
}

/// Atomically unlock a job and increment its retry count.
/// Atomically unlock a job, increment its retry count, and reset `queued_at`.
///
/// Returns whether the job still has retries remaining. This is determined
/// atomically in the database to avoid race conditions between workers.
/// Returns the new `queued_at` timestamp if retries remain, or `None` if
/// the job has exhausted its retries. This is determined atomically in the
/// database to avoid race conditions between workers.
///
/// # Arguments
/// * `job_id` - The database ID of the job
@@ -79,31 +112,31 @@ pub async fn unlock_job(job_id: i32, db_pool: &PgPool) -> Result<()> {
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// * `Ok(true)` if the job was unlocked and retries remain
/// * `Ok(false)` if the job has exhausted its retries
/// * `Ok(Some(queued_at))` if the job was unlocked and retries remain
/// * `Ok(None)` if the job has exhausted its retries
pub async fn unlock_and_increment_retry(
    job_id: i32,
    max_retries: i32,
    db_pool: &PgPool,
) -> Result<bool> {
    let result = sqlx::query_scalar::<_, Option<i32>>(
) -> Result<Option<chrono::DateTime<chrono::Utc>>> {
    let result = sqlx::query_scalar::<_, Option<chrono::DateTime<chrono::Utc>>>(
        "UPDATE scrape_jobs
         SET locked_at = NULL, retry_count = retry_count + 1
         SET locked_at = NULL, retry_count = retry_count + 1, queued_at = NOW()
         WHERE id = $1
         RETURNING CASE WHEN retry_count < $2 THEN retry_count ELSE NULL END",
         RETURNING CASE WHEN retry_count <= $2 THEN queued_at ELSE NULL END",
    )
    .bind(job_id)
    .bind(max_retries)
    .fetch_one(db_pool)
    .await?;

    Ok(result.is_some())
    Ok(result)
}
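// Caller sketch (hypothetical worker code; `job` and `MAX_RETRIES` are not
// defined in this module): the returned timestamp distinguishes "requeued for
// retry" from "retries exhausted".
//
//   match unlock_and_increment_retry(job.id, MAX_RETRIES, &pool).await? {
//       Some(queued_at) => tracing::info!(%queued_at, "job requeued for retry"),
//       None => tracing::warn!(job_id = job.id, "job exhausted its retries"),
//   }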

/// Find existing unlocked job payloads matching the given target type and candidates.
/// Find existing job payloads matching the given target type and candidates.
///
/// Returns a set of stringified JSON payloads that already exist in the queue,
/// used for deduplication when scheduling new jobs.
/// Returns a set of stringified JSON payloads that already exist in the queue
/// (both locked and unlocked), used for deduplication when scheduling new jobs.
///
/// # Arguments
/// * `target_type` - The target type to filter by
@@ -111,7 +144,7 @@ pub async fn unlock_and_increment_retry(
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// A `HashSet` of stringified JSON payloads that already have pending jobs
/// A `HashSet` of stringified JSON payloads that already have pending or in-progress jobs
pub async fn find_existing_job_payloads(
    target_type: TargetType,
    candidate_payloads: &[serde_json::Value],
@@ -119,7 +152,7 @@ pub async fn find_existing_job_payloads(
) -> Result<HashSet<String>> {
    let existing_jobs: Vec<(serde_json::Value,)> = sqlx::query_as(
        "SELECT target_payload FROM scrape_jobs
         WHERE target_type = $1 AND target_payload = ANY($2) AND locked_at IS NULL",
         WHERE target_type = $1 AND target_payload = ANY($2)",
    )
    .bind(target_type)
    .bind(candidate_payloads)
@@ -134,7 +167,117 @@ pub async fn find_existing_job_payloads(
    Ok(existing_payloads)
}

/// Batch insert scrape jobs in a single transaction.
/// Insert a scrape job result log entry.
#[allow(clippy::too_many_arguments)]
pub async fn insert_job_result(
    target_type: TargetType,
    payload: serde_json::Value,
    priority: ScrapePriority,
    queued_at: DateTime<Utc>,
    started_at: DateTime<Utc>,
    duration_ms: i32,
    success: bool,
    error_message: Option<&str>,
    retry_count: i32,
    counts: Option<&UpsertCounts>,
    db_pool: &PgPool,
) -> Result<()> {
    sqlx::query(
        r#"
        INSERT INTO scrape_job_results (
            target_type, payload, priority,
            queued_at, started_at, duration_ms,
            success, error_message, retry_count,
            courses_fetched, courses_changed, courses_unchanged,
            audits_generated, metrics_generated
        ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
        "#,
    )
    .bind(target_type)
    .bind(&payload)
    .bind(priority)
    .bind(queued_at)
    .bind(started_at)
    .bind(duration_ms)
    .bind(success)
    .bind(error_message)
    .bind(retry_count)
    .bind(counts.map(|c| c.courses_fetched))
    .bind(counts.map(|c| c.courses_changed))
    .bind(counts.map(|c| c.courses_unchanged))
    .bind(counts.map(|c| c.audits_generated))
    .bind(counts.map(|c| c.metrics_generated))
    .execute(db_pool)
    .await?;

    Ok(())
}

/// Per-subject aggregated stats from recent scrape results.
///
/// Populated by [`fetch_subject_stats`] and converted into
/// [`crate::scraper::adaptive::SubjectStats`] for interval computation.
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct SubjectResultStats {
    pub subject: String,
    pub recent_runs: i64,
    pub avg_change_ratio: f64,
    pub consecutive_zero_changes: i64,
    pub consecutive_empty_fetches: i64,
    pub recent_failure_count: i64,
    pub recent_success_count: i64,
    pub last_completed: DateTime<Utc>,
}

/// Fetch aggregated per-subject statistics from the last 24 hours of results.
///
/// For each subject, examines the 20 most recent results and computes:
/// - Average change ratio (courses_changed / courses_fetched)
/// - Consecutive zero-change runs from the most recent result
/// - Consecutive empty-fetch runs from the most recent result
/// - Failure and success counts
/// - Last completion timestamp
pub async fn fetch_subject_stats(db_pool: &PgPool) -> Result<Vec<SubjectResultStats>> {
    let rows = sqlx::query_as::<_, SubjectResultStats>(
        r#"
        WITH recent AS (
            SELECT payload->>'subject' AS subject, success,
                   COALESCE(courses_fetched, 0) AS courses_fetched,
                   COALESCE(courses_changed, 0) AS courses_changed,
                   completed_at,
                   ROW_NUMBER() OVER (PARTITION BY payload->>'subject' ORDER BY completed_at DESC) AS rn
            FROM scrape_job_results
            WHERE target_type = 'Subject' AND completed_at > NOW() - INTERVAL '24 hours'
        ),
        filtered AS (SELECT * FROM recent WHERE rn <= 20),
        zero_break AS (
            SELECT subject,
                   MIN(rn) FILTER (WHERE courses_changed > 0 AND success) AS first_nonzero_rn,
                   MIN(rn) FILTER (WHERE courses_fetched > 0 AND success) AS first_nonempty_rn
            FROM filtered GROUP BY subject
        )
        SELECT
            f.subject::TEXT AS subject,
            COUNT(*)::BIGINT AS recent_runs,
            COALESCE(AVG(CASE WHEN f.success AND f.courses_fetched > 0
                THEN f.courses_changed::FLOAT / f.courses_fetched ELSE NULL END), 0.0)::FLOAT8 AS avg_change_ratio,
            COALESCE(zb.first_nonzero_rn - 1, COUNT(*) FILTER (WHERE f.success AND f.courses_changed = 0))::BIGINT AS consecutive_zero_changes,
            COALESCE(zb.first_nonempty_rn - 1, COUNT(*) FILTER (WHERE f.success AND f.courses_fetched = 0))::BIGINT AS consecutive_empty_fetches,
            COUNT(*) FILTER (WHERE NOT f.success)::BIGINT AS recent_failure_count,
            COUNT(*) FILTER (WHERE f.success)::BIGINT AS recent_success_count,
            MAX(f.completed_at) AS last_completed
        FROM filtered f
        LEFT JOIN zero_break zb ON f.subject = zb.subject
        GROUP BY f.subject, zb.first_nonzero_rn, zb.first_nonempty_rn
        "#,
    )
    .fetch_all(db_pool)
    .await?;

    Ok(rows)
}
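// Reading the window arithmetic above: rn = 1 is the newest result, so when the
// most recent successful run with courses_changed > 0 sits at rn = first_nonzero_rn,
// the first_nonzero_rn - 1 rows newer than it form the consecutive zero-change
// streak. If no such row exists in the 20-row window, the COALESCE fallback
// counts every successful zero-change run instead.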

/// Batch insert scrape jobs using UNNEST for a single round-trip.
///
/// All jobs are inserted with `execute_at` set to the current time.
///
@@ -144,27 +287,35 @@ pub async fn find_existing_job_payloads(
pub async fn batch_insert_jobs(
    jobs: &[(serde_json::Value, TargetType, ScrapePriority)],
    db_pool: &PgPool,
) -> Result<()> {
) -> Result<Vec<ScrapeJob>> {
    if jobs.is_empty() {
        return Ok(());
        return Ok(Vec::new());
    }

    let now = chrono::Utc::now();
    let mut tx = db_pool.begin().await?;
    let mut target_types: Vec<String> = Vec::with_capacity(jobs.len());
    let mut payloads: Vec<serde_json::Value> = Vec::with_capacity(jobs.len());
    let mut priorities: Vec<String> = Vec::with_capacity(jobs.len());

    for (payload, target_type, priority) in jobs {
        sqlx::query(
            "INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at) VALUES ($1, $2, $3, $4)"
        )
        .bind(target_type)
        .bind(payload)
        .bind(priority)
        .bind(now)
        .execute(&mut *tx)
        .await?;
        target_types.push(format!("{target_type:?}"));
        payloads.push(payload.clone());
        priorities.push(format!("{priority:?}"));
    }

    tx.commit().await?;
    let inserted = sqlx::query_as::<_, ScrapeJob>(
        r#"
        INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at, queued_at)
        SELECT v.target_type::target_type, v.payload, v.priority::scrape_priority, NOW(), NOW()
        FROM UNNEST($1::text[], $2::jsonb[], $3::text[])
            AS v(target_type, payload, priority)
        RETURNING *
        "#,
    )
    .bind(&target_types)
    .bind(&payloads)
    .bind(&priorities)
    .fetch_all(db_pool)
    .await?;

    Ok(())
    Ok(inserted)
}
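// Note the casts in the new query: `v.target_type::target_type` and
// `v.priority::scrape_priority` convert the Debug-formatted strings back into
// the Postgres enums, which assumes each Rust variant's Debug name matches the
// corresponding enum label exactly.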

@@ -0,0 +1,100 @@
//! Database query functions for user sessions.

use anyhow::Context;
use rand::Rng;
use sqlx::PgPool;

use super::models::UserSession;
use crate::error::Result;

/// Session lifetime: 7 days (in seconds).
pub const SESSION_DURATION_SECS: u64 = 7 * 24 * 3600;

/// Generate a cryptographically random 32-byte hex token.
fn generate_token() -> String {
    let bytes: [u8; 32] = rand::rng().random();
    bytes.iter().map(|b| format!("{b:02x}")).collect()
}
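// 32 random bytes hex-encode to a 64-character id (256 bits of entropy), far
// beyond practical brute-force range for a bearer token.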

/// Create a new session for a user with the given duration.
pub async fn create_session(
    pool: &PgPool,
    user_id: i64,
    duration: std::time::Duration,
) -> Result<UserSession> {
    let token = generate_token();
    let duration_secs = duration.as_secs() as i64;

    sqlx::query_as::<_, UserSession>(
        r#"
        INSERT INTO user_sessions (id, user_id, expires_at)
        VALUES ($1, $2, now() + make_interval(secs => $3::double precision))
        RETURNING *
        "#,
    )
    .bind(&token)
    .bind(user_id)
    .bind(duration_secs as f64)
    .fetch_one(pool)
    .await
    .context("failed to create session")
}

/// Fetch a session by token, only if it has not expired.
pub async fn get_session(pool: &PgPool, token: &str) -> Result<Option<UserSession>> {
    sqlx::query_as::<_, UserSession>(
        "SELECT * FROM user_sessions WHERE id = $1 AND expires_at > now()",
    )
    .bind(token)
    .fetch_optional(pool)
    .await
    .context("failed to get session")
}

/// Update the last-active timestamp and extend session expiry (sliding window).
pub async fn touch_session(pool: &PgPool, token: &str) -> Result<()> {
    sqlx::query(
        r#"
        UPDATE user_sessions
        SET last_active_at = now(),
            expires_at = now() + make_interval(secs => $2::double precision)
        WHERE id = $1
        "#,
    )
    .bind(token)
    .bind(SESSION_DURATION_SECS as f64)
    .execute(pool)
    .await
    .context("failed to touch session")?;
    Ok(())
}

/// Delete a session by token.
pub async fn delete_session(pool: &PgPool, token: &str) -> Result<()> {
    sqlx::query("DELETE FROM user_sessions WHERE id = $1")
        .bind(token)
        .execute(pool)
        .await
        .context("failed to delete session")?;
    Ok(())
}

/// Delete all sessions for a user. Returns the number of sessions deleted.
#[allow(dead_code)] // Available for admin user-deletion flow
pub async fn delete_user_sessions(pool: &PgPool, user_id: i64) -> Result<u64> {
    let result = sqlx::query("DELETE FROM user_sessions WHERE user_id = $1")
        .bind(user_id)
        .execute(pool)
        .await
        .context("failed to delete user sessions")?;
    Ok(result.rows_affected())
}

/// Delete all expired sessions. Returns the number of sessions cleaned up.
pub async fn cleanup_expired(pool: &PgPool) -> Result<u64> {
    let result = sqlx::query("DELETE FROM user_sessions WHERE expires_at <= now()")
        .execute(pool)
        .await
        .context("failed to cleanup expired sessions")?;
    Ok(result.rows_affected())
}
|
||||
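
A hedged sketch of the session lifecycle these functions implement, assuming module scope; `run_session_demo` and the user id are made up, but the token lives in the `id` column per the INSERT above:

```rust
// Illustrative lifecycle; only the queried functions come from the diff.
use std::time::Duration;

async fn run_session_demo(pool: &sqlx::PgPool) -> crate::error::Result<()> {
    let session = create_session(pool, 1234567890, Duration::from_secs(SESSION_DURATION_SECS)).await?;

    // Sliding window: each authenticated request can touch the session,
    // pushing expires_at another SESSION_DURATION_SECS into the future.
    touch_session(pool, &session.id).await?;

    // Lookups only return sessions whose expires_at is still in the future.
    assert!(get_session(pool, &session.id).await?.is_some());

    delete_session(pool, &session.id).await?;
    Ok(())
}
```
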
@@ -0,0 +1,86 @@
//! Database query functions for users.

use anyhow::Context;
use sqlx::PgPool;

use super::models::User;
use crate::error::Result;

/// Insert a new user or update username/avatar on conflict.
pub async fn upsert_user(
    pool: &PgPool,
    discord_id: i64,
    username: &str,
    avatar_hash: Option<&str>,
) -> Result<User> {
    sqlx::query_as::<_, User>(
        r#"
        INSERT INTO users (discord_id, discord_username, discord_avatar_hash)
        VALUES ($1, $2, $3)
        ON CONFLICT (discord_id) DO UPDATE
        SET discord_username = EXCLUDED.discord_username,
            discord_avatar_hash = EXCLUDED.discord_avatar_hash,
            updated_at = now()
        RETURNING *
        "#,
    )
    .bind(discord_id)
    .bind(username)
    .bind(avatar_hash)
    .fetch_one(pool)
    .await
    .context("failed to upsert user")
}

/// Fetch a user by Discord ID.
pub async fn get_user(pool: &PgPool, discord_id: i64) -> Result<Option<User>> {
    sqlx::query_as::<_, User>("SELECT * FROM users WHERE discord_id = $1")
        .bind(discord_id)
        .fetch_optional(pool)
        .await
        .context("failed to get user")
}

/// List all users ordered by creation date (newest first).
pub async fn list_users(pool: &PgPool) -> Result<Vec<User>> {
    sqlx::query_as::<_, User>("SELECT * FROM users ORDER BY created_at DESC")
        .fetch_all(pool)
        .await
        .context("failed to list users")
}

/// Set the admin flag for a user, returning the updated user if found.
pub async fn set_admin(pool: &PgPool, discord_id: i64, is_admin: bool) -> Result<Option<User>> {
    sqlx::query_as::<_, User>(
        r#"
        UPDATE users
        SET is_admin = $2, updated_at = now()
        WHERE discord_id = $1
        RETURNING *
        "#,
    )
    .bind(discord_id)
    .bind(is_admin)
    .fetch_optional(pool)
    .await
    .context("failed to set admin status")
}

/// Ensure a seed admin exists. Upserts with `is_admin = true` and a placeholder
/// username that will be replaced on first OAuth login.
pub async fn ensure_seed_admin(pool: &PgPool, discord_id: i64) -> Result<User> {
    sqlx::query_as::<_, User>(
        r#"
        INSERT INTO users (discord_id, discord_username, is_admin)
        VALUES ($1, 'seed-admin', true)
        ON CONFLICT (discord_id) DO UPDATE
        SET is_admin = true,
            updated_at = now()
        RETURNING *
        "#,
    )
    .bind(discord_id)
    .fetch_one(pool)
    .await
    .context("failed to ensure seed admin")
}
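
The two upserts compose deliberately: `ensure_seed_admin` only forces `is_admin`, and `upsert_user` only touches username/avatar, so the seed admin keeps its flag after the first real login. A hedged sketch, with a made-up Discord id and function name:

```rust
// Illustrative flow; the id and avatar hash are placeholders.
async fn demo_seed_then_login(pool: &sqlx::PgPool) -> crate::error::Result<()> {
    // Seeded ahead of time with the 'seed-admin' placeholder username...
    let seeded = ensure_seed_admin(pool, 80351110224678912).await?;
    assert!(seeded.is_admin);

    // ...then the first OAuth login overwrites username/avatar, but because
    // that upsert never writes is_admin, the admin flag survives.
    let user = upsert_user(pool, 80351110224678912, "nelly", Some("deadbeef")).await?;
    assert!(user.is_admin);
    assert_eq!(user.discord_username, "nelly");
    Ok(())
}
```
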
@@ -1,6 +1,7 @@
pub mod app;
pub mod banner;
pub mod bot;
pub mod calendar;
pub mod cli;
pub mod config;
pub mod data;
+4
-5
@@ -1,5 +1,5 @@
use crate::app::App;
use crate::cli::{Args, ServiceName, determine_enabled_services};
use crate::cli::{Args, ServiceName};
use crate::logging::setup_logging;
use clap::Parser;
use std::process::ExitCode;
@@ -8,6 +8,7 @@ use tracing::info;
mod app;
mod banner;
mod bot;
mod calendar;
mod cli;
mod config;
mod data;
@@ -19,7 +20,6 @@ mod scraper;
mod services;
mod signals;
mod state;
#[allow(dead_code)]
mod status;
mod web;

@@ -30,9 +30,8 @@ async fn main() -> ExitCode {
    // Parse CLI arguments
    let args = Args::parse();

    // Determine which services should be enabled
    let enabled_services: Vec<ServiceName> =
        determine_enabled_services(&args).expect("Failed to determine enabled services");
    // Always run all services
    let enabled_services = ServiceName::all();

    // Create and initialize the application
    let mut app = App::new().await.expect("Failed to initialize application");
@@ -35,6 +35,12 @@ pub struct RmpClient {
    http: reqwest::Client,
}

impl Default for RmpClient {
    fn default() -> Self {
        Self::new()
    }
}

impl RmpClient {
    pub fn new() -> Self {
        Self {
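
The added `Default` impl simply delegates to `new()`, which is the shape clippy's `new_without_default` lint asks for and lets the client be built in `Default`-expecting contexts. A trivial sketch of the equivalence (the function name is made up):

```rust
fn make_clients() -> (RmpClient, RmpClient) {
    // Both construct the same client; Default just forwards to new().
    (RmpClient::new(), RmpClient::default())
}
```
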
@@ -0,0 +1,326 @@
//! Adaptive scraping interval computation.
//!
//! Assigns per-subject scrape intervals based on recent change rates,
//! consecutive zero-change runs, failure patterns, and time of day.

use chrono::{DateTime, Datelike, Timelike, Utc};
use chrono_tz::US::Central;
use std::time::Duration;

use crate::data::scrape_jobs::SubjectResultStats;

const FLOOR_INTERVAL: Duration = Duration::from_secs(3 * 60);
const MODERATE_HIGH_INTERVAL: Duration = Duration::from_secs(5 * 60);
const MODERATE_LOW_INTERVAL: Duration = Duration::from_secs(15 * 60);
const LOW_CHANGE_INTERVAL: Duration = Duration::from_secs(30 * 60);
const ZERO_5_INTERVAL: Duration = Duration::from_secs(60 * 60);
const ZERO_10_INTERVAL: Duration = Duration::from_secs(2 * 60 * 60);
const CEILING_INTERVAL: Duration = Duration::from_secs(4 * 60 * 60);
const COLD_START_INTERVAL: Duration = FLOOR_INTERVAL;
const PAUSE_PROBE_INTERVAL: Duration = Duration::from_secs(6 * 60 * 60);
const EMPTY_FETCH_PAUSE_THRESHOLD: i64 = 3;
const FAILURE_PAUSE_THRESHOLD: i64 = 5;

/// Aggregated per-subject statistics derived from recent scrape results.
#[derive(Debug, Clone)]
pub struct SubjectStats {
    pub subject: String,
    pub recent_runs: i64,
    pub avg_change_ratio: f64,
    pub consecutive_zero_changes: i64,
    pub consecutive_empty_fetches: i64,
    pub recent_failure_count: i64,
    pub recent_success_count: i64,
    pub last_completed: DateTime<Utc>,
}

/// Scheduling decision for a subject.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SubjectSchedule {
    /// Subject is due for scraping, with the computed interval.
    Eligible(Duration),
    /// Subject was scraped recently; wait for the remaining cooldown.
    Cooldown(Duration),
    /// Subject is paused due to repeated empty fetches or failures.
    Paused,
    /// Subject belongs to a past term and should not be scraped.
    ReadOnly,
}

impl From<SubjectResultStats> for SubjectStats {
    fn from(row: SubjectResultStats) -> Self {
        Self {
            subject: row.subject,
            recent_runs: row.recent_runs,
            avg_change_ratio: row.avg_change_ratio,
            consecutive_zero_changes: row.consecutive_zero_changes,
            consecutive_empty_fetches: row.consecutive_empty_fetches,
            recent_failure_count: row.recent_failure_count,
            recent_success_count: row.recent_success_count,
            last_completed: row.last_completed,
        }
    }
}

/// Compute the base interval tier from change-rate statistics.
pub fn compute_base_interval(stats: &SubjectStats) -> Duration {
    if stats.recent_runs == 0 {
        return COLD_START_INTERVAL;
    }

    // Consecutive-zero tiers take precedence when change ratio is near zero
    if stats.avg_change_ratio < 0.001 {
        return match stats.consecutive_zero_changes {
            0..5 => LOW_CHANGE_INTERVAL,
            5..10 => ZERO_5_INTERVAL,
            10..20 => ZERO_10_INTERVAL,
            _ => CEILING_INTERVAL,
        };
    }

    match stats.avg_change_ratio {
        r if r >= 0.10 => FLOOR_INTERVAL,
        r if r >= 0.05 => MODERATE_HIGH_INTERVAL,
        r if r >= 0.01 => MODERATE_LOW_INTERVAL,
        _ => LOW_CHANGE_INTERVAL,
    }
}

/// Return a time-of-day multiplier for the given UTC timestamp.
///
/// Peak hours (weekdays 8am-6pm CT) return 1; off-peak (weekdays 6pm-midnight CT)
/// return 2; night (midnight-8am CT) and weekends return 4.
pub fn time_of_day_multiplier(now: DateTime<Utc>) -> u32 {
    let ct = now.with_timezone(&Central);
    let weekday = ct.weekday();
    let hour = ct.hour();

    // Weekends get the slowest multiplier
    if matches!(weekday, chrono::Weekday::Sat | chrono::Weekday::Sun) {
        return 4;
    }

    match hour {
        8..18 => 1,  // peak
        18..24 => 2, // off-peak
        _ => 4,      // night (0..8)
    }
}

/// Evaluate whether a subject should be scraped now.
///
/// Combines base interval, time-of-day multiplier, pause detection (empty
/// fetches / consecutive failures), and past-term read-only status.
pub fn evaluate_subject(
    stats: &SubjectStats,
    now: DateTime<Utc>,
    is_past_term: bool,
) -> SubjectSchedule {
    if is_past_term {
        return SubjectSchedule::ReadOnly;
    }

    let elapsed = (now - stats.last_completed)
        .to_std()
        .unwrap_or(Duration::ZERO);
    let probe_due = elapsed >= PAUSE_PROBE_INTERVAL;

    // Pause on repeated empty fetches
    if stats.consecutive_empty_fetches >= EMPTY_FETCH_PAUSE_THRESHOLD {
        return if probe_due {
            SubjectSchedule::Eligible(PAUSE_PROBE_INTERVAL)
        } else {
            SubjectSchedule::Paused
        };
    }

    // Pause on all-failures
    if stats.recent_success_count == 0 && stats.recent_failure_count >= FAILURE_PAUSE_THRESHOLD {
        return if probe_due {
            SubjectSchedule::Eligible(PAUSE_PROBE_INTERVAL)
        } else {
            SubjectSchedule::Paused
        };
    }

    let base = compute_base_interval(stats);
    let multiplier = time_of_day_multiplier(now);
    let effective = base * multiplier;

    if elapsed >= effective {
        SubjectSchedule::Eligible(effective)
    } else {
        let remaining = effective - elapsed;
        SubjectSchedule::Cooldown(remaining)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::TimeZone;

    /// Create a default `SubjectStats` for testing. Callers mutate fields as needed.
    fn make_stats(subject: &str) -> SubjectStats {
        SubjectStats {
            subject: subject.to_string(),
            recent_runs: 10,
            avg_change_ratio: 0.0,
            consecutive_zero_changes: 0,
            consecutive_empty_fetches: 0,
            recent_failure_count: 0,
            recent_success_count: 10,
            last_completed: Utc::now() - chrono::Duration::hours(1),
        }
    }

    // -- compute_base_interval tests --

    #[test]
    fn test_cold_start_returns_floor() {
        let mut stats = make_stats("CS");
        stats.recent_runs = 0;
        assert_eq!(compute_base_interval(&stats), COLD_START_INTERVAL);
    }

    #[test]
    fn test_high_change_rate() {
        let mut stats = make_stats("CS");
        stats.avg_change_ratio = 0.15;
        assert_eq!(compute_base_interval(&stats), FLOOR_INTERVAL);
    }

    #[test]
    fn test_moderate_high_change() {
        let mut stats = make_stats("CS");
        stats.avg_change_ratio = 0.07;
        assert_eq!(compute_base_interval(&stats), MODERATE_HIGH_INTERVAL);
    }

    #[test]
    fn test_moderate_low_change() {
        let mut stats = make_stats("CS");
        stats.avg_change_ratio = 0.03;
        assert_eq!(compute_base_interval(&stats), MODERATE_LOW_INTERVAL);
    }

    #[test]
    fn test_low_change() {
        let mut stats = make_stats("CS");
        stats.avg_change_ratio = 0.005;
        assert_eq!(compute_base_interval(&stats), LOW_CHANGE_INTERVAL);
    }

    #[test]
    fn test_zero_5_consecutive() {
        let mut stats = make_stats("CS");
        stats.avg_change_ratio = 0.0;
        stats.consecutive_zero_changes = 5;
        assert_eq!(compute_base_interval(&stats), ZERO_5_INTERVAL);
    }

    #[test]
    fn test_zero_10_consecutive() {
        let mut stats = make_stats("CS");
        stats.avg_change_ratio = 0.0;
        stats.consecutive_zero_changes = 10;
        assert_eq!(compute_base_interval(&stats), ZERO_10_INTERVAL);
    }

    #[test]
    fn test_zero_20_consecutive() {
        let mut stats = make_stats("CS");
        stats.avg_change_ratio = 0.0;
        stats.consecutive_zero_changes = 20;
        assert_eq!(compute_base_interval(&stats), CEILING_INTERVAL);
    }

    // -- evaluate_subject tests --

    #[test]
    fn test_pause_empty_fetches() {
        let mut stats = make_stats("CS");
        stats.consecutive_empty_fetches = 3;
        stats.last_completed = Utc::now() - chrono::Duration::minutes(10);
        let result = evaluate_subject(&stats, Utc::now(), false);
        assert_eq!(result, SubjectSchedule::Paused);
    }

    #[test]
    fn test_pause_all_failures() {
        let mut stats = make_stats("CS");
        stats.recent_success_count = 0;
        stats.recent_failure_count = 5;
        stats.last_completed = Utc::now() - chrono::Duration::minutes(10);
        let result = evaluate_subject(&stats, Utc::now(), false);
        assert_eq!(result, SubjectSchedule::Paused);
    }

    #[test]
    fn test_probe_after_pause() {
        let mut stats = make_stats("CS");
        stats.consecutive_empty_fetches = 5;
        stats.last_completed = Utc::now() - chrono::Duration::hours(7);
        let result = evaluate_subject(&stats, Utc::now(), false);
        assert_eq!(result, SubjectSchedule::Eligible(PAUSE_PROBE_INTERVAL));
    }

    #[test]
    fn test_read_only_past_term() {
        let stats = make_stats("CS");
        let result = evaluate_subject(&stats, Utc::now(), true);
        assert_eq!(result, SubjectSchedule::ReadOnly);
    }

    #[test]
    fn test_cooldown_not_elapsed() {
        let mut stats = make_stats("CS");
        stats.avg_change_ratio = 0.15; // floor = 3 min
        // Use a peak-hours timestamp so multiplier = 1
        let peak = Utc.with_ymd_and_hms(2025, 7, 14, 15, 0, 0).unwrap(); // Mon 10am CT
        stats.last_completed = peak - chrono::Duration::seconds(30);
        let result = evaluate_subject(&stats, peak, false);
        assert!(matches!(result, SubjectSchedule::Cooldown(_)));
    }

    #[test]
    fn test_eligible_elapsed() {
        let mut stats = make_stats("CS");
        stats.avg_change_ratio = 0.15; // floor = 3 min
        let peak = Utc.with_ymd_and_hms(2025, 7, 14, 15, 0, 0).unwrap(); // Mon 10am CT
        stats.last_completed = peak - chrono::Duration::minutes(5);
        let result = evaluate_subject(&stats, peak, false);
        assert!(matches!(result, SubjectSchedule::Eligible(_)));
    }

    // -- time_of_day_multiplier tests --

    #[test]
    fn test_time_multiplier_peak() {
        // Monday 10am CT = 15:00 UTC
        let dt = Utc.with_ymd_and_hms(2025, 7, 14, 15, 0, 0).unwrap();
        assert_eq!(time_of_day_multiplier(dt), 1);
    }

    #[test]
    fn test_time_multiplier_offpeak() {
        // Tuesday 01:00 UTC = Monday 8pm CT (weekday off-peak)
        let dt = Utc.with_ymd_and_hms(2025, 7, 15, 1, 0, 0).unwrap();
        assert_eq!(time_of_day_multiplier(dt), 2);
    }

    #[test]
    fn test_time_multiplier_night() {
        // 3am CT = 08:00 UTC
        let dt = Utc.with_ymd_and_hms(2025, 7, 14, 8, 0, 0).unwrap();
        assert_eq!(time_of_day_multiplier(dt), 4);
    }

    #[test]
    fn test_time_multiplier_weekend() {
        // Saturday noon CT = 17:00 UTC
        let dt = Utc.with_ymd_and_hms(2025, 7, 12, 17, 0, 0).unwrap();
        assert_eq!(time_of_day_multiplier(dt), 4);
    }
}
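
Read together, the tiers and the multiplier compose multiplicatively. A hedged worked example, assuming the adaptive module is in scope (the `demo` name is made up): a 3% average change ratio lands in the 15-minute tier, and Monday 8pm CT is off-peak (multiplier 2), so the effective interval is 30 minutes.

```rust
// Worked example of compute_base_interval * time_of_day_multiplier.
use chrono::TimeZone;

fn demo() {
    let stats = SubjectStats {
        subject: "CS".to_string(),
        recent_runs: 10,
        avg_change_ratio: 0.03, // -> MODERATE_LOW_INTERVAL (15 min)
        consecutive_zero_changes: 0,
        consecutive_empty_fetches: 0,
        recent_failure_count: 0,
        recent_success_count: 10,
        // Last scraped a full hour before `now`.
        last_completed: chrono::Utc.with_ymd_and_hms(2025, 7, 15, 0, 0, 0).unwrap(),
    };
    // Tuesday 01:00 UTC = Monday 8pm CT: weekday off-peak, multiplier 2.
    let now = chrono::Utc.with_ymd_and_hms(2025, 7, 15, 1, 0, 0).unwrap();
    // 15 min base * 2 = 30 min effective; 60 min have elapsed, so the subject is due.
    assert_eq!(
        evaluate_subject(&stats, now, false),
        SubjectSchedule::Eligible(MODERATE_LOW_INTERVAL * 2)
    );
}
```
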
@@ -1,7 +1,7 @@
pub mod subject;

use crate::banner::BannerApi;
use crate::data::models::TargetType;
use crate::data::models::{TargetType, UpsertCounts};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -32,8 +32,9 @@ pub trait Job: Send + Sync {
    #[allow(dead_code)]
    fn target_type(&self) -> TargetType;

    /// Process the job with the given API client and database pool
    async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<()>;
    /// Process the job with the given API client and database pool.
    /// Returns upsert effectiveness counts on success.
    async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<UpsertCounts>;

    /// Get a human-readable description of the job
    fn description(&self) -> String;
@@ -1,7 +1,7 @@
use super::Job;
use crate::banner::{BannerApi, SearchQuery, Term};
use crate::data::batch::batch_upsert_courses;
use crate::data::models::TargetType;
use crate::data::models::{TargetType, UpsertCounts};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -26,7 +26,7 @@ impl Job for SubjectJob {
    }

    #[tracing::instrument(skip(self, banner_api, db_pool), fields(subject = %self.subject))]
    async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<()> {
    async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<UpsertCounts> {
        let subject_code = &self.subject;

        // Get the current term
@@ -37,17 +37,19 @@ impl Job for SubjectJob {
            .search(&term, &query, "subjectDescription", false)
            .await?;

        if let Some(courses_from_api) = search_result.data {
        let counts = if let Some(courses_from_api) = search_result.data {
            info!(
                subject = %subject_code,
                count = courses_from_api.len(),
                "Found courses"
            );
            batch_upsert_courses(&courses_from_api, db_pool).await?;
        }
            batch_upsert_courses(&courses_from_api, db_pool).await?
        } else {
            UpsertCounts::default()
        };

        debug!(subject = %subject_code, "Subject job completed");
        Ok(())
        Ok(counts)
    }

    fn description(&self) -> String {
+25
-3
@@ -1,11 +1,14 @@
pub mod adaptive;
pub mod jobs;
pub mod scheduler;
pub mod worker;

use crate::banner::BannerApi;
use crate::data::scrape_jobs;
use crate::services::Service;
use crate::state::ReferenceCache;
use crate::status::{ServiceStatus, ServiceStatusRegistry};
use crate::web::ws::ScrapeJobEvent;
use sqlx::PgPool;
use std::sync::Arc;
use tokio::sync::{RwLock, broadcast};
@@ -24,6 +27,7 @@ pub struct ScraperService {
    banner_api: Arc<BannerApi>,
    reference_cache: Arc<RwLock<ReferenceCache>>,
    service_statuses: ServiceStatusRegistry,
    job_events_tx: broadcast::Sender<ScrapeJobEvent>,
    scheduler_handle: Option<JoinHandle<()>>,
    worker_handles: Vec<JoinHandle<()>>,
    shutdown_tx: Option<broadcast::Sender<()>>,
@@ -36,12 +40,14 @@ impl ScraperService {
        banner_api: Arc<BannerApi>,
        reference_cache: Arc<RwLock<ReferenceCache>>,
        service_statuses: ServiceStatusRegistry,
        job_events_tx: broadcast::Sender<ScrapeJobEvent>,
    ) -> Self {
        Self {
            db_pool,
            banner_api,
            reference_cache,
            service_statuses,
            job_events_tx,
            scheduler_handle: None,
            worker_handles: Vec::new(),
            shutdown_tx: None,
@@ -49,7 +55,17 @@ impl ScraperService {
    }

    /// Starts the scheduler and a pool of workers.
    pub fn start(&mut self) {
    ///
    /// Force-unlocks any jobs left locked by a previous unclean shutdown before
    /// spawning workers, so those jobs re-enter the queue immediately.
    pub async fn start(&mut self) {
        // Recover jobs left locked by a previous crash/unclean shutdown
        match scrape_jobs::force_unlock_all(&self.db_pool).await {
            Ok(0) => {}
            Ok(count) => warn!(count, "Force-unlocked stale jobs from previous run"),
            Err(e) => warn!(error = ?e, "Failed to force-unlock stale jobs"),
        }

        info!("ScraperService starting");

        // Create shutdown channel
@@ -60,6 +76,7 @@ impl ScraperService {
            self.db_pool.clone(),
            self.banner_api.clone(),
            self.reference_cache.clone(),
            self.job_events_tx.clone(),
        );
        let shutdown_rx = shutdown_tx.subscribe();
        let scheduler_handle = tokio::spawn(async move {
@@ -70,7 +87,12 @@ impl ScraperService {

        let worker_count = 4; // This could be configurable
        for i in 0..worker_count {
            let worker = Worker::new(i, self.db_pool.clone(), self.banner_api.clone());
            let worker = Worker::new(
                i,
                self.db_pool.clone(),
                self.banner_api.clone(),
                self.job_events_tx.clone(),
            );
            let shutdown_rx = shutdown_tx.subscribe();
            let worker_handle = tokio::spawn(async move {
                worker.run(shutdown_rx).await;
@@ -92,7 +114,7 @@ impl Service for ScraperService {
    }

    async fn run(&mut self) -> Result<(), anyhow::Error> {
        self.start();
        self.start().await;
        std::future::pending::<()>().await;
        Ok(())
    }
+105
-23
@@ -3,10 +3,14 @@ use crate::data::models::{ReferenceData, ScrapePriority, TargetType};
use crate::data::scrape_jobs;
use crate::error::Result;
use crate::rmp::RmpClient;
use crate::scraper::adaptive::{SubjectSchedule, SubjectStats, evaluate_subject};
use crate::scraper::jobs::subject::SubjectJob;
use crate::state::ReferenceCache;
use crate::web::ws::{ScrapeJobDto, ScrapeJobEvent};
use chrono::{DateTime, Utc};
use serde_json::json;
use sqlx::PgPool;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::{RwLock, broadcast};
@@ -25,6 +29,7 @@ pub struct Scheduler {
    db_pool: PgPool,
    banner_api: Arc<BannerApi>,
    reference_cache: Arc<RwLock<ReferenceCache>>,
    job_events_tx: broadcast::Sender<ScrapeJobEvent>,
}

impl Scheduler {
@@ -32,11 +37,13 @@ impl Scheduler {
        db_pool: PgPool,
        banner_api: Arc<BannerApi>,
        reference_cache: Arc<RwLock<ReferenceCache>>,
        job_events_tx: broadcast::Sender<ScrapeJobEvent>,
    ) -> Self {
        Self {
            db_pool,
            banner_api,
            reference_cache,
            job_events_tx,
        }
    }

@@ -74,6 +81,7 @@ impl Scheduler {
        let banner_api = self.banner_api.clone();
        let cancel_token = cancel_token.clone();
        let reference_cache = self.reference_cache.clone();
        let job_events_tx = self.job_events_tx.clone();

        async move {
            tokio::select! {
@@ -99,7 +107,7 @@ impl Scheduler {

                    tokio::join!(rmp_fut, ref_fut);

                    if let Err(e) = Self::schedule_jobs_impl(&db_pool, &banner_api).await {
                    if let Err(e) = Self::schedule_jobs_impl(&db_pool, &banner_api, Some(&job_events_tx)).await {
                        error!(error = ?e, "Failed to schedule jobs");
                    }
                } => {}
@@ -143,18 +151,17 @@ impl Scheduler {

    /// Core scheduling logic that analyzes data and creates scrape jobs.
    ///
    /// Strategy:
    /// 1. Fetch all subjects for the current term from Banner API
    /// 2. Query existing jobs in a single batch query
    /// 3. Create jobs only for subjects that don't have pending jobs
    /// Uses adaptive scheduling to determine per-subject scrape intervals based
    /// on recent change rates, failure patterns, and time of day. Only subjects
    /// that are eligible (i.e. their cooldown has elapsed) are enqueued.
    ///
    /// This is a static method (not &self) to allow it to be called from spawned tasks.
    #[tracing::instrument(skip_all, fields(term))]
    async fn schedule_jobs_impl(db_pool: &PgPool, banner_api: &BannerApi) -> Result<()> {
        // For now, we will implement a simple baseline scheduling strategy:
        // 1. Get a list of all subjects from the Banner API.
        // 2. Query existing jobs for all subjects in a single query.
        // 3. Create new jobs only for subjects that don't have existing jobs.
    async fn schedule_jobs_impl(
        db_pool: &PgPool,
        banner_api: &BannerApi,
        job_events_tx: Option<&broadcast::Sender<ScrapeJobEvent>>,
    ) -> Result<()> {
        let term = Term::get_current().inner().to_string();

        tracing::Span::current().record("term", term.as_str());
@@ -166,13 +173,70 @@ impl Scheduler {
            "Retrieved subjects from API"
        );

        // Create payloads for all subjects
        let subject_payloads: Vec<_> = subjects
            .iter()
            .map(|subject| json!({ "subject": subject.code }))
        // Fetch per-subject stats and build a lookup map
        let stats_rows = scrape_jobs::fetch_subject_stats(db_pool).await?;
        let stats_map: HashMap<String, SubjectStats> = stats_rows
            .into_iter()
            .map(|row| {
                let subject = row.subject.clone();
                (subject, SubjectStats::from(row))
            })
            .collect();

        // Query existing jobs for all subjects in a single query
        // Evaluate each subject using adaptive scheduling
        let now = Utc::now();
        let is_past_term = false; // Scheduler currently only fetches current term subjects
        let mut eligible_subjects: Vec<String> = Vec::new();
        let mut cooldown_count: usize = 0;
        let mut paused_count: usize = 0;
        let mut read_only_count: usize = 0;

        for subject in &subjects {
            let stats = stats_map.get(&subject.code).cloned().unwrap_or_else(|| {
                // Cold start: no history for this subject
                SubjectStats {
                    subject: subject.code.clone(),
                    recent_runs: 0,
                    avg_change_ratio: 0.0,
                    consecutive_zero_changes: 0,
                    consecutive_empty_fetches: 0,
                    recent_failure_count: 0,
                    recent_success_count: 0,
                    last_completed: DateTime::<Utc>::MIN_UTC,
                }
            });

            match evaluate_subject(&stats, now, is_past_term) {
                SubjectSchedule::Eligible(_) => {
                    eligible_subjects.push(subject.code.clone());
                }
                SubjectSchedule::Cooldown(_) => cooldown_count += 1,
                SubjectSchedule::Paused => paused_count += 1,
                SubjectSchedule::ReadOnly => read_only_count += 1,
            }
        }

        info!(
            total = subjects.len(),
            eligible = eligible_subjects.len(),
            cooldown = cooldown_count,
            paused = paused_count,
            read_only = read_only_count,
            "Adaptive scheduling decisions"
        );

        if eligible_subjects.is_empty() {
            debug!("No eligible subjects to schedule");
            return Ok(());
        }

        // Create payloads only for eligible subjects
        let subject_payloads: Vec<_> = eligible_subjects
            .iter()
            .map(|code| json!({ "subject": code }))
            .collect();

        // Query existing jobs for eligible subjects only
        let existing_payloads = scrape_jobs::find_existing_job_payloads(
            TargetType::Subject,
            &subject_payloads,
@@ -180,12 +244,12 @@ impl Scheduler {
        )
        .await?;

        // Filter out subjects that already have jobs and prepare new jobs
        // Filter out subjects that already have pending jobs
        let mut skipped_count = 0;
        let new_jobs: Vec<_> = subjects
        let new_jobs: Vec<_> = eligible_subjects
            .into_iter()
            .filter_map(|subject| {
                let job = SubjectJob::new(subject.code.clone());
            .filter_map(|subject_code| {
                let job = SubjectJob::new(subject_code.clone());
                let payload = serde_json::to_value(&job).unwrap();
                let payload_str = payload.to_string();

@@ -193,7 +257,7 @@ impl Scheduler {
                    skipped_count += 1;
                    None
                } else {
                    Some((payload, subject.code))
                    Some((payload, subject_code))
                }
            })
            .collect();
@@ -213,7 +277,16 @@ impl Scheduler {
                .map(|(payload, _)| (payload, TargetType::Subject, ScrapePriority::Low))
                .collect();

            scrape_jobs::batch_insert_jobs(&jobs, db_pool).await?;
            let inserted = scrape_jobs::batch_insert_jobs(&jobs, db_pool).await?;

            if let Some(tx) = job_events_tx {
                inserted.iter().for_each(|job| {
                    debug!(job_id = job.id, "Emitting JobCreated event");
                    let _ = tx.send(ScrapeJobEvent::JobCreated {
                        job: ScrapeJobDto::from(job),
                    });
                });
            }
        }

        debug!("Job scheduling complete");
@@ -232,8 +305,17 @@ impl Scheduler {
        crate::data::rmp::batch_upsert_rmp_professors(&professors, db_pool).await?;
        info!(total, "RMP professors upserted");

        let matched = crate::data::rmp::auto_match_instructors(db_pool).await?;
        info!(total, matched, "RMP sync complete");
        let stats = crate::data::rmp_matching::generate_candidates(db_pool).await?;
        info!(
            total,
            stats.total_unmatched,
            stats.candidates_created,
            stats.candidates_rescored,
            stats.auto_matched,
            stats.skipped_unparseable,
            stats.skipped_no_candidates,
            "RMP sync complete"
        );

        Ok(())
    }
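
On the consuming side, any task can subscribe to these lifecycle events through the broadcast channel. A hedged sketch of a subscriber, assuming `ScrapeJobEvent` is in scope and `ScrapeJobDto` derives `Debug` (the function name is made up):

```rust
// Illustrative consumer; only ScrapeJobEvent::JobCreated comes from the diff.
use tokio::sync::broadcast;

async fn log_created_jobs(mut rx: broadcast::Receiver<ScrapeJobEvent>) {
    loop {
        match rx.recv().await {
            Ok(ScrapeJobEvent::JobCreated { job }) => {
                tracing::debug!(?job, "observed JobCreated");
            }
            Ok(_) => {} // other lifecycle events (locked, completed, retried, ...)
            // A lagged receiver skips missed events but stays subscribed.
            Err(broadcast::error::RecvError::Lagged(_)) => {}
            Err(broadcast::error::RecvError::Closed) => break,
        }
    }
}
```
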
+178
-18
@@ -1,8 +1,10 @@
use crate::banner::{BannerApi, BannerApiError};
use crate::data::models::ScrapeJob;
use crate::data::models::{ScrapeJob, ScrapeJobStatus, UpsertCounts};
use crate::data::scrape_jobs;
use crate::error::Result;
use crate::scraper::jobs::{JobError, JobType};
use crate::web::ws::ScrapeJobEvent;
use chrono::{DateTime, Utc};
use sqlx::PgPool;
use std::sync::Arc;
use std::time::Duration;
@@ -10,6 +12,9 @@ use tokio::sync::broadcast;
use tokio::time;
use tracing::{Instrument, debug, error, info, trace, warn};

/// Maximum time a single job is allowed to run before being considered stuck.
const JOB_TIMEOUT: Duration = Duration::from_secs(5 * 60);

/// A single worker instance.
///
/// Each worker runs in its own asynchronous task and continuously polls the
@@ -18,14 +23,21 @@ pub struct Worker {
    id: usize, // For logging purposes
    db_pool: PgPool,
    banner_api: Arc<BannerApi>,
    job_events_tx: broadcast::Sender<ScrapeJobEvent>,
}

impl Worker {
    pub fn new(id: usize, db_pool: PgPool, banner_api: Arc<BannerApi>) -> Self {
    pub fn new(
        id: usize,
        db_pool: PgPool,
        banner_api: Arc<BannerApi>,
        job_events_tx: broadcast::Sender<ScrapeJobEvent>,
    ) -> Self {
        Self {
            id,
            db_pool,
            banner_api,
            job_events_tx,
        }
    }

@@ -60,22 +72,57 @@ impl Worker {
            let job_id = job.id;
            let retry_count = job.retry_count;
            let max_retries = job.max_retries;
            let target_type = job.target_type;
            let payload = job.target_payload.clone();
            let priority = job.priority;
            let queued_at = job.queued_at;
            let started_at = Utc::now();
            let start = std::time::Instant::now();

            // Process the job, racing against shutdown signal
            // Emit JobLocked event
            let locked_at = started_at.to_rfc3339();
            debug!(job_id, "Emitting JobLocked event");
            let _ = self.job_events_tx.send(ScrapeJobEvent::JobLocked {
                id: job_id,
                locked_at,
                status: ScrapeJobStatus::Processing,
            });

            // Process the job, racing against shutdown signal and timeout
            let process_result = tokio::select! {
                _ = shutdown_rx.recv() => {
                    self.handle_shutdown_during_processing(job_id).await;
                    break;
                }
                result = self.process_job(job) => result
                result = async {
                    match time::timeout(JOB_TIMEOUT, self.process_job(job)).await {
                        Ok(result) => result,
                        Err(_elapsed) => {
                            Err(JobError::Recoverable(anyhow::anyhow!(
                                "job timed out after {}s",
                                JOB_TIMEOUT.as_secs()
                            )))
                        }
                    }
                } => result
            };

            let duration = start.elapsed();

            // Handle the job processing result
            self.handle_job_result(job_id, retry_count, max_retries, process_result, duration)
                .await;
            self.handle_job_result(
                job_id,
                retry_count,
                max_retries,
                process_result,
                duration,
                target_type,
                payload,
                priority,
                queued_at,
                started_at,
            )
            .await;
        }
    }

@@ -87,7 +134,7 @@ impl Worker {
        scrape_jobs::fetch_and_lock_job(&self.db_pool).await
    }

    async fn process_job(&self, job: ScrapeJob) -> Result<(), JobError> {
    async fn process_job(&self, job: ScrapeJob) -> Result<UpsertCounts, JobError> {
        // Convert the database job to our job type
        let job_type = JobType::from_target_type_and_payload(job.target_type, job.target_payload)
            .map_err(|e| JobError::Unrecoverable(anyhow::anyhow!(e)))?; // Parse errors are unrecoverable
@@ -114,9 +161,7 @@ impl Worker {
            job_impl
                .process(&self.banner_api, &self.db_pool)
                .await
                .map_err(JobError::Recoverable)?;

            Ok(())
                .map_err(JobError::Recoverable)
        }
        .instrument(span)
        .await
@@ -130,7 +175,11 @@ impl Worker {
        scrape_jobs::unlock_job(job_id, &self.db_pool).await
    }

    async fn unlock_and_increment_retry(&self, job_id: i32, max_retries: i32) -> Result<bool> {
    async fn unlock_and_increment_retry(
        &self,
        job_id: i32,
        max_retries: i32,
    ) -> Result<Option<chrono::DateTime<chrono::Utc>>> {
        scrape_jobs::unlock_and_increment_retry(job_id, max_retries, &self.db_pool).await
    }

@@ -156,31 +205,97 @@ impl Worker {
    }

    /// Handle the result of job processing
    #[allow(clippy::too_many_arguments)]
    async fn handle_job_result(
        &self,
        job_id: i32,
        retry_count: i32,
        max_retries: i32,
        result: Result<(), JobError>,
        result: Result<UpsertCounts, JobError>,
        duration: std::time::Duration,
        target_type: crate::data::models::TargetType,
        payload: serde_json::Value,
        priority: crate::data::models::ScrapePriority,
        queued_at: DateTime<Utc>,
        started_at: DateTime<Utc>,
    ) {
        let duration_ms = duration.as_millis() as i32;

        match result {
            Ok(()) => {
            Ok(counts) => {
                debug!(
                    worker_id = self.id,
                    job_id,
                    duration_ms = duration.as_millis(),
                    courses_fetched = counts.courses_fetched,
                    courses_changed = counts.courses_changed,
                    courses_unchanged = counts.courses_unchanged,
                    "Job completed successfully"
                );

                // Log the result
                if let Err(e) = scrape_jobs::insert_job_result(
                    target_type,
                    payload,
                    priority,
                    queued_at,
                    started_at,
                    duration_ms,
                    true,
                    None,
                    retry_count,
                    Some(&counts),
                    &self.db_pool,
                )
                .await
                {
                    error!(worker_id = self.id, job_id, error = ?e, "Failed to insert job result");
                }

                if let Err(e) = self.delete_job(job_id).await {
                    error!(worker_id = self.id, job_id, error = ?e, "Failed to delete completed job");
                }
                debug!(job_id, "Emitting JobCompleted event");
                let _ = self
                    .job_events_tx
                    .send(ScrapeJobEvent::JobCompleted { id: job_id });
            }
            Err(JobError::Recoverable(e)) => {
                self.handle_recoverable_error(job_id, retry_count, max_retries, e, duration)
                    .await;
                self.handle_recoverable_error(
                    job_id,
                    retry_count,
                    max_retries,
                    e,
                    duration,
                    target_type,
                    payload,
                    priority,
                    queued_at,
                    started_at,
                )
                .await;
            }
            Err(JobError::Unrecoverable(e)) => {
                // Log the failed result
                let err_msg = format!("{e:#}");
                if let Err(log_err) = scrape_jobs::insert_job_result(
                    target_type,
                    payload,
                    priority,
                    queued_at,
                    started_at,
                    duration_ms,
                    false,
                    Some(&err_msg),
                    retry_count,
                    None,
                    &self.db_pool,
                )
                .await
                {
                    error!(worker_id = self.id, job_id, error = ?log_err, "Failed to insert job result");
                }

                error!(
                    worker_id = self.id,
                    job_id,
@@ -191,11 +306,16 @@ impl Worker {
                if let Err(e) = self.delete_job(job_id).await {
                    error!(worker_id = self.id, job_id, error = ?e, "Failed to delete corrupted job");
                }
                debug!(job_id, "Emitting JobDeleted event");
                let _ = self
                    .job_events_tx
                    .send(ScrapeJobEvent::JobDeleted { id: job_id });
            }
        }
    }

    /// Handle recoverable errors by logging appropriately and unlocking the job
    #[allow(clippy::too_many_arguments)]
    async fn handle_recoverable_error(
        &self,
        job_id: i32,
@@ -203,6 +323,11 @@ impl Worker {
        max_retries: i32,
        e: anyhow::Error,
        duration: std::time::Duration,
        target_type: crate::data::models::TargetType,
        payload: serde_json::Value,
        priority: crate::data::models::ScrapePriority,
        queued_at: DateTime<Utc>,
        started_at: DateTime<Utc>,
    ) {
        let next_attempt = retry_count.saturating_add(1);
        let remaining_retries = max_retries.saturating_sub(next_attempt);
@@ -233,7 +358,7 @@ impl Worker {

        // Atomically unlock and increment retry count, checking if retry is allowed
        match self.unlock_and_increment_retry(job_id, max_retries).await {
            Ok(can_retry) if can_retry => {
            Ok(Some(new_queued_at)) => {
                debug!(
                    worker_id = self.id,
                    job_id,
@@ -241,9 +366,37 @@ impl Worker {
                    remaining_retries = remaining_retries,
                    "Job unlocked for retry"
                );
                debug!(job_id, "Emitting JobRetried event");
                let _ = self.job_events_tx.send(ScrapeJobEvent::JobRetried {
                    id: job_id,
                    retry_count: next_attempt,
                    queued_at: new_queued_at.to_rfc3339(),
                    status: ScrapeJobStatus::Pending,
                });
                // Don't log a result yet — the job will be retried
            }
            Ok(_) => {
                // Max retries exceeded (detected atomically)
            Ok(None) => {
                // Max retries exceeded — log final failure result
                let duration_ms = duration.as_millis() as i32;
                let err_msg = format!("{e:#}");
                if let Err(log_err) = scrape_jobs::insert_job_result(
                    target_type,
                    payload,
                    priority,
                    queued_at,
                    started_at,
                    duration_ms,
                    false,
                    Some(&err_msg),
                    next_attempt,
                    None,
                    &self.db_pool,
                )
                .await
                {
                    error!(worker_id = self.id, job_id, error = ?log_err, "Failed to insert job result");
                }

                error!(
                    worker_id = self.id,
                    job_id,
@@ -256,6 +409,13 @@ impl Worker {
                if let Err(e) = self.delete_job(job_id).await {
                    error!(worker_id = self.id, job_id, error = ?e, "Failed to delete failed job");
                }
                debug!(job_id, "Emitting JobExhausted and JobDeleted events");
                let _ = self
                    .job_events_tx
                    .send(ScrapeJobEvent::JobExhausted { id: job_id });
                let _ = self
                    .job_events_tx
                    .send(ScrapeJobEvent::JobDeleted { id: job_id });
            }
            Err(e) => {
                error!(worker_id = self.id, job_id, error = ?e, "Failed to unlock and increment retry count");
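
The key pattern in this change is wrapping `process_job` in `time::timeout` inside the `select!`, so a hung job surfaces as a recoverable error after five minutes instead of wedging the worker forever. A standalone, hedged sketch of the same pattern (names are illustrative):

```rust
// Timeout-as-recoverable-error, extracted from the worker loop above.
use std::time::Duration;
use tokio::time;

async fn run_with_deadline<F, T>(fut: F, deadline: Duration) -> anyhow::Result<T>
where
    F: std::future::Future<Output = anyhow::Result<T>>,
{
    match time::timeout(deadline, fut).await {
        // The job finished (successfully or not) within the deadline.
        Ok(result) => result,
        // The deadline elapsed; report it as an ordinary, retryable error.
        Err(_elapsed) => Err(anyhow::anyhow!(
            "job timed out after {}s",
            deadline.as_secs()
        )),
    }
}
```
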
+39
-2
@@ -1,6 +1,7 @@
use super::Service;
use crate::state::AppState;
use crate::status::ServiceStatus;
use crate::web::auth::AuthConfig;
use crate::web::create_router;
use std::net::SocketAddr;
use tokio::net::TcpListener;
@@ -11,14 +12,16 @@ use tracing::{info, trace, warn};
pub struct WebService {
    port: u16,
    app_state: AppState,
    auth_config: AuthConfig,
    shutdown_tx: Option<broadcast::Sender<()>>,
}

impl WebService {
    pub fn new(port: u16, app_state: AppState) -> Self {
    pub fn new(port: u16, app_state: AppState, auth_config: AuthConfig) -> Self {
        Self {
            port,
            app_state,
            auth_config,
            shutdown_tx: None,
        }
    }
@@ -48,6 +51,33 @@ impl WebService {
            }
        }
    }

    /// Periodically cleans up expired sessions from the database and in-memory cache.
    async fn session_cleanup_loop(state: AppState, mut shutdown_rx: broadcast::Receiver<()>) {
        use std::time::Duration;
        // Run every hour
        let mut interval = tokio::time::interval(Duration::from_secs(3600));

        loop {
            tokio::select! {
                _ = interval.tick() => {
                    match state.session_cache.cleanup_expired().await {
                        Ok(deleted) => {
                            if deleted > 0 {
                                info!(deleted, "cleaned up expired sessions");
                            }
                        }
                        Err(e) => {
                            warn!(error = %e, "session cleanup failed");
                        }
                    }
                }
                _ = shutdown_rx.recv() => {
                    break;
                }
            }
        }
    }
}

#[async_trait::async_trait]
@@ -58,7 +88,7 @@ impl Service for WebService {

    async fn run(&mut self) -> Result<(), anyhow::Error> {
        // Create the main router with Banner API routes
        let app = create_router(self.app_state.clone());
        let app = create_router(self.app_state.clone(), self.auth_config.clone());

        let addr = SocketAddr::from(([0, 0, 0, 0], self.port));

@@ -84,6 +114,13 @@ impl Service for WebService {
            Self::db_health_check_loop(health_state, health_shutdown_rx).await;
        });

        // Spawn session cleanup task
        let cleanup_state = self.app_state.clone();
        let cleanup_shutdown_rx = shutdown_tx.subscribe();
        tokio::spawn(async move {
            Self::session_cleanup_loop(cleanup_state, cleanup_shutdown_rx).await;
        });

        // Use axum's graceful shutdown with the internal shutdown signal
        axum::serve(listener, app)
            .with_graceful_shutdown(async move {
+36
-13
@@ -4,18 +4,22 @@ use crate::banner::BannerApi;
use crate::banner::Course;
use crate::data::models::ReferenceData;
use crate::status::ServiceStatusRegistry;
use crate::web::schedule_cache::ScheduleCache;
use crate::web::session_cache::{OAuthStateStore, SessionCache};
use crate::web::ws::ScrapeJobEvent;
use anyhow::Result;
use sqlx::PgPool;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use tokio::sync::{RwLock, broadcast};

/// In-memory cache for reference data (code→description lookups).
///
/// Loaded from the `reference_data` table on startup and refreshed periodically.
/// Uses a two-level HashMap so lookups take `&str` without allocating.
pub struct ReferenceCache {
    /// `(category, code)` → `description`
    data: HashMap<(String, String), String>,
    /// category → (code → description)
    data: HashMap<String, HashMap<String, String>>,
}

impl Default for ReferenceCache {
@@ -34,27 +38,31 @@ impl ReferenceCache {

    /// Build cache from a list of reference data entries.
    pub fn from_entries(entries: Vec<ReferenceData>) -> Self {
        let data = entries
            .into_iter()
            .map(|e| ((e.category, e.code), e.description))
            .collect();
        let mut data: HashMap<String, HashMap<String, String>> = HashMap::new();
        for e in entries {
            data.entry(e.category)
                .or_default()
                .insert(e.code, e.description);
        }
        Self { data }
    }

    /// Look up a description by category and code.
    /// Look up a description by category and code. Zero allocations.
    pub fn lookup(&self, category: &str, code: &str) -> Option<&str> {
        self.data
            .get(&(category.to_string(), code.to_string()))
            .get(category)
            .and_then(|codes| codes.get(code))
            .map(|s| s.as_str())
    }

    /// Get all `(code, description)` pairs for a category, sorted by description.
    pub fn entries_for_category(&self, category: &str) -> Vec<(&str, &str)> {
        let mut entries: Vec<(&str, &str)> = self
            .data
        let Some(codes) = self.data.get(category) else {
            return Vec::new();
        };
        let mut entries: Vec<(&str, &str)> = codes
            .iter()
            .filter(|((cat, _), _)| cat == category)
            .map(|((_, code), desc)| (code.as_str(), desc.as_str()))
            .map(|(code, desc)| (code.as_str(), desc.as_str()))
            .collect();
        entries.sort_by(|a, b| a.1.cmp(b.1));
        entries
@@ -67,18 +75,33 @@ pub struct AppState {
    pub db_pool: PgPool,
    pub service_statuses: ServiceStatusRegistry,
    pub reference_cache: Arc<RwLock<ReferenceCache>>,
    pub session_cache: SessionCache,
    pub oauth_state_store: OAuthStateStore,
    pub schedule_cache: ScheduleCache,
    pub scrape_job_tx: broadcast::Sender<ScrapeJobEvent>,
}

impl AppState {
    pub fn new(banner_api: Arc<BannerApi>, db_pool: PgPool) -> Self {
        let (scrape_job_tx, _) = broadcast::channel(64);
        let schedule_cache = ScheduleCache::new(db_pool.clone());
        Self {
            session_cache: SessionCache::new(db_pool.clone()),
            oauth_state_store: OAuthStateStore::new(),
            banner_api,
            db_pool,
            service_statuses: ServiceStatusRegistry::new(),
            reference_cache: Arc::new(RwLock::new(ReferenceCache::new())),
            schedule_cache,
            scrape_job_tx,
        }
    }

    /// Subscribe to scrape job lifecycle events.
    pub fn scrape_job_events(&self) -> broadcast::Receiver<ScrapeJobEvent> {
        self.scrape_job_tx.subscribe()
    }

    /// Initialize the reference cache from the database.
    pub async fn load_reference_cache(&self) -> Result<()> {
        let entries = crate::data::reference::get_all(&self.db_pool).await?;
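
The rework matters on the hot path: the old `(String, String)`-keyed map forced two `to_string()` allocations per `lookup`, while the two-level map borrows `&str` keys at both levels. A hedged sketch of the call sites (category and code values are made up):

```rust
// Illustrative use of the reworked cache.
fn demo(cache: &ReferenceCache) {
    // Both levels are probed with borrowed &str; nothing is allocated.
    if let Some(desc) = cache.lookup("campus", "MAIN") {
        println!("campus MAIN = {desc}");
    }
    // Sorted (code, description) pairs, e.g. for populating a dropdown.
    for (code, desc) in cache.entries_for_category("campus") {
        println!("{code}: {desc}");
    }
}
```
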
@@ -10,6 +10,7 @@ use ts_rs::TS;
#[serde(rename_all = "lowercase")]
#[ts(export)]
pub enum ServiceStatus {
    #[allow(dead_code)]
    Starting,
    Active,
    Connected,
@@ -21,6 +22,7 @@ pub enum ServiceStatus {
#[derive(Debug, Clone)]
pub struct StatusEntry {
    pub status: ServiceStatus,
    #[allow(dead_code)]
    pub updated_at: Instant,
}

@@ -48,6 +50,7 @@ impl ServiceStatusRegistry {
    }

    /// Returns the current status of a named service, if present.
    #[allow(dead_code)]
    pub fn get(&self, name: &str) -> Option<ServiceStatus> {
        self.inner.get(name).map(|entry| entry.status.clone())
    }
@@ -0,0 +1,269 @@
|
||||
//! Admin API handlers.
|
||||
//!
|
||||
//! All endpoints require the `AdminUser` extractor, returning 401/403 as needed.
|
||||
|
||||
use axum::extract::{Path, State};
|
||||
use axum::http::{HeaderMap, StatusCode, header};
|
||||
use axum::response::{IntoResponse, Json, Response};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::Deserialize;
|
||||
use serde_json::{Value, json};
|
||||
|
||||
use crate::data::models::User;
|
||||
use crate::state::AppState;
|
||||
use crate::web::extractors::AdminUser;
|
||||
|
||||
/// `GET /api/admin/status` — Enhanced system status for admins.
|
||||
pub async fn admin_status(
|
||||
AdminUser(_user): AdminUser,
|
||||
State(state): State<AppState>,
|
||||
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
|
||||
let (user_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM users")
|
||||
.fetch_one(&state.db_pool)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
tracing::error!(error = %e, "failed to count users");
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(json!({"error": "failed to count users"})),
|
||||
)
|
||||
})?;
|
||||
|
||||
let (session_count,): (i64,) =
|
||||
sqlx::query_as("SELECT COUNT(*) FROM user_sessions WHERE expires_at > now()")
|
||||
.fetch_one(&state.db_pool)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
tracing::error!(error = %e, "failed to count sessions");
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(json!({"error": "failed to count sessions"})),
|
||||
)
|
||||
})?;
|
||||
|
||||
let course_count = state.get_course_count().await.map_err(|e| {
|
||||
tracing::error!(error = %e, "failed to count courses");
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(json!({"error": "failed to count courses"})),
|
||||
)
|
||||
})?;
|
||||
|
||||
let (scrape_job_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM scrape_jobs")
|
||||
.fetch_one(&state.db_pool)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
tracing::error!(error = %e, "failed to count scrape jobs");
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(json!({"error": "failed to count scrape jobs"})),
|
||||
)
|
||||
})?;
|
||||
|
||||
let services: Vec<Value> = state
|
||||
.service_statuses
|
||||
.all()
|
||||
.into_iter()
|
||||
.map(|(name, status)| {
|
||||
json!({
|
||||
"name": name,
|
||||
"status": status,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(Json(json!({
|
||||
"userCount": user_count,
|
||||
"sessionCount": session_count,
|
||||
"courseCount": course_count,
|
||||
"scrapeJobCount": scrape_job_count,
|
||||
"services": services,
|
||||
})))
|
||||
}
|
||||
|
||||
/// `GET /api/admin/users` — List all users.
|
||||
pub async fn list_users(
|
||||
AdminUser(_user): AdminUser,
|
||||
    State(state): State<AppState>,
) -> Result<Json<Vec<User>>, (StatusCode, Json<Value>)> {
    let users = crate::data::users::list_users(&state.db_pool)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "failed to list users");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": "failed to list users"})),
            )
        })?;

    Ok(Json(users))
}

#[derive(Deserialize)]
pub struct SetAdminBody {
    is_admin: bool,
}

/// `PUT /api/admin/users/{discord_id}/admin` — Set admin status for a user.
pub async fn set_user_admin(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
    Path(discord_id): Path<i64>,
    Json(body): Json<SetAdminBody>,
) -> Result<Json<User>, (StatusCode, Json<Value>)> {
    let user = crate::data::users::set_admin(&state.db_pool, discord_id, body.is_admin)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "failed to set admin status");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": "failed to set admin status"})),
            )
        })?
        .ok_or_else(|| {
            (
                StatusCode::NOT_FOUND,
                Json(json!({"error": "user not found"})),
            )
        })?;

    state.session_cache.evict_user(discord_id);

    Ok(Json(user))
}

/// `GET /api/admin/scrape-jobs` — List scrape jobs.
pub async fn list_scrape_jobs(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
    let rows = sqlx::query_as::<_, crate::data::models::ScrapeJob>(
        "SELECT * FROM scrape_jobs ORDER BY priority DESC, execute_at ASC LIMIT 100",
    )
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "failed to list scrape jobs");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": "failed to list scrape jobs"})),
        )
    })?;

    let jobs: Vec<Value> = rows
        .iter()
        .map(|j| {
            json!({
                "id": j.id,
                "targetType": format!("{:?}", j.target_type),
                "targetPayload": j.target_payload,
                "priority": format!("{:?}", j.priority),
                "executeAt": j.execute_at.to_rfc3339(),
                "createdAt": j.created_at.to_rfc3339(),
                "lockedAt": j.locked_at.map(|t| t.to_rfc3339()),
                "retryCount": j.retry_count,
                "maxRetries": j.max_retries,
                "queuedAt": j.queued_at.to_rfc3339(),
                "status": j.status(),
            })
        })
        .collect();

    Ok(Json(json!({ "jobs": jobs })))
}

/// Row returned by the audit-log query (audit + joined course fields).
#[derive(sqlx::FromRow, Debug)]
struct AuditRow {
    id: i32,
    course_id: i32,
    timestamp: chrono::DateTime<chrono::Utc>,
    field_changed: String,
    old_value: String,
    new_value: String,
    // Joined from courses table (nullable in case the course was deleted)
    subject: Option<String>,
    course_number: Option<String>,
    crn: Option<String>,
    title: Option<String>,
}

/// Format a `DateTime<Utc>` as an HTTP-date (RFC 2822) for Last-Modified headers.
fn to_http_date(dt: &DateTime<Utc>) -> String {
    dt.format("%a, %d %b %Y %H:%M:%S GMT").to_string()
}

/// Parse an `If-Modified-Since` header value into a `DateTime<Utc>`.
fn parse_if_modified_since(headers: &HeaderMap) -> Option<DateTime<Utc>> {
    let val = headers.get(header::IF_MODIFIED_SINCE)?.to_str().ok()?;
    DateTime::parse_from_rfc2822(val)
        .ok()
        .map(|dt| dt.with_timezone(&Utc))
}
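
// Illustrative round-trip check (a sketch, not part of the handlers above):
// `to_http_date` emits an RFC 2822-compatible date, which chrono's
// `parse_from_rfc2822` reads back, so the two helpers agree at the second
// precision that HTTP dates carry.
#[cfg(test)]
mod http_date_tests {
    use super::*;

    #[test]
    fn http_date_round_trips_at_second_precision() {
        let now = Utc::now();
        let mut headers = HeaderMap::new();
        headers.insert(
            header::IF_MODIFIED_SINCE,
            to_http_date(&now).parse().expect("valid header value"),
        );
        let parsed = parse_if_modified_since(&headers).expect("should parse back");
        // Sub-second precision is lost by the HTTP date format.
        assert_eq!(parsed.timestamp(), now.timestamp());
    }
}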

/// `GET /api/admin/audit-log` — List recent audit entries.
///
/// Supports `If-Modified-Since`: returns 304 when the newest entry hasn't changed.
pub async fn list_audit_log(
    AdminUser(_user): AdminUser,
    headers: HeaderMap,
    State(state): State<AppState>,
) -> Result<Response, (StatusCode, Json<Value>)> {
    let rows = sqlx::query_as::<_, AuditRow>(
        "SELECT a.id, a.course_id, a.timestamp, a.field_changed, a.old_value, a.new_value, \
         c.subject, c.course_number, c.crn, c.title \
         FROM course_audits a \
         LEFT JOIN courses c ON c.id = a.course_id \
         ORDER BY a.timestamp DESC LIMIT 200",
    )
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "failed to list audit log");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": "failed to list audit log"})),
        )
    })?;

    // Determine the latest timestamp across all rows (query is DESC so first row is newest)
    let latest = rows.first().map(|r| r.timestamp);

    // If the client sent If-Modified-Since and our data hasn't changed, return 304
    if let (Some(since), Some(latest_ts)) = (parse_if_modified_since(&headers), latest) {
        // Truncate to seconds for comparison (HTTP dates have second precision)
        if latest_ts.timestamp() <= since.timestamp() {
            let mut resp = StatusCode::NOT_MODIFIED.into_response();
            if let Ok(val) = to_http_date(&latest_ts).parse() {
                resp.headers_mut().insert(header::LAST_MODIFIED, val);
            }
            return Ok(resp);
        }
    }

    let entries: Vec<Value> = rows
        .iter()
        .map(|a| {
            json!({
                "id": a.id,
                "courseId": a.course_id,
                "timestamp": a.timestamp.to_rfc3339(),
                "fieldChanged": a.field_changed,
                "oldValue": a.old_value,
                "newValue": a.new_value,
                "subject": a.subject,
                "courseNumber": a.course_number,
                "crn": a.crn,
                "courseTitle": a.title,
            })
        })
        .collect();

    let mut resp = Json(json!({ "entries": entries })).into_response();
    if let Some(latest_ts) = latest
        && let Ok(val) = to_http_date(&latest_ts).parse()
    {
        resp.headers_mut().insert(header::LAST_MODIFIED, val);
    }
    Ok(resp)
}

@@ -0,0 +1,827 @@
//! Admin API handlers for RMP instructor matching management.

use axum::extract::{Path, Query, State};
use axum::http::StatusCode;
use axum::response::Json;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use ts_rs::TS;

use crate::state::AppState;
use crate::web::extractors::AdminUser;

// ---------------------------------------------------------------------------
// Query / body types
// ---------------------------------------------------------------------------

#[derive(Deserialize)]
pub struct ListInstructorsParams {
    status: Option<String>,
    search: Option<String>,
    page: Option<i32>,
    per_page: Option<i32>,
    sort: Option<String>,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MatchBody {
    rmp_legacy_id: i32,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RejectCandidateBody {
    rmp_legacy_id: i32,
}

// ---------------------------------------------------------------------------
// Response types
// ---------------------------------------------------------------------------

/// Simple acknowledgement response for mutating operations.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct OkResponse {
    pub ok: bool,
}

/// A top-candidate summary shown in the instructor list view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TopCandidateResponse {
    pub rmp_legacy_id: i32,
    pub score: Option<f32>,
    #[ts(as = "Option<std::collections::HashMap<String, f32>>")]
    pub score_breakdown: Option<serde_json::Value>,
    pub first_name: Option<String>,
    pub last_name: Option<String>,
    pub department: Option<String>,
    pub avg_rating: Option<f32>,
    pub num_ratings: Option<i32>,
}

/// An instructor row in the paginated list.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorListItem {
    pub id: i32,
    pub display_name: String,
    pub email: String,
    pub rmp_match_status: String,
    #[ts(as = "i32")]
    pub rmp_link_count: i64,
    #[ts(as = "i32")]
    pub candidate_count: i64,
    #[ts(as = "i32")]
    pub course_subject_count: i64,
    pub top_candidate: Option<TopCandidateResponse>,
}

/// Aggregate status counts for the instructor list.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorStats {
    #[ts(as = "i32")]
    pub total: i64,
    #[ts(as = "i32")]
    pub unmatched: i64,
    #[ts(as = "i32")]
    pub auto: i64,
    #[ts(as = "i32")]
    pub confirmed: i64,
    #[ts(as = "i32")]
    pub rejected: i64,
    #[ts(as = "i32")]
    pub with_candidates: i64,
}

/// Response for `GET /api/admin/instructors`.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ListInstructorsResponse {
    pub instructors: Vec<InstructorListItem>,
    #[ts(as = "i32")]
    pub total: i64,
    pub page: i32,
    pub per_page: i32,
    pub stats: InstructorStats,
}

/// Instructor summary in the detail view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorDetail {
    pub id: i32,
    pub display_name: String,
    pub email: String,
    pub rmp_match_status: String,
    pub subjects_taught: Vec<String>,
    #[ts(as = "i32")]
    pub course_count: i64,
}

/// A linked RMP profile in the detail view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct LinkedRmpProfile {
    pub link_id: i32,
    pub legacy_id: i32,
    pub first_name: Option<String>,
    pub last_name: Option<String>,
    pub department: Option<String>,
    pub avg_rating: Option<f32>,
    pub avg_difficulty: Option<f32>,
    pub num_ratings: Option<i32>,
    pub would_take_again_pct: Option<f32>,
}

/// A match candidate in the detail view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CandidateResponse {
    pub id: i32,
    pub rmp_legacy_id: i32,
    pub first_name: Option<String>,
    pub last_name: Option<String>,
    pub department: Option<String>,
    pub avg_rating: Option<f32>,
    pub avg_difficulty: Option<f32>,
    pub num_ratings: Option<i32>,
    pub would_take_again_pct: Option<f32>,
    pub score: Option<f32>,
    #[ts(as = "Option<std::collections::HashMap<String, f32>>")]
    pub score_breakdown: Option<serde_json::Value>,
    pub status: String,
}

/// Response for `GET /api/admin/instructors/{id}` and `POST .../match`.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorDetailResponse {
    pub instructor: InstructorDetail,
    pub current_matches: Vec<LinkedRmpProfile>,
    pub candidates: Vec<CandidateResponse>,
}

/// Response for `POST /api/admin/rmp/rescore`.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct RescoreResponse {
    pub total_unmatched: usize,
    pub candidates_created: usize,
    pub candidates_rescored: usize,
    pub auto_matched: usize,
    pub skipped_unparseable: usize,
    pub skipped_no_candidates: usize,
}

// ---------------------------------------------------------------------------
// Helper: map sqlx errors to the standard admin error tuple
// ---------------------------------------------------------------------------

fn db_error(context: &str, e: sqlx::Error) -> (StatusCode, Json<Value>) {
    tracing::error!(error = %e, "{context}");
    (
        StatusCode::INTERNAL_SERVER_ERROR,
        Json(json!({"error": context})),
    )
}

// ---------------------------------------------------------------------------
// Row types for SQL queries
// ---------------------------------------------------------------------------

#[derive(sqlx::FromRow)]
struct InstructorRow {
    id: i32,
    display_name: String,
    email: String,
    rmp_match_status: String,
    rmp_link_count: Option<i64>,
    top_candidate_rmp_id: Option<i32>,
    top_candidate_score: Option<f32>,
    top_candidate_breakdown: Option<serde_json::Value>,
    tc_first_name: Option<String>,
    tc_last_name: Option<String>,
    tc_department: Option<String>,
    tc_avg_rating: Option<f32>,
    tc_num_ratings: Option<i32>,
    candidate_count: Option<i64>,
    course_subject_count: Option<i64>,
}

#[derive(sqlx::FromRow)]
struct StatusCount {
    rmp_match_status: String,
    count: i64,
}

#[derive(sqlx::FromRow)]
struct CandidateRow {
    id: i32,
    rmp_legacy_id: i32,
    score: Option<f32>,
    score_breakdown: Option<serde_json::Value>,
    status: String,
    first_name: Option<String>,
    last_name: Option<String>,
    department: Option<String>,
    avg_rating: Option<f32>,
    avg_difficulty: Option<f32>,
    num_ratings: Option<i32>,
    would_take_again_pct: Option<f32>,
}

#[derive(sqlx::FromRow)]
struct LinkedRmpProfileRow {
    link_id: i32,
    legacy_id: i32,
    first_name: Option<String>,
    last_name: Option<String>,
    department: Option<String>,
    avg_rating: Option<f32>,
    avg_difficulty: Option<f32>,
    num_ratings: Option<i32>,
    would_take_again_pct: Option<f32>,
}

// ---------------------------------------------------------------------------
// 1. GET /api/admin/instructors — paginated list with filtering
// ---------------------------------------------------------------------------

/// `GET /api/admin/instructors` — List instructors with filtering and pagination.
pub async fn list_instructors(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
    Query(params): Query<ListInstructorsParams>,
) -> Result<Json<ListInstructorsResponse>, (StatusCode, Json<Value>)> {
    let page = params.page.unwrap_or(1).max(1);
    let per_page = params.per_page.unwrap_or(50).clamp(1, 100);
    let offset = (page - 1) * per_page;

    let sort_clause = match params.sort.as_deref() {
        Some("name_asc") => "i.display_name ASC",
        Some("name_desc") => "i.display_name DESC",
        Some("status") => "i.rmp_match_status ASC, i.display_name ASC",
        _ => "tc.score DESC NULLS LAST, i.display_name ASC",
    };

    // Build WHERE clause
    let mut conditions = Vec::new();
    let mut bind_idx = 0u32;

    if params.status.is_some() {
        bind_idx += 1;
        conditions.push(format!("i.rmp_match_status = ${bind_idx}"));
    }
    if params.search.is_some() {
        bind_idx += 1;
        conditions.push(format!(
            "(i.display_name ILIKE ${bind_idx} OR i.email ILIKE ${bind_idx})"
        ));
    }

    let where_clause = if conditions.is_empty() {
        String::new()
    } else {
        format!("WHERE {}", conditions.join(" AND "))
    };

    let query_str = format!(
        r#"
        SELECT
            i.id, i.display_name, i.email, i.rmp_match_status,
            (SELECT COUNT(*) FROM instructor_rmp_links irl WHERE irl.instructor_id = i.id) as rmp_link_count,
            tc.rmp_legacy_id as top_candidate_rmp_id,
            tc.score as top_candidate_score,
            tc.score_breakdown as top_candidate_breakdown,
            rp.first_name as tc_first_name,
            rp.last_name as tc_last_name,
            rp.department as tc_department,
            rp.avg_rating as tc_avg_rating,
            rp.num_ratings as tc_num_ratings,
            (SELECT COUNT(*) FROM rmp_match_candidates mc WHERE mc.instructor_id = i.id AND mc.status = 'pending') as candidate_count,
            (SELECT COUNT(DISTINCT c.subject) FROM course_instructors ci JOIN courses c ON c.id = ci.course_id WHERE ci.instructor_id = i.id) as course_subject_count
        FROM instructors i
        LEFT JOIN LATERAL (
            SELECT mc.rmp_legacy_id, mc.score, mc.score_breakdown
            FROM rmp_match_candidates mc
            WHERE mc.instructor_id = i.id AND mc.status = 'pending'
            ORDER BY mc.score DESC
            LIMIT 1
        ) tc ON true
        LEFT JOIN rmp_professors rp ON rp.legacy_id = tc.rmp_legacy_id
        {where_clause}
        ORDER BY {sort_clause}
        LIMIT {per_page} OFFSET {offset}
        "#
    );

    // Build the query with dynamic binds
    let mut query = sqlx::query_as::<_, InstructorRow>(&query_str);
    if let Some(ref status) = params.status {
        query = query.bind(status);
    }
    if let Some(ref search) = params.search {
        query = query.bind(format!("%{search}%"));
    }

    let rows = query
        .fetch_all(&state.db_pool)
        .await
        .map_err(|e| db_error("failed to list instructors", e))?;

    // Count total with filters
    let count_query_str = format!("SELECT COUNT(*) FROM instructors i {where_clause}");
    let mut count_query = sqlx::query_as::<_, (i64,)>(&count_query_str);
    if let Some(ref status) = params.status {
        count_query = count_query.bind(status);
    }
    if let Some(ref search) = params.search {
        count_query = count_query.bind(format!("%{search}%"));
    }

    let (total,) = count_query
        .fetch_one(&state.db_pool)
        .await
        .map_err(|e| db_error("failed to count instructors", e))?;

    // Aggregate stats (unfiltered)
    let stats_rows = sqlx::query_as::<_, StatusCount>(
        "SELECT rmp_match_status, COUNT(*) as count FROM instructors GROUP BY rmp_match_status",
    )
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| db_error("failed to get instructor stats", e))?;

    // Count instructors with at least one candidate (for progress bar denominator)
    let (with_candidates,): (i64,) =
        sqlx::query_as("SELECT COUNT(DISTINCT instructor_id) FROM rmp_match_candidates")
            .fetch_one(&state.db_pool)
            .await
            .map_err(|e| db_error("failed to count instructors with candidates", e))?;

    let mut stats = InstructorStats {
        total: 0,
        unmatched: 0,
        auto: 0,
        confirmed: 0,
        rejected: 0,
        with_candidates,
    };
    for row in &stats_rows {
        stats.total += row.count;
        match row.rmp_match_status.as_str() {
            "unmatched" => stats.unmatched = row.count,
            "auto" => stats.auto = row.count,
            "confirmed" => stats.confirmed = row.count,
            "rejected" => stats.rejected = row.count,
            _ => {}
        }
    }

    let instructors: Vec<InstructorListItem> = rows
        .iter()
        .map(|r| {
            let top_candidate = r.top_candidate_rmp_id.map(|rmp_id| TopCandidateResponse {
                rmp_legacy_id: rmp_id,
                score: r.top_candidate_score,
                score_breakdown: r.top_candidate_breakdown.clone(),
                first_name: r.tc_first_name.clone(),
                last_name: r.tc_last_name.clone(),
                department: r.tc_department.clone(),
                avg_rating: r.tc_avg_rating,
                num_ratings: r.tc_num_ratings,
            });

            InstructorListItem {
                id: r.id,
                display_name: r.display_name.clone(),
                email: r.email.clone(),
                rmp_match_status: r.rmp_match_status.clone(),
                rmp_link_count: r.rmp_link_count.unwrap_or(0),
                candidate_count: r.candidate_count.unwrap_or(0),
                course_subject_count: r.course_subject_count.unwrap_or(0),
                top_candidate,
            }
        })
        .collect();

    Ok(Json(ListInstructorsResponse {
        instructors,
        total,
        page,
        per_page,
        stats,
    }))
}
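
// A minimal standalone sketch of the positional-bind pattern used above
// (hypothetical helper, not called anywhere): the `$n` placeholders are
// appended in the same order the values are bound later, which is what keeps
// the optional filters and the bind calls in sync without a query builder.
#[allow(dead_code)]
async fn count_instructors_filtered(
    pool: &sqlx::PgPool,
    status: Option<&str>,
    search: Option<&str>,
) -> Result<i64, sqlx::Error> {
    let mut conditions = Vec::new();
    let mut bind_idx = 0u32;
    if status.is_some() {
        bind_idx += 1;
        conditions.push(format!("i.rmp_match_status = ${bind_idx}"));
    }
    if search.is_some() {
        bind_idx += 1;
        conditions.push(format!("i.display_name ILIKE ${bind_idx}"));
    }
    let where_clause = if conditions.is_empty() {
        String::new()
    } else {
        format!("WHERE {}", conditions.join(" AND "))
    };
    let sql = format!("SELECT COUNT(*) FROM instructors i {where_clause}");

    // Bind in exactly the order the placeholders were numbered.
    let mut query = sqlx::query_as::<_, (i64,)>(&sql);
    if let Some(s) = status {
        query = query.bind(s);
    }
    if let Some(s) = search {
        query = query.bind(format!("%{s}%"));
    }
    query.fetch_one(pool).await.map(|(count,)| count)
}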

// ---------------------------------------------------------------------------
// 2. GET /api/admin/instructors/{id} — full detail
// ---------------------------------------------------------------------------

/// `GET /api/admin/instructors/{id}` — Full instructor detail with candidates.
pub async fn get_instructor(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
    Path(id): Path<i32>,
) -> Result<Json<InstructorDetailResponse>, (StatusCode, Json<Value>)> {
    build_instructor_detail(&state, id).await
}

/// Shared helper that builds the full instructor detail response.
async fn build_instructor_detail(
    state: &AppState,
    id: i32,
) -> Result<Json<InstructorDetailResponse>, (StatusCode, Json<Value>)> {
    // Fetch instructor
    let instructor: Option<(i32, String, String, String)> = sqlx::query_as(
        "SELECT id, display_name, email, rmp_match_status FROM instructors WHERE id = $1",
    )
    .bind(id)
    .fetch_optional(&state.db_pool)
    .await
    .map_err(|e| db_error("failed to fetch instructor", e))?;

    let (inst_id, display_name, email, rmp_match_status) = instructor.ok_or_else(|| {
        (
            StatusCode::NOT_FOUND,
            Json(json!({"error": "instructor not found"})),
        )
    })?;

    // Subjects taught
    let subjects: Vec<(String,)> = sqlx::query_as(
        "SELECT DISTINCT c.subject FROM course_instructors ci JOIN courses c ON c.id = ci.course_id WHERE ci.instructor_id = $1 ORDER BY c.subject",
    )
    .bind(inst_id)
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| db_error("failed to fetch subjects", e))?;

    // Course count
    let (course_count,): (i64,) = sqlx::query_as(
        "SELECT COUNT(DISTINCT ci.course_id) FROM course_instructors ci WHERE ci.instructor_id = $1",
    )
    .bind(inst_id)
    .fetch_one(&state.db_pool)
    .await
    .map_err(|e| db_error("failed to count courses", e))?;

    // Candidates with RMP professor info
    let candidates = sqlx::query_as::<_, CandidateRow>(
        r#"
        SELECT mc.id, mc.rmp_legacy_id, mc.score, mc.score_breakdown, mc.status,
               rp.first_name, rp.last_name, rp.department,
               rp.avg_rating, rp.avg_difficulty, rp.num_ratings, rp.would_take_again_pct
        FROM rmp_match_candidates mc
        JOIN rmp_professors rp ON rp.legacy_id = mc.rmp_legacy_id
        WHERE mc.instructor_id = $1
        ORDER BY mc.score DESC
        "#,
    )
    .bind(inst_id)
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| db_error("failed to fetch candidates", e))?;

    // Current matches (all linked RMP profiles)
    let current_matches = sqlx::query_as::<_, LinkedRmpProfileRow>(
        r#"
        SELECT irl.id as link_id,
               rp.legacy_id, rp.first_name, rp.last_name, rp.department,
               rp.avg_rating, rp.avg_difficulty, rp.num_ratings, rp.would_take_again_pct
        FROM instructor_rmp_links irl
        JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
        WHERE irl.instructor_id = $1
        ORDER BY rp.num_ratings DESC NULLS LAST
        "#,
    )
    .bind(inst_id)
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| db_error("failed to fetch linked rmp profiles", e))?;

    let current_matches_resp: Vec<LinkedRmpProfile> = current_matches
        .into_iter()
        .map(|p| LinkedRmpProfile {
            link_id: p.link_id,
            legacy_id: p.legacy_id,
            first_name: p.first_name,
            last_name: p.last_name,
            department: p.department,
            avg_rating: p.avg_rating,
            avg_difficulty: p.avg_difficulty,
            num_ratings: p.num_ratings,
            would_take_again_pct: p.would_take_again_pct,
        })
        .collect();

    let candidates_resp: Vec<CandidateResponse> = candidates
        .into_iter()
        .map(|c| CandidateResponse {
            id: c.id,
            rmp_legacy_id: c.rmp_legacy_id,
            first_name: c.first_name,
            last_name: c.last_name,
            department: c.department,
            avg_rating: c.avg_rating,
            avg_difficulty: c.avg_difficulty,
            num_ratings: c.num_ratings,
            would_take_again_pct: c.would_take_again_pct,
            score: c.score,
            score_breakdown: c.score_breakdown,
            status: c.status,
        })
        .collect();

    Ok(Json(InstructorDetailResponse {
        instructor: InstructorDetail {
            id: inst_id,
            display_name,
            email,
            rmp_match_status,
            subjects_taught: subjects.into_iter().map(|(s,)| s).collect(),
            course_count,
        },
        current_matches: current_matches_resp,
        candidates: candidates_resp,
    }))
}

// ---------------------------------------------------------------------------
// 3. POST /api/admin/instructors/{id}/match — accept a candidate
// ---------------------------------------------------------------------------

/// `POST /api/admin/instructors/{id}/match` — Accept a candidate match.
pub async fn match_instructor(
    AdminUser(user): AdminUser,
    State(state): State<AppState>,
    Path(id): Path<i32>,
    Json(body): Json<MatchBody>,
) -> Result<Json<InstructorDetailResponse>, (StatusCode, Json<Value>)> {
    // Verify the candidate exists and is pending
    let candidate: Option<(i32,)> = sqlx::query_as(
        "SELECT id FROM rmp_match_candidates WHERE instructor_id = $1 AND rmp_legacy_id = $2 AND status = 'pending'",
    )
    .bind(id)
    .bind(body.rmp_legacy_id)
    .fetch_optional(&state.db_pool)
    .await
    .map_err(|e| db_error("failed to check candidate", e))?;

    if candidate.is_none() {
        return Err((
            StatusCode::NOT_FOUND,
            Json(json!({"error": "pending candidate not found for this instructor"})),
        ));
    }

    // Check if this RMP profile is already linked to a different instructor
    let conflict: Option<(i32,)> = sqlx::query_as(
        "SELECT instructor_id FROM instructor_rmp_links WHERE rmp_legacy_id = $1 AND instructor_id != $2",
    )
    .bind(body.rmp_legacy_id)
    .bind(id)
    .fetch_optional(&state.db_pool)
    .await
    .map_err(|e| db_error("failed to check rmp uniqueness", e))?;

    if let Some((other_id,)) = conflict {
        return Err((
            StatusCode::CONFLICT,
            Json(json!({
                "error": "RMP profile already linked to another instructor",
                "conflictingInstructorId": other_id,
            })),
        ));
    }

    let mut tx = state
        .db_pool
        .begin()
        .await
        .map_err(|e| db_error("failed to begin transaction", e))?;

    // Insert link into instructor_rmp_links
    sqlx::query(
        "INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, created_by, source) VALUES ($1, $2, $3, 'manual') ON CONFLICT (rmp_legacy_id) DO NOTHING",
    )
    .bind(id)
    .bind(body.rmp_legacy_id)
    .bind(user.discord_id)
    .execute(&mut *tx)
    .await
    .map_err(|e| db_error("failed to insert rmp link", e))?;

    // Update instructor match status
    sqlx::query("UPDATE instructors SET rmp_match_status = 'confirmed' WHERE id = $1")
        .bind(id)
        .execute(&mut *tx)
        .await
        .map_err(|e| db_error("failed to update instructor match status", e))?;

    // Accept the candidate
    sqlx::query(
        "UPDATE rmp_match_candidates SET status = 'accepted', resolved_at = NOW(), resolved_by = $1 WHERE instructor_id = $2 AND rmp_legacy_id = $3",
    )
    .bind(user.discord_id)
    .bind(id)
    .bind(body.rmp_legacy_id)
    .execute(&mut *tx)
    .await
    .map_err(|e| db_error("failed to accept candidate", e))?;

    tx.commit()
        .await
        .map_err(|e| db_error("failed to commit transaction", e))?;

    build_instructor_detail(&state, id).await
}

// ---------------------------------------------------------------------------
// 4. POST /api/admin/instructors/{id}/reject-candidate — reject one candidate
// ---------------------------------------------------------------------------

/// `POST /api/admin/instructors/{id}/reject-candidate` — Reject a single candidate.
pub async fn reject_candidate(
    AdminUser(user): AdminUser,
    State(state): State<AppState>,
    Path(id): Path<i32>,
    Json(body): Json<RejectCandidateBody>,
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
    let result = sqlx::query(
        "UPDATE rmp_match_candidates SET status = 'rejected', resolved_at = NOW(), resolved_by = $1 WHERE instructor_id = $2 AND rmp_legacy_id = $3 AND status = 'pending'",
    )
    .bind(user.discord_id)
    .bind(id)
    .bind(body.rmp_legacy_id)
    .execute(&state.db_pool)
    .await
    .map_err(|e| db_error("failed to reject candidate", e))?;

    if result.rows_affected() == 0 {
        return Err((
            StatusCode::NOT_FOUND,
            Json(json!({"error": "pending candidate not found"})),
        ));
    }

    Ok(Json(OkResponse { ok: true }))
}
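
// Note on the pattern above (illustrative): the precondition ("a pending
// candidate exists") lives in the UPDATE's WHERE clause, so a zero
// `rows_affected()` means the precondition failed rather than the statement
// erroring, and the check costs one round trip instead of a racy
// SELECT-then-UPDATE. Sketch against a hypothetical table:
//
//     let result = sqlx::query("UPDATE items SET done = true WHERE id = $1 AND NOT done")
//         .bind(item_id)
//         .execute(&pool)
//         .await?;
//     if result.rows_affected() == 0 {
//         // Nothing matched: report 404 (missing) or 409 (already done).
//     }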

// ---------------------------------------------------------------------------
// 5. POST /api/admin/instructors/{id}/reject-all — no valid match
// ---------------------------------------------------------------------------

/// `POST /api/admin/instructors/{id}/reject-all` — Mark instructor as having no valid RMP match.
pub async fn reject_all(
    AdminUser(user): AdminUser,
    State(state): State<AppState>,
    Path(id): Path<i32>,
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
    let mut tx = state
        .db_pool
        .begin()
        .await
        .map_err(|e| db_error("failed to begin transaction", e))?;

    // Check current status — cannot reject an instructor with confirmed matches
    let current_status: Option<(String,)> =
        sqlx::query_as("SELECT rmp_match_status FROM instructors WHERE id = $1")
            .bind(id)
            .fetch_optional(&mut *tx)
            .await
            .map_err(|e| db_error("failed to fetch instructor status", e))?;

    let (status,) = current_status.ok_or_else(|| {
        (
            StatusCode::NOT_FOUND,
            Json(json!({"error": "instructor not found"})),
        )
    })?;

    if status == "confirmed" {
        return Err((
            StatusCode::CONFLICT,
            Json(
                json!({"error": "cannot reject instructor with confirmed matches — unmatch first"}),
            ),
        ));
    }

    // Update instructor status
    sqlx::query("UPDATE instructors SET rmp_match_status = 'rejected' WHERE id = $1")
        .bind(id)
        .execute(&mut *tx)
        .await
        .map_err(|e| db_error("failed to update instructor status", e))?;

    // Reject all pending candidates
    sqlx::query(
        "UPDATE rmp_match_candidates SET status = 'rejected', resolved_at = NOW(), resolved_by = $1 WHERE instructor_id = $2 AND status = 'pending'",
    )
    .bind(user.discord_id)
    .bind(id)
    .execute(&mut *tx)
    .await
    .map_err(|e| db_error("failed to reject candidates", e))?;

    tx.commit()
        .await
        .map_err(|e| db_error("failed to commit transaction", e))?;

    Ok(Json(OkResponse { ok: true }))
}

// ---------------------------------------------------------------------------
// 6. POST /api/admin/instructors/{id}/unmatch — remove current match
// ---------------------------------------------------------------------------

/// Body for unmatch — optional `rmpLegacyId` to remove a specific link.
/// If omitted (or null), all links are removed.
#[derive(Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct UnmatchBody {
    rmp_legacy_id: Option<i32>,
}

/// `POST /api/admin/instructors/{id}/unmatch` — Remove RMP link(s).
///
/// Send `{ "rmpLegacyId": N }` to remove a specific link, or an empty body / `{}`
/// to remove all links for the instructor.
pub async fn unmatch_instructor(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
    Path(id): Path<i32>,
    body: Option<Json<UnmatchBody>>,
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
    let rmp_legacy_id = body.and_then(|b| b.rmp_legacy_id);

    // Verify instructor exists
    let exists: Option<(i32,)> = sqlx::query_as("SELECT id FROM instructors WHERE id = $1")
        .bind(id)
        .fetch_optional(&state.db_pool)
        .await
        .map_err(|e| db_error("failed to check instructor", e))?;

    if exists.is_none() {
        return Err((
            StatusCode::NOT_FOUND,
            Json(json!({"error": "instructor not found"})),
        ));
    }

    // Use the data layer function to perform the unmatch
    crate::data::rmp::unmatch_instructor(&state.db_pool, id, rmp_legacy_id)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "failed to unmatch instructor");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": "failed to unmatch instructor"})),
            )
        })?;

    Ok(Json(OkResponse { ok: true }))
}

// ---------------------------------------------------------------------------
// 7. POST /api/admin/rmp/rescore — re-run candidate generation
// ---------------------------------------------------------------------------

/// `POST /api/admin/rmp/rescore` — Re-run RMP candidate generation.
pub async fn rescore(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
) -> Result<Json<RescoreResponse>, (StatusCode, Json<Value>)> {
    let stats = crate::data::rmp_matching::generate_candidates(&state.db_pool)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "failed to run candidate generation");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": "candidate generation failed"})),
            )
        })?;

    Ok(Json(RescoreResponse {
        total_unmatched: stats.total_unmatched,
        candidates_created: stats.candidates_created,
        candidates_rescored: stats.candidates_rescored,
        auto_matched: stats.auto_matched,
        skipped_unparseable: stats.skipped_unparseable,
        skipped_no_candidates: stats.skipped_no_candidates,
    }))
}

@@ -0,0 +1,523 @@
//! Admin API handlers for scraper observability.
//!
//! All endpoints require the `AdminUser` extractor, returning 401/403 as needed.

use axum::extract::{Path, Query, State};
use axum::http::StatusCode;
use axum::response::Json;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::Row;
use ts_rs::TS;

use crate::banner::models::terms::Term;
use crate::data::scrape_jobs;
use crate::scraper::adaptive::{self, SubjectSchedule, SubjectStats};
use crate::state::AppState;
use crate::web::extractors::AdminUser;

type ApiError = (StatusCode, Json<serde_json::Value>);

fn parse_period(period: &str) -> Result<chrono::Duration, ApiError> {
    match period {
        "1h" => Ok(chrono::Duration::hours(1)),
        "6h" => Ok(chrono::Duration::hours(6)),
        "24h" => Ok(chrono::Duration::hours(24)),
        "7d" => Ok(chrono::Duration::days(7)),
        "30d" => Ok(chrono::Duration::days(30)),
        _ => Err((
            StatusCode::BAD_REQUEST,
            Json(
                json!({"error": format!("Invalid period '{period}'. Valid: 1h, 6h, 24h, 7d, 30d")}),
            ),
        )),
    }
}

fn period_to_interval_str(period: &str) -> &'static str {
    match period {
        "1h" => "1 hour",
        "6h" => "6 hours",
        "24h" => "24 hours",
        "7d" => "7 days",
        "30d" => "30 days",
        _ => "24 hours",
    }
}

fn parse_bucket(bucket: &str) -> Result<&'static str, ApiError> {
    match bucket {
        "1m" => Ok("1 minute"),
        "5m" => Ok("5 minutes"),
        "15m" => Ok("15 minutes"),
        "1h" => Ok("1 hour"),
        "6h" => Ok("6 hours"),
        _ => Err((
            StatusCode::BAD_REQUEST,
            Json(
                json!({"error": format!("Invalid bucket '{bucket}'. Valid: 1m, 5m, 15m, 1h, 6h")}),
            ),
        )),
    }
}

fn default_bucket_for_period(period: &str) -> &'static str {
    match period {
        "1h" => "1m",
        "6h" => "5m",
        "24h" => "15m",
        "7d" => "1h",
        "30d" => "6h",
        _ => "15m",
    }
}
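
// Illustrative consistency check (a sketch, not shipped behavior): every
// period must parse, and every period's default bucket must itself be a
// valid bucket, so the lookup tables above cannot silently drift apart.
#[cfg(test)]
mod interval_param_tests {
    use super::*;

    #[test]
    fn default_buckets_are_valid_for_every_period() {
        for period in ["1h", "6h", "24h", "7d", "30d"] {
            assert!(parse_period(period).is_ok());
            assert!(parse_bucket(default_bucket_for_period(period)).is_ok());
        }
    }
}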

// ---------------------------------------------------------------------------
// Endpoint 1: GET /api/admin/scraper/stats
// ---------------------------------------------------------------------------

#[derive(Deserialize)]
pub struct StatsParams {
    #[serde(default = "default_period")]
    period: String,
}

fn default_period() -> String {
    "24h".to_string()
}

#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct ScraperStatsResponse {
    period: String,
    #[ts(type = "number")]
    total_scrapes: i64,
    #[ts(type = "number")]
    successful_scrapes: i64,
    #[ts(type = "number")]
    failed_scrapes: i64,
    success_rate: Option<f64>,
    avg_duration_ms: Option<f64>,
    #[ts(type = "number")]
    total_courses_changed: i64,
    #[ts(type = "number")]
    total_courses_fetched: i64,
    #[ts(type = "number")]
    total_audits_generated: i64,
    #[ts(type = "number")]
    pending_jobs: i64,
    #[ts(type = "number")]
    locked_jobs: i64,
}

pub async fn scraper_stats(
    _admin: AdminUser,
    State(state): State<AppState>,
    Query(params): Query<StatsParams>,
) -> Result<Json<ScraperStatsResponse>, ApiError> {
    let _duration = parse_period(&params.period)?;
    let interval_str = period_to_interval_str(&params.period);

    let row = sqlx::query(
        "SELECT \
            COUNT(*) AS total_scrapes, \
            COUNT(*) FILTER (WHERE success) AS successful_scrapes, \
            COUNT(*) FILTER (WHERE NOT success) AS failed_scrapes, \
            (AVG(duration_ms) FILTER (WHERE success))::FLOAT8 AS avg_duration_ms, \
            COALESCE(SUM(courses_changed) FILTER (WHERE success), 0) AS total_courses_changed, \
            COALESCE(SUM(courses_fetched) FILTER (WHERE success), 0) AS total_courses_fetched, \
            COALESCE(SUM(audits_generated) FILTER (WHERE success), 0) AS total_audits_generated \
         FROM scrape_job_results \
         WHERE completed_at > NOW() - $1::interval",
    )
    .bind(interval_str)
    .fetch_one(&state.db_pool)
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "Failed to fetch scraper stats");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": "Failed to fetch scraper stats"})),
        )
    })?;

    let total_scrapes: i64 = row.get("total_scrapes");
    let successful_scrapes: i64 = row.get("successful_scrapes");
    let failed_scrapes: i64 = row.get("failed_scrapes");
    let avg_duration_ms: Option<f64> = row.get("avg_duration_ms");
    let total_courses_changed: i64 = row.get("total_courses_changed");
    let total_courses_fetched: i64 = row.get("total_courses_fetched");
    let total_audits_generated: i64 = row.get("total_audits_generated");

    let queue_row = sqlx::query(
        "SELECT \
            COUNT(*) FILTER (WHERE locked_at IS NULL) AS pending_jobs, \
            COUNT(*) FILTER (WHERE locked_at IS NOT NULL) AS locked_jobs \
         FROM scrape_jobs",
    )
    .fetch_one(&state.db_pool)
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "Failed to fetch queue stats");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": "Failed to fetch queue stats"})),
        )
    })?;

    let pending_jobs: i64 = queue_row.get("pending_jobs");
    let locked_jobs: i64 = queue_row.get("locked_jobs");

    let success_rate = if total_scrapes > 0 {
        Some(successful_scrapes as f64 / total_scrapes as f64)
    } else {
        None
    };

    Ok(Json(ScraperStatsResponse {
        period: params.period,
        total_scrapes,
        successful_scrapes,
        failed_scrapes,
        success_rate,
        avg_duration_ms,
        total_courses_changed,
        total_courses_fetched,
        total_audits_generated,
        pending_jobs,
        locked_jobs,
    }))
}

// ---------------------------------------------------------------------------
// Endpoint 2: GET /api/admin/scraper/timeseries
// ---------------------------------------------------------------------------

#[derive(Deserialize)]
pub struct TimeseriesParams {
    #[serde(default = "default_period")]
    period: String,
    bucket: Option<String>,
}

#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct TimeseriesResponse {
    period: String,
    bucket: String,
    points: Vec<TimeseriesPoint>,
}

#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct TimeseriesPoint {
    timestamp: DateTime<Utc>,
    #[ts(type = "number")]
    scrape_count: i64,
    #[ts(type = "number")]
    success_count: i64,
    #[ts(type = "number")]
    error_count: i64,
    #[ts(type = "number")]
    courses_changed: i64,
    avg_duration_ms: f64,
}

pub async fn scraper_timeseries(
    _admin: AdminUser,
    State(state): State<AppState>,
    Query(params): Query<TimeseriesParams>,
) -> Result<Json<TimeseriesResponse>, ApiError> {
    let _duration = parse_period(&params.period)?;
    let period_interval = period_to_interval_str(&params.period);

    let bucket_code = match &params.bucket {
        Some(b) => {
            // Validate the bucket
            parse_bucket(b)?;
            b.as_str()
        }
        None => default_bucket_for_period(&params.period),
    };
    let bucket_interval = parse_bucket(bucket_code)?;

    let rows = sqlx::query(
        "WITH buckets AS ( \
            SELECT generate_series( \
                date_bin($1::interval, NOW() - $2::interval, '2020-01-01'::timestamptz), \
                date_bin($1::interval, NOW(), '2020-01-01'::timestamptz), \
                $1::interval \
            ) AS bucket_start \
        ), \
        raw AS ( \
            SELECT date_bin($1::interval, completed_at, '2020-01-01'::timestamptz) AS bucket_start, \
                COUNT(*)::BIGINT AS scrape_count, \
                COUNT(*) FILTER (WHERE success)::BIGINT AS success_count, \
                COUNT(*) FILTER (WHERE NOT success)::BIGINT AS error_count, \
                COALESCE(SUM(courses_changed) FILTER (WHERE success), 0)::BIGINT AS courses_changed, \
                COALESCE(AVG(duration_ms) FILTER (WHERE success), 0)::FLOAT8 AS avg_duration_ms \
            FROM scrape_job_results \
            WHERE completed_at > NOW() - $2::interval \
            GROUP BY 1 \
        ) \
        SELECT b.bucket_start, \
            COALESCE(r.scrape_count, 0) AS scrape_count, \
            COALESCE(r.success_count, 0) AS success_count, \
            COALESCE(r.error_count, 0) AS error_count, \
            COALESCE(r.courses_changed, 0) AS courses_changed, \
            COALESCE(r.avg_duration_ms, 0) AS avg_duration_ms \
        FROM buckets b \
        LEFT JOIN raw r ON b.bucket_start = r.bucket_start \
        ORDER BY b.bucket_start",
    )
    .bind(bucket_interval)
    .bind(period_interval)
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "Failed to fetch scraper timeseries");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": "Failed to fetch scraper timeseries"})),
        )
    })?;

    let points = rows
        .iter()
        .map(|row| TimeseriesPoint {
            timestamp: row.get("bucket_start"),
            scrape_count: row.get("scrape_count"),
            success_count: row.get("success_count"),
            error_count: row.get("error_count"),
            courses_changed: row.get("courses_changed"),
            avg_duration_ms: row.get("avg_duration_ms"),
        })
        .collect();

    Ok(Json(TimeseriesResponse {
        period: params.period,
        bucket: bucket_code.to_string(),
        points,
    }))
}
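
// How the query above avoids gaps (illustrative walkthrough): `generate_series`
// emits every bucket boundary in the window and the LEFT JOIN zero-fills
// buckets with no completed jobs, so the frontend chart never sees missing
// points. `date_bin` floors a timestamp to the nearest bucket boundary
// relative to a fixed origin, e.g. with a 15-minute bucket:
//
//     date_bin('15 minutes'::interval,
//              TIMESTAMPTZ '2024-01-01 10:07:00+00',
//              '2020-01-01'::timestamptz)
//     -- => 2024-01-01 10:00:00+00 (rows at 10:07 and 10:13 share a bucket)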

// ---------------------------------------------------------------------------
// Endpoint 3: GET /api/admin/scraper/subjects
// ---------------------------------------------------------------------------

#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectsResponse {
    subjects: Vec<SubjectSummary>,
}

#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectSummary {
    subject: String,
    subject_description: Option<String>,
    #[ts(type = "number")]
    tracked_course_count: i64,
    schedule_state: String,
    #[ts(type = "number")]
    current_interval_secs: u64,
    time_multiplier: u32,
    last_scraped: DateTime<Utc>,
    next_eligible_at: Option<DateTime<Utc>>,
    #[ts(type = "number | null")]
    cooldown_remaining_secs: Option<u64>,
    avg_change_ratio: f64,
    #[ts(type = "number")]
    consecutive_zero_changes: i64,
    #[ts(type = "number")]
    recent_runs: i64,
    #[ts(type = "number")]
    recent_failures: i64,
}

pub async fn scraper_subjects(
    _admin: AdminUser,
    State(state): State<AppState>,
) -> Result<Json<SubjectsResponse>, ApiError> {
    let raw_stats = scrape_jobs::fetch_subject_stats(&state.db_pool)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "Failed to fetch subject stats");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": "Failed to fetch subject stats"})),
            )
        })?;

    let now = Utc::now();
    let multiplier = adaptive::time_of_day_multiplier(now);

    // Look up subject descriptions from the reference cache
    let ref_cache = state.reference_cache.read().await;

    // Count tracked courses per subject for the current term
    let term = Term::get_current().inner().to_string();
    let course_counts: std::collections::HashMap<String, i64> = sqlx::query_as(
        "SELECT subject, COUNT(*)::BIGINT AS cnt FROM courses WHERE term_code = $1 GROUP BY subject",
    )
    .bind(&term)
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "Failed to fetch course counts");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": "Failed to fetch course counts"})),
        )
    })?
    .into_iter()
    .map(|(subject, cnt): (String, i64)| (subject, cnt))
    .collect();

    let subjects = raw_stats
        .into_iter()
        .map(|row| {
            let stats: SubjectStats = row.into();
            let schedule = adaptive::evaluate_subject(&stats, now, false);
            let base_interval = adaptive::compute_base_interval(&stats);

            let schedule_state = match &schedule {
                SubjectSchedule::Eligible(_) => "eligible",
                SubjectSchedule::Cooldown(_) => "cooldown",
                SubjectSchedule::Paused => "paused",
                SubjectSchedule::ReadOnly => "read_only",
            };

            let current_interval_secs = base_interval.as_secs() * multiplier as u64;

            let (next_eligible_at, cooldown_remaining_secs) = match &schedule {
                SubjectSchedule::Eligible(_) => (Some(now), Some(0)),
                SubjectSchedule::Cooldown(remaining) => {
                    let remaining_secs = remaining.as_secs();
                    (
                        Some(now + chrono::Duration::seconds(remaining_secs as i64)),
                        Some(remaining_secs),
                    )
                }
                SubjectSchedule::Paused | SubjectSchedule::ReadOnly => (None, None),
            };

            let subject_description = ref_cache
                .lookup("subject", &stats.subject)
                .map(|s| s.to_string());

            let tracked_course_count = course_counts.get(&stats.subject).copied().unwrap_or(0);

            SubjectSummary {
                subject: stats.subject,
                subject_description,
                tracked_course_count,
                schedule_state: schedule_state.to_string(),
                current_interval_secs,
                time_multiplier: multiplier,
                last_scraped: stats.last_completed,
                next_eligible_at,
                cooldown_remaining_secs,
                avg_change_ratio: stats.avg_change_ratio,
                consecutive_zero_changes: stats.consecutive_zero_changes,
                recent_runs: stats.recent_runs,
                recent_failures: stats.recent_failure_count,
            }
        })
        .collect();

    Ok(Json(SubjectsResponse { subjects }))
}

// ---------------------------------------------------------------------------
// Endpoint 4: GET /api/admin/scraper/subjects/{subject}
// ---------------------------------------------------------------------------

#[derive(Deserialize)]
pub struct SubjectDetailParams {
    #[serde(default = "default_detail_limit")]
    limit: i32,
}

fn default_detail_limit() -> i32 {
    50
}

#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectDetailResponse {
    subject: String,
    results: Vec<SubjectResultEntry>,
}

#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectResultEntry {
    #[ts(type = "number")]
    id: i64,
    completed_at: DateTime<Utc>,
    duration_ms: i32,
    success: bool,
    error_message: Option<String>,
    courses_fetched: Option<i32>,
    courses_changed: Option<i32>,
    courses_unchanged: Option<i32>,
    audits_generated: Option<i32>,
    metrics_generated: Option<i32>,
}

pub async fn scraper_subject_detail(
    _admin: AdminUser,
    State(state): State<AppState>,
    Path(subject): Path<String>,
    Query(params): Query<SubjectDetailParams>,
) -> Result<Json<SubjectDetailResponse>, ApiError> {
    let limit = params.limit.clamp(1, 200);

    let rows = sqlx::query(
        "SELECT id, completed_at, duration_ms, success, error_message, \
            courses_fetched, courses_changed, courses_unchanged, \
            audits_generated, metrics_generated \
         FROM scrape_job_results \
         WHERE target_type = 'Subject' AND payload->>'subject' = $1 \
         ORDER BY completed_at DESC \
         LIMIT $2",
    )
    .bind(&subject)
    .bind(limit)
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| {
        tracing::error!(error = %e, subject = %subject, "Failed to fetch subject detail");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": "Failed to fetch subject detail"})),
        )
    })?;

    let results = rows
        .iter()
        .map(|row| SubjectResultEntry {
            id: row.get("id"),
            completed_at: row.get("completed_at"),
            duration_ms: row.get("duration_ms"),
            success: row.get("success"),
            error_message: row.get("error_message"),
            courses_fetched: row.get("courses_fetched"),
            courses_changed: row.get("courses_changed"),
            courses_unchanged: row.get("courses_unchanged"),
            audits_generated: row.get("audits_generated"),
            metrics_generated: row.get("metrics_generated"),
        })
        .collect();

    Ok(Json(SubjectDetailResponse { subject, results }))
}

+114
-19
@@ -1,14 +1,18 @@
//! Embedded assets for the web frontend
//! Embedded assets for the web frontend.
//!
//! This module handles serving static assets that are embedded into the binary
//! at compile time using rust-embed.
//! Serves static assets embedded into the binary at compile time using rust-embed.
//! Supports content negotiation for pre-compressed variants (.br, .gz, .zst)
//! generated at build time by `web/scripts/compress-assets.ts`.

use axum::http::{HeaderMap, HeaderValue, header};
use dashmap::DashMap;
use rapidhash::v3::rapidhash_v3;
use rust_embed::RustEmbed;
use std::fmt;
use std::sync::LazyLock;

use super::encoding::{COMPRESSION_MIN_SIZE, ContentEncoding, parse_accepted_encodings};

/// Embedded web assets from the dist directory
#[derive(RustEmbed)]
#[folder = "web/dist/"]
@@ -21,17 +25,15 @@ pub struct WebAssets;
pub struct AssetHash(u64);

impl AssetHash {
    /// Create a new AssetHash from u64 value
    pub fn new(hash: u64) -> Self {
        Self(hash)
    }

    /// Get the hash as a hex string
    pub fn to_hex(&self) -> String {
        format!("{:016x}", self.0)
    }

    /// Get the hash as a quoted hex string
    /// Get the hash as a quoted hex string (for ETag headers)
    pub fn quoted(&self) -> String {
        format!("\"{}\"", self.to_hex())
    }
@@ -51,12 +53,8 @@ pub struct AssetMetadata {
}

impl AssetMetadata {
    /// Check if the etag matches the asset hash
    pub fn etag_matches(&self, etag: &str) -> bool {
        // Remove quotes if present (ETags are typically quoted)
        let etag = etag.trim_matches('"');

        // ETags generated from u64 hex should be 16 characters
        etag.len() == 16
            && u64::from_str_radix(etag, 16)
                .map(|parsed| parsed == self.hash.0)
@@ -68,28 +66,125 @@ impl AssetMetadata {
static ASSET_CACHE: LazyLock<DashMap<String, AssetMetadata>> = LazyLock::new(DashMap::new);

/// Get cached asset metadata for a file path, caching on-demand
/// Returns AssetMetadata containing MIME type and RapidHash hash
pub fn get_asset_metadata_cached(path: &str, content: &[u8]) -> AssetMetadata {
    // Check cache first
    if let Some(cached) = ASSET_CACHE.get(path) {
        return cached.value().clone();
    }

    // Calculate MIME type
    let mime_type = mime_guess::from_path(path)
        .first()
        .map(|mime| mime.to_string());

    // Calculate RapidHash hash (using u64 native output size)
    let hash_value = rapidhash_v3(content);
    let hash = AssetHash::new(hash_value);

    let hash = AssetHash::new(rapidhash_v3(content));
    let metadata = AssetMetadata { mime_type, hash };

    // Only cache if we haven't exceeded the limit
    if ASSET_CACHE.len() < 1000 {
        ASSET_CACHE.insert(path.to_string(), metadata.clone());
    }

    metadata
}

/// Set appropriate `Cache-Control` header based on the asset path.
///
/// SvelteKit outputs fingerprinted assets under `_app/immutable/` which are
/// safe to cache indefinitely. Other assets get shorter cache durations.
fn set_cache_control(headers: &mut HeaderMap, path: &str) {
    let cache_control = if path.contains("immutable/") {
        // SvelteKit fingerprinted assets — cache forever
        "public, max-age=31536000, immutable"
    } else if path == "index.html" || path.ends_with(".html") {
        "public, max-age=300"
    } else {
        match path.rsplit_once('.').map(|(_, ext)| ext) {
            Some("css" | "js") => "public, max-age=86400",
            Some("png" | "jpg" | "jpeg" | "gif" | "svg" | "ico") => "public, max-age=2592000",
            _ => "public, max-age=3600",
        }
    };

    if let Ok(value) = HeaderValue::from_str(cache_control) {
        headers.insert(header::CACHE_CONTROL, value);
    }
}
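
// Spot-check of the mapping above (illustrative only): fingerprinted
// SvelteKit output must come back with the immutable directive.
#[cfg(test)]
mod cache_control_tests {
    use super::*;

    #[test]
    fn immutable_assets_are_cached_forever() {
        let mut headers = HeaderMap::new();
        set_cache_control(&mut headers, "_app/immutable/chunks/app.abc123.js");
        assert_eq!(
            headers
                .get(header::CACHE_CONTROL)
                .and_then(|v| v.to_str().ok()),
            Some("public, max-age=31536000, immutable")
        );
    }
}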

/// Serve an embedded asset with content encoding negotiation.
///
/// Tries pre-compressed variants (.br, .gz, .zst) in the order preferred by
/// the client's `Accept-Encoding` header, falling back to the uncompressed
/// original. Returns `None` if the asset doesn't exist at all.
pub fn try_serve_asset_with_encoding(
    path: &str,
    request_headers: &HeaderMap,
) -> Option<axum::response::Response> {
    use axum::response::IntoResponse;

    let asset_path = path.strip_prefix('/').unwrap_or(path);

    // Get the uncompressed original first (for metadata: MIME type, ETag)
    let original = WebAssets::get(asset_path)?;
    let metadata = get_asset_metadata_cached(asset_path, &original.data);

    // Check ETag for conditional requests (304 Not Modified)
    if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
        && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
    {
        return Some(axum::http::StatusCode::NOT_MODIFIED.into_response());
    }

    let mime_type = metadata
        .mime_type
        .unwrap_or_else(|| "application/octet-stream".to_string());

    // Only attempt pre-compressed variants for files above the compression
    // threshold — the build script skips smaller files too.
    let accepted_encodings = if original.data.len() >= COMPRESSION_MIN_SIZE {
        parse_accepted_encodings(request_headers)
    } else {
        vec![ContentEncoding::Identity]
    };

    for encoding in &accepted_encodings {
        if *encoding == ContentEncoding::Identity {
            continue;
        }

        let compressed_path = format!("{}{}", asset_path, encoding.extension());
        if let Some(compressed) = WebAssets::get(&compressed_path) {
            let mut response_headers = HeaderMap::new();

            if let Ok(ct) = HeaderValue::from_str(&mime_type) {
                response_headers.insert(header::CONTENT_TYPE, ct);
            }
            if let Some(ce) = encoding.header_value() {
                response_headers.insert(header::CONTENT_ENCODING, ce);
            }
            if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
                response_headers.insert(header::ETAG, etag_val);
            }
            // Vary so caches distinguish by encoding
            response_headers.insert(header::VARY, HeaderValue::from_static("Accept-Encoding"));
            set_cache_control(&mut response_headers, asset_path);

            return Some(
                (
                    axum::http::StatusCode::OK,
                    response_headers,
                    compressed.data,
                )
                    .into_response(),
            );
        }
    }

    // No compressed variant found — serve uncompressed original
    let mut response_headers = HeaderMap::new();
    if let Ok(ct) = HeaderValue::from_str(&mime_type) {
        response_headers.insert(header::CONTENT_TYPE, ct);
    }
    if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
        response_headers.insert(header::ETAG, etag_val);
    }
    set_cache_control(&mut response_headers, asset_path);

    Some((axum::http::StatusCode::OK, response_headers, original.data).into_response())
}
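
The probe order is easiest to see spelled out. A minimal sketch (asset name illustrative) using only `ContentEncoding::extension`:

// For `Accept-Encoding: br, gzip`, the loop checks these embedded paths in
// order, then falls back to "assets/app.js" itself.
let probes: Vec<String> = [ContentEncoding::Brotli, ContentEncoding::Gzip]
    .iter()
    .map(|enc| format!("assets/app.js{}", enc.extension()))
    .collect();
assert_eq!(probes, ["assets/app.js.br", "assets/app.js.gz"]);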

+304
@@ -0,0 +1,304 @@
//! Discord OAuth2 authentication handlers.
//!
//! Provides login, callback, logout, and session introspection endpoints
//! for the Discord OAuth2 authentication flow.

use axum::extract::{Extension, Query, State};
use axum::http::{HeaderMap, StatusCode, header};
use axum::response::{IntoResponse, Json, Redirect, Response};
use serde::Deserialize;
use serde_json::{Value, json};
use std::time::Duration;
use tracing::{error, info, warn};

use crate::state::AppState;

/// OAuth configuration passed as an Axum Extension.
#[derive(Clone)]
pub struct AuthConfig {
    pub client_id: String,
    pub client_secret: String,
    /// Optional base URL override (e.g. "https://banner.xevion.dev").
    /// When `None`, the redirect URI is derived from the request's Origin/Host header.
    pub redirect_base: Option<String>,
}

const CALLBACK_PATH: &str = "/api/auth/callback";

/// Derive the origin (scheme + host + port) the user's browser is actually on.
///
/// Priority:
/// 1. Configured `redirect_base` (production override)
/// 2. `Referer` header — preserves the real browser origin even through
///    reverse proxies that rewrite `Host` (e.g. Vite dev proxy with
///    `changeOrigin: true`)
/// 3. `Origin` header (present on POST / CORS requests)
/// 4. `Host` header (last resort, may be rewritten by proxies)
fn resolve_origin(auth_config: &AuthConfig, headers: &HeaderMap) -> String {
    if let Some(base) = &auth_config.redirect_base {
        return base.trim_end_matches('/').to_owned();
    }

    // Referer carries the full browser URL; extract just the origin.
    if let Some(referer) = headers.get(header::REFERER).and_then(|v| v.to_str().ok())
        && let Ok(parsed) = url::Url::parse(referer)
    {
        let origin = parsed.origin().unicode_serialization();
        if origin != "null" {
            return origin;
        }
    }

    if let Some(origin) = headers.get("origin").and_then(|v| v.to_str().ok()) {
        return origin.trim_end_matches('/').to_owned();
    }

    if let Some(host) = headers.get(header::HOST).and_then(|v| v.to_str().ok()) {
        return format!("http://{host}");
    }

    "http://localhost:8080".to_owned()
}
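
A sketch of that priority order in practice (header values illustrative): with no `redirect_base` configured, the Referer wins even when a proxy has rewritten `Host`:

let cfg = AuthConfig {
    client_id: String::new(),
    client_secret: String::new(),
    redirect_base: None,
};
let mut headers = HeaderMap::new();
headers.insert(header::REFERER, "https://banner.example.dev/courses/12345".parse().unwrap());
headers.insert(header::HOST, "127.0.0.1:8080".parse().unwrap());
// The full Referer URL is reduced to just its origin.
assert_eq!(resolve_origin(&cfg, &headers), "https://banner.example.dev");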

#[derive(Deserialize)]
pub struct CallbackParams {
    code: String,
    state: String,
}

#[derive(Deserialize)]
struct TokenResponse {
    access_token: String,
}

#[derive(Deserialize)]
struct DiscordUser {
    id: String,
    username: String,
    avatar: Option<String>,
}
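
For reference, these structs pick out only the fields the flow needs; a hedged sketch of the (abridged) Discord payloads, with illustrative values:

// POST https://discord.com/api/oauth2/token
//   -> { "access_token": "6qrZcUqja...", "token_type": "Bearer", "expires_in": 604800, ... }
// GET https://discord.com/api/users/@me
//   -> { "id": "80351110224678912", "username": "nelly", "avatar": "8342c...", ... }
// serde silently ignores the extra fields during deserialization.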

/// Extract the `session` cookie value from request headers.
fn extract_session_token(headers: &HeaderMap) -> Option<String> {
    headers
        .get(header::COOKIE)?
        .to_str()
        .ok()?
        .split(';')
        .find_map(|cookie| {
            let cookie = cookie.trim();
            cookie.strip_prefix("session=").map(|v| v.to_owned())
        })
}
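
A minimal check of the parsing above (cookie names illustrative):

let mut headers = HeaderMap::new();
headers.insert(header::COOKIE, "theme=dark; session=tok123; lang=en".parse().unwrap());
// Finds the `session` cookie regardless of position, trimming whitespace.
assert_eq!(extract_session_token(&headers), Some("tok123".to_owned()));
assert_eq!(extract_session_token(&HeaderMap::new()), None);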

/// Build a `Set-Cookie` header value for the session cookie.
fn session_cookie(token: &str, max_age: i64, secure: bool) -> String {
    let mut cookie = format!("session={token}; HttpOnly; SameSite=Lax; Path=/; Max-Age={max_age}");
    if secure {
        cookie.push_str("; Secure");
    }
    cookie
}
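
For concreteness, the exact header value this produces (token and lifetime illustrative):

assert_eq!(
    session_cookie("tok123", 604800, true),
    "session=tok123; HttpOnly; SameSite=Lax; Path=/; Max-Age=604800; Secure"
);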

/// `GET /api/auth/login` — Redirect to Discord OAuth2 authorization page.
pub async fn auth_login(
    State(state): State<AppState>,
    Extension(auth_config): Extension<AuthConfig>,
    headers: HeaderMap,
) -> Redirect {
    let origin = resolve_origin(&auth_config, &headers);
    let redirect_uri = format!("{origin}{CALLBACK_PATH}");
    let csrf_state = state.oauth_state_store.generate(origin);
    let redirect_uri_encoded = urlencoding::encode(&redirect_uri);

    let url = format!(
        "https://discord.com/oauth2/authorize\
         ?client_id={}\
         &redirect_uri={redirect_uri_encoded}\
         &response_type=code\
         &scope=identify\
         &state={csrf_state}",
        auth_config.client_id,
    );

    Redirect::temporary(&url)
}
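
What the browser actually receives, as a hedged sketch (client id and state value illustrative; the URL is one line in practice):

// Location: https://discord.com/oauth2/authorize?client_id=123456789012345678
//   &redirect_uri=https%3A%2F%2Fbanner.example.dev%2Fapi%2Fauth%2Fcallback
//   &response_type=code&scope=identify&state=k3J9...
// Discord sends the user back to the callback with `code` and the same `state`.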

/// `GET /api/auth/callback` — Handle Discord OAuth2 callback.
pub async fn auth_callback(
    State(state): State<AppState>,
    Extension(auth_config): Extension<AuthConfig>,
    Query(params): Query<CallbackParams>,
) -> Result<Response, (StatusCode, Json<Value>)> {
    // 1. Validate CSRF state and recover the origin used during login
    let origin = state
        .oauth_state_store
        .validate(&params.state)
        .ok_or_else(|| {
            warn!("OAuth callback with invalid CSRF state");
            (
                StatusCode::BAD_REQUEST,
                Json(json!({ "error": "Invalid OAuth state" })),
            )
        })?;

    // 2. Exchange authorization code for access token
    let redirect_uri = format!("{origin}{CALLBACK_PATH}");
    let client = reqwest::Client::new();
    let token_response = client
        .post("https://discord.com/api/oauth2/token")
        .form(&[
            ("client_id", auth_config.client_id.as_str()),
            ("client_secret", auth_config.client_secret.as_str()),
            ("grant_type", "authorization_code"),
            ("code", params.code.as_str()),
            ("redirect_uri", redirect_uri.as_str()),
        ])
        .send()
        .await
        .map_err(|e| {
            error!(error = %e, "failed to exchange OAuth code for token");
            (
                StatusCode::BAD_GATEWAY,
                Json(json!({ "error": "Failed to exchange code with Discord" })),
            )
        })?;

    if !token_response.status().is_success() {
        let status = token_response.status();
        let body = token_response.text().await.unwrap_or_default();
        error!(%status, %body, "Discord token exchange returned error");
        return Err((
            StatusCode::BAD_GATEWAY,
            Json(json!({ "error": "Discord token exchange failed" })),
        ));
    }

    let token_data: TokenResponse = token_response.json().await.map_err(|e| {
        error!(error = %e, "failed to parse Discord token response");
        (
            StatusCode::BAD_GATEWAY,
            Json(json!({ "error": "Invalid token response from Discord" })),
        )
    })?;

    // 3. Fetch Discord user profile
    let discord_user: DiscordUser = client
        .get("https://discord.com/api/users/@me")
        .bearer_auth(&token_data.access_token)
        .send()
        .await
        .map_err(|e| {
            error!(error = %e, "failed to fetch Discord user profile");
            (
                StatusCode::BAD_GATEWAY,
                Json(json!({ "error": "Failed to fetch Discord profile" })),
            )
        })?
        .json()
        .await
        .map_err(|e| {
            error!(error = %e, "failed to parse Discord user profile");
            (
                StatusCode::BAD_GATEWAY,
                Json(json!({ "error": "Invalid user profile from Discord" })),
            )
        })?;

    let discord_id: i64 = discord_user.id.parse().map_err(|_| {
        error!(id = %discord_user.id, "Discord user ID is not a valid i64");
        (
            StatusCode::BAD_GATEWAY,
            Json(json!({ "error": "Invalid Discord user ID" })),
        )
    })?;

    // 4. Upsert user
    let user = crate::data::users::upsert_user(
        &state.db_pool,
        discord_id,
        &discord_user.username,
        discord_user.avatar.as_deref(),
    )
    .await
    .map_err(|e| {
        error!(error = %e, "failed to upsert user");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({ "error": "Database error" })),
        )
    })?;

    info!(discord_id, username = %user.discord_username, "user authenticated via OAuth");

    // 5. Create session
    let session = crate::data::sessions::create_session(
        &state.db_pool,
        discord_id,
        Duration::from_secs(crate::data::sessions::SESSION_DURATION_SECS),
    )
    .await
    .map_err(|e| {
        error!(error = %e, "failed to create session");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({ "error": "Failed to create session" })),
        )
    })?;

    // 6. Build response with session cookie
    let secure = redirect_uri.starts_with("https://");
    let cookie = session_cookie(
        &session.id,
        crate::data::sessions::SESSION_DURATION_SECS as i64,
        secure,
    );

    let redirect_to = if user.is_admin { "/admin" } else { "/" };

    Ok((
        [(header::SET_COOKIE, cookie)],
        Redirect::temporary(redirect_to),
    )
        .into_response())
}

/// `POST /api/auth/logout` — Destroy the current session.
pub async fn auth_logout(State(state): State<AppState>, headers: HeaderMap) -> Response {
    if let Some(token) = extract_session_token(&headers) {
        if let Err(e) = crate::data::sessions::delete_session(&state.db_pool, &token).await {
            warn!(error = %e, "failed to delete session from database");
        }
        state.session_cache.evict(&token);
    }

    let cookie = session_cookie("", 0, false);

    (
        StatusCode::OK,
        [(header::SET_COOKIE, cookie)],
        Json(json!({ "ok": true })),
    )
        .into_response()
}

/// `GET /api/auth/me` — Return the current authenticated user's info.
pub async fn auth_me(
    State(state): State<AppState>,
    headers: HeaderMap,
) -> Result<Json<Value>, StatusCode> {
    let token = extract_session_token(&headers).ok_or(StatusCode::UNAUTHORIZED)?;

    let user = state
        .session_cache
        .get_user(&token)
        .await
        .ok_or(StatusCode::UNAUTHORIZED)?;

    Ok(Json(json!({
        "discordId": user.discord_id.to_string(),
        "username": user.discord_username,
        "avatarHash": user.discord_avatar_hash,
        "isAdmin": user.is_admin,
    })))
}
@@ -0,0 +1,136 @@
//! Web API endpoints for calendar export (ICS download + Google Calendar redirect).

use axum::{
    extract::{Path, State},
    http::{StatusCode, header},
    response::{IntoResponse, Redirect, Response},
};

use crate::calendar::{CalendarCourse, generate_gcal_url, generate_ics};
use crate::data::models::DbMeetingTime;
use crate::state::AppState;

/// Fetch course + meeting times, build a `CalendarCourse`.
async fn load_calendar_course(
    state: &AppState,
    term: &str,
    crn: &str,
) -> Result<(CalendarCourse, Vec<DbMeetingTime>), (StatusCode, String)> {
    let course = crate::data::courses::get_course_by_crn(&state.db_pool, crn, term)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "Calendar: course lookup failed");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                "Lookup failed".to_string(),
            )
        })?
        .ok_or_else(|| (StatusCode::NOT_FOUND, "Course not found".to_string()))?;

    let instructors = crate::data::courses::get_course_instructors(&state.db_pool, course.id)
        .await
        .unwrap_or_default();

    let primary_instructor = instructors
        .iter()
        .find(|i| i.is_primary)
        .or(instructors.first())
        .map(|i| i.display_name.clone());

    let meeting_times: Vec<DbMeetingTime> =
        serde_json::from_value(course.meeting_times.clone()).unwrap_or_default();

    let cal_course = CalendarCourse {
        crn: course.crn.clone(),
        subject: course.subject.clone(),
        course_number: course.course_number.clone(),
        title: course.title.clone(),
        sequence_number: course.sequence_number.clone(),
        primary_instructor,
    };

    Ok((cal_course, meeting_times))
}

/// `GET /api/courses/{term}/{crn}/calendar.ics`
///
/// Returns an ICS file download for the course.
pub async fn course_ics(
    State(state): State<AppState>,
    Path((term, crn)): Path<(String, String)>,
) -> Result<Response, (StatusCode, String)> {
    let (cal_course, meeting_times) = load_calendar_course(&state, &term, &crn).await?;

    if meeting_times.is_empty() {
        return Err((
            StatusCode::NOT_FOUND,
            "No meeting times found for this course".to_string(),
        ));
    }

    let result = generate_ics(&cal_course, &meeting_times).map_err(|e| {
        tracing::error!(error = %e, "ICS generation failed");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            "Failed to generate ICS file".to_string(),
        )
    })?;

    let response = (
        [
            (header::CONTENT_TYPE, "text/calendar; charset=utf-8"),
            (
                header::CONTENT_DISPOSITION,
                &format!("attachment; filename=\"{}\"", result.filename),
            ),
            (header::CACHE_CONTROL, "no-cache"),
        ],
        result.content,
    )
        .into_response();

    Ok(response)
}
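
Shape of a successful download, a sketch matching the header tuple above (the filename comes from `generate_ics` and is only illustrative here):

// HTTP/1.1 200 OK
// Content-Type: text/calendar; charset=utf-8
// Content-Disposition: attachment; filename="CS-1063-001.ics"
// Cache-Control: no-cache
//
// BEGIN:VCALENDAR ... END:VCALENDAR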

/// `GET /api/courses/{term}/{crn}/gcal`
///
/// Redirects to Google Calendar with a pre-filled event for the first meeting time.
/// If multiple meeting times exist, uses the first one with scheduled days/times.
pub async fn course_gcal(
    State(state): State<AppState>,
    Path((term, crn)): Path<(String, String)>,
) -> Result<Response, (StatusCode, String)> {
    let (cal_course, meeting_times) = load_calendar_course(&state, &term, &crn).await?;

    if meeting_times.is_empty() {
        return Err((
            StatusCode::NOT_FOUND,
            "No meeting times found for this course".to_string(),
        ));
    }

    // Prefer the first meeting time that has actual days/times scheduled
    let mt = meeting_times
        .iter()
        .find(|mt| {
            mt.begin_time.is_some()
                && (mt.monday
                    || mt.tuesday
                    || mt.wednesday
                    || mt.thursday
                    || mt.friday
                    || mt.saturday
                    || mt.sunday)
        })
        .unwrap_or(&meeting_times[0]);

    let url = generate_gcal_url(&cal_course, mt).map_err(|e| {
        tracing::error!(error = %e, "Google Calendar URL generation failed");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            "Failed to generate Google Calendar URL".to_string(),
        )
    })?;

    Ok(Redirect::temporary(&url).into_response())
}
@@ -0,0 +1,196 @@
//! Content encoding negotiation for pre-compressed asset serving.
//!
//! Parses Accept-Encoding headers with quality values and returns
//! supported encodings in priority order for content negotiation.

use axum::http::{HeaderMap, HeaderValue, header};

/// Minimum size threshold for compression (bytes).
///
/// Must match `MIN_SIZE` in `web/scripts/compress-assets.ts`.
pub const COMPRESSION_MIN_SIZE: usize = 512;

/// Supported content encodings in priority order (best compression first).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ContentEncoding {
    Zstd,
    Brotli,
    Gzip,
    Identity,
}

impl ContentEncoding {
    /// File extension suffix for pre-compressed variant lookup.
    #[inline]
    pub fn extension(&self) -> &'static str {
        match self {
            Self::Zstd => ".zst",
            Self::Brotli => ".br",
            Self::Gzip => ".gz",
            Self::Identity => "",
        }
    }

    /// `Content-Encoding` header value, or `None` for identity.
    #[inline]
    pub fn header_value(&self) -> Option<HeaderValue> {
        match self {
            Self::Zstd => Some(HeaderValue::from_static("zstd")),
            Self::Brotli => Some(HeaderValue::from_static("br")),
            Self::Gzip => Some(HeaderValue::from_static("gzip")),
            Self::Identity => None,
        }
    }

    /// Default priority when quality values are equal (higher = better).
    #[inline]
    fn default_priority(&self) -> u8 {
        match self {
            Self::Zstd => 4,
            Self::Brotli => 3,
            Self::Gzip => 2,
            Self::Identity => 1,
        }
    }
}

/// Parse `Accept-Encoding` header and return supported encodings in priority order.
///
/// Supports quality values: `Accept-Encoding: gzip;q=0.8, br;q=1.0, zstd`
/// When quality values are equal: zstd > brotli > gzip > identity.
/// Encodings with `q=0` are excluded.
pub fn parse_accepted_encodings(headers: &HeaderMap) -> Vec<ContentEncoding> {
    let Some(accept) = headers
        .get(header::ACCEPT_ENCODING)
        .and_then(|v| v.to_str().ok())
    else {
        return vec![ContentEncoding::Identity];
    };

    let mut encodings: Vec<(ContentEncoding, f32)> = Vec::new();

    for part in accept.split(',') {
        let part = part.trim();
        if part.is_empty() {
            continue;
        }

        let (encoding_str, quality) = if let Some((enc, params)) = part.split_once(';') {
            let q = params
                .split(';')
                .find_map(|p| p.trim().strip_prefix("q="))
                .and_then(|q| q.parse::<f32>().ok())
                .unwrap_or(1.0);
            (enc.trim(), q)
        } else {
            (part, 1.0)
        };

        if quality == 0.0 {
            continue;
        }

        let encoding = match encoding_str.to_lowercase().as_str() {
            "zstd" => ContentEncoding::Zstd,
            "br" | "brotli" => ContentEncoding::Brotli,
            "gzip" | "x-gzip" => ContentEncoding::Gzip,
            "*" => ContentEncoding::Gzip,
            "identity" => ContentEncoding::Identity,
            _ => continue,
        };

        encodings.push((encoding, quality));
    }

    // Sort by quality (desc), then default priority (desc)
    encodings.sort_by(|a, b| {
        b.1.partial_cmp(&a.1)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| b.0.default_priority().cmp(&a.0.default_priority()))
    });

    if encodings.is_empty() {
        vec![ContentEncoding::Identity]
    } else {
        encodings.into_iter().map(|(e, _)| e).collect()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_all_encodings() {
        let mut headers = HeaderMap::new();
        headers.insert(header::ACCEPT_ENCODING, "gzip, br, zstd".parse().unwrap());
        let encodings = parse_accepted_encodings(&headers);
        assert_eq!(encodings[0], ContentEncoding::Zstd);
        assert_eq!(encodings[1], ContentEncoding::Brotli);
        assert_eq!(encodings[2], ContentEncoding::Gzip);
    }

    #[test]
    fn test_parse_with_quality_values() {
        let mut headers = HeaderMap::new();
        headers.insert(
            header::ACCEPT_ENCODING,
            "gzip;q=1.0, br;q=0.5, zstd;q=0.8".parse().unwrap(),
        );
        let encodings = parse_accepted_encodings(&headers);
        assert_eq!(encodings[0], ContentEncoding::Gzip);
        assert_eq!(encodings[1], ContentEncoding::Zstd);
        assert_eq!(encodings[2], ContentEncoding::Brotli);
    }

    #[test]
    fn test_no_header_returns_identity() {
        let headers = HeaderMap::new();
        let encodings = parse_accepted_encodings(&headers);
        assert_eq!(encodings, vec![ContentEncoding::Identity]);
    }

    #[test]
    fn test_disabled_encoding_excluded() {
        let mut headers = HeaderMap::new();
        headers.insert(
            header::ACCEPT_ENCODING,
            "zstd;q=0, br, gzip".parse().unwrap(),
        );
        let encodings = parse_accepted_encodings(&headers);
        assert_eq!(encodings[0], ContentEncoding::Brotli);
        assert_eq!(encodings[1], ContentEncoding::Gzip);
        assert!(!encodings.contains(&ContentEncoding::Zstd));
    }

    #[test]
    fn test_real_chrome_header() {
        let mut headers = HeaderMap::new();
        headers.insert(
            header::ACCEPT_ENCODING,
            "gzip, deflate, br, zstd".parse().unwrap(),
        );
        assert_eq!(parse_accepted_encodings(&headers)[0], ContentEncoding::Zstd);
    }

    #[test]
    fn test_extensions() {
        assert_eq!(ContentEncoding::Zstd.extension(), ".zst");
        assert_eq!(ContentEncoding::Brotli.extension(), ".br");
        assert_eq!(ContentEncoding::Gzip.extension(), ".gz");
        assert_eq!(ContentEncoding::Identity.extension(), "");
    }

    #[test]
    fn test_header_values() {
        assert_eq!(
            ContentEncoding::Zstd.header_value().unwrap(),
            HeaderValue::from_static("zstd")
        );
        assert_eq!(
            ContentEncoding::Brotli.header_value().unwrap(),
            HeaderValue::from_static("br")
        );
        assert!(ContentEncoding::Identity.header_value().is_none());
    }
}
@@ -0,0 +1,74 @@
//! Axum extractors for authentication and authorization.

use axum::extract::FromRequestParts;
use axum::http::{StatusCode, header};
use axum::response::Json;
use http::request::Parts;
use serde_json::json;

use crate::data::models::User;
use crate::state::AppState;

/// Extractor that resolves the session cookie to an authenticated [`User`].
///
/// Returns 401 if no valid session cookie is present.
pub struct AuthUser(pub User);

impl FromRequestParts<AppState> for AuthUser {
    type Rejection = (StatusCode, Json<serde_json::Value>);

    async fn from_request_parts(
        parts: &mut Parts,
        state: &AppState,
    ) -> Result<Self, Self::Rejection> {
        let token = parts
            .headers
            .get(header::COOKIE)
            .and_then(|v| v.to_str().ok())
            .and_then(|cookies| {
                cookies
                    .split(';')
                    .find_map(|c| c.trim().strip_prefix("session=").map(|v| v.to_owned()))
            })
            .ok_or_else(|| {
                (
                    StatusCode::UNAUTHORIZED,
                    Json(json!({"error": "unauthorized", "message": "No session cookie"})),
                )
            })?;

        let user = state.session_cache.get_user(&token).await.ok_or_else(|| {
            (
                StatusCode::UNAUTHORIZED,
                Json(json!({"error": "unauthorized", "message": "Invalid or expired session"})),
            )
        })?;

        Ok(AuthUser(user))
    }
}

/// Extractor that requires an authenticated admin user.
///
/// Returns 401 if not authenticated, 403 if not admin.
pub struct AdminUser(pub User);

impl FromRequestParts<AppState> for AdminUser {
    type Rejection = (StatusCode, Json<serde_json::Value>);

    async fn from_request_parts(
        parts: &mut Parts,
        state: &AppState,
    ) -> Result<Self, Self::Rejection> {
        let AuthUser(user) = AuthUser::from_request_parts(parts, state).await?;

        if !user.is_admin {
            return Err((
                StatusCode::FORBIDDEN,
                Json(json!({"error": "forbidden", "message": "Admin access required"})),
            ));
        }

        Ok(AdminUser(user))
    }
}
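
A sketch of how a handler consumes these extractors (handler name illustrative; the actual route wiring lives in `routes.rs`):

async fn admin_whoami(AdminUser(user): AdminUser) -> Json<serde_json::Value> {
    // 401/403 rejections never reach this body; axum returns them early.
    Json(json!({
        "discordId": user.discord_id.to_string(),
        "isAdmin": user.is_admin,
    }))
}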
@@ -1,7 +1,19 @@
//! Web API module for the banner application.

pub mod admin;
pub mod admin_rmp;
pub mod admin_scraper;
#[cfg(feature = "embed-assets")]
pub mod assets;
pub mod auth;
pub mod calendar;
#[cfg(feature = "embed-assets")]
pub mod encoding;
pub mod extractors;
pub mod routes;
pub mod schedule_cache;
pub mod session_cache;
pub mod timeline;
pub mod ws;

pub use routes::*;

+287
-195
@@ -1,20 +1,26 @@
//! Web API endpoints for Banner bot monitoring and metrics.

use axum::{
    Router,
    Extension, Router,
    body::Body,
    extract::{Path, Query, Request, State},
    http::StatusCode as AxumStatusCode,
    response::{Json, Response},
    routing::get,
    routing::{get, post, put},
};

use crate::web::admin;
use crate::web::admin_rmp;
use crate::web::admin_scraper;
use crate::web::auth::{self, AuthConfig};
use crate::web::calendar;
use crate::web::timeline;
use crate::web::ws;
#[cfg(feature = "embed-assets")]
use axum::{
    http::{HeaderMap, HeaderValue, StatusCode, Uri},
    response::{Html, IntoResponse},
    http::{HeaderMap, StatusCode, Uri},
    response::IntoResponse,
};
#[cfg(feature = "embed-assets")]
use http::header;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use std::{collections::BTreeMap, time::Duration};
@@ -24,63 +30,90 @@ use crate::state::AppState;
use crate::status::ServiceStatus;
#[cfg(not(feature = "embed-assets"))]
use tower_http::cors::{Any, CorsLayer};
use tower_http::{classify::ServerErrorsFailureClass, timeout::TimeoutLayer, trace::TraceLayer};
use tower_http::{
    classify::ServerErrorsFailureClass, compression::CompressionLayer, timeout::TimeoutLayer,
    trace::TraceLayer,
};
use tracing::{Span, debug, trace, warn};

#[cfg(feature = "embed-assets")]
use crate::web::assets::{WebAssets, get_asset_metadata_cached};

/// Set appropriate caching headers based on asset type
#[cfg(feature = "embed-assets")]
fn set_caching_headers(response: &mut Response, path: &str, etag: &str) {
    let headers = response.headers_mut();

    // Set ETag
    if let Ok(etag_value) = HeaderValue::from_str(etag) {
        headers.insert(header::ETAG, etag_value);
    }

    // Set Cache-Control based on asset type
    let cache_control = if path.starts_with("assets/") {
        // Static assets with hashed filenames - long-term cache
        "public, max-age=31536000, immutable"
    } else if path == "index.html" {
        // HTML files - short-term cache
        "public, max-age=300"
    } else {
        match path.split_once('.').map(|(_, extension)| extension) {
            Some(ext) => match ext {
                // CSS/JS files - medium-term cache
                "css" | "js" => "public, max-age=86400",
                // Images - long-term cache
                "png" | "jpg" | "jpeg" | "gif" | "svg" | "ico" => "public, max-age=2592000",
                // Default for other files
                _ => "public, max-age=3600",
            },
            // Default for files without an extension
            None => "public, max-age=3600",
        }
    };

    if let Ok(cache_control_value) = HeaderValue::from_str(cache_control) {
        headers.insert(header::CACHE_CONTROL, cache_control_value);
    }
}
use crate::web::assets::try_serve_asset_with_encoding;

/// Creates the web server router
pub fn create_router(app_state: AppState) -> Router {
pub fn create_router(app_state: AppState, auth_config: AuthConfig) -> Router {
    let api_router = Router::new()
        .route("/health", get(health))
        .route("/status", get(status))
        .route("/metrics", get(metrics))
        .route("/courses/search", get(search_courses))
        .route("/courses/{term}/{crn}", get(get_course))
        .route(
            "/courses/{term}/{crn}/calendar.ics",
            get(calendar::course_ics),
        )
        .route("/courses/{term}/{crn}/gcal", get(calendar::course_gcal))
        .route("/terms", get(get_terms))
        .route("/subjects", get(get_subjects))
        .route("/reference/{category}", get(get_reference))
        .route("/timeline", post(timeline::timeline))
        .with_state(app_state.clone());

    let auth_router = Router::new()
        .route("/auth/login", get(auth::auth_login))
        .route("/auth/callback", get(auth::auth_callback))
        .route("/auth/logout", post(auth::auth_logout))
        .route("/auth/me", get(auth::auth_me))
        .layer(Extension(auth_config))
        .with_state(app_state.clone());

    let admin_router = Router::new()
        .route("/admin/status", get(admin::admin_status))
        .route("/admin/users", get(admin::list_users))
        .route(
            "/admin/users/{discord_id}/admin",
            put(admin::set_user_admin),
        )
        .route("/admin/scrape-jobs", get(admin::list_scrape_jobs))
        .route("/admin/scrape-jobs/ws", get(ws::scrape_jobs_ws))
        .route("/admin/audit-log", get(admin::list_audit_log))
        .route("/admin/instructors", get(admin_rmp::list_instructors))
        .route("/admin/instructors/{id}", get(admin_rmp::get_instructor))
        .route(
            "/admin/instructors/{id}/match",
            post(admin_rmp::match_instructor),
        )
        .route(
            "/admin/instructors/{id}/reject-candidate",
            post(admin_rmp::reject_candidate),
        )
        .route(
            "/admin/instructors/{id}/reject-all",
            post(admin_rmp::reject_all),
        )
        .route(
            "/admin/instructors/{id}/unmatch",
            post(admin_rmp::unmatch_instructor),
        )
        .route("/admin/rmp/rescore", post(admin_rmp::rescore))
        .route("/admin/scraper/stats", get(admin_scraper::scraper_stats))
        .route(
            "/admin/scraper/timeseries",
            get(admin_scraper::scraper_timeseries),
        )
        .route(
            "/admin/scraper/subjects",
            get(admin_scraper::scraper_subjects),
        )
        .route(
            "/admin/scraper/subjects/{subject}",
            get(admin_scraper::scraper_subject_detail),
        )
        .with_state(app_state);

    let mut router = Router::new().nest("/api", api_router);
    let mut router = Router::new()
        .nest("/api", api_router)
        .nest("/api", auth_router)
        .nest("/api", admin_router);

    // When embed-assets feature is enabled, serve embedded static assets
    #[cfg(feature = "embed-assets")]
@@ -100,6 +133,13 @@ pub fn create_router(app_state: AppState) -> Router {
    }

    router.layer((
        // Compress API responses (gzip/brotli/zstd). Pre-compressed static
        // assets already have Content-Encoding set, so tower-http skips them.
        CompressionLayer::new()
            .zstd(true)
            .br(true)
            .gzip(true)
            .quality(tower_http::CompressionLevel::Fastest),
        TraceLayer::new_for_http()
            .make_span_with(|request: &Request<Body>| {
                tracing::debug_span!("request", path = request.uri().path())
@@ -146,71 +186,35 @@ pub fn create_router(app_state: AppState) -> Router {
    ))
}

/// Handler that extracts request information for caching
/// SPA fallback handler with content encoding negotiation.
///
/// Serves embedded static assets with pre-compressed variants when available,
/// falling back to `index.html` for SPA client-side routing.
#[cfg(feature = "embed-assets")]
async fn fallback(request: Request) -> Response {
async fn fallback(request: Request) -> axum::response::Response {
    let uri = request.uri().clone();
    let headers = request.headers().clone();
    handle_spa_fallback_with_headers(uri, headers).await
    handle_spa_fallback(uri, headers).await
}

/// Handles SPA routing by serving index.html for non-API, non-asset requests
/// This version includes HTTP caching headers and ETag support
#[cfg(feature = "embed-assets")]
async fn handle_spa_fallback_with_headers(uri: Uri, request_headers: HeaderMap) -> Response {
    let path = uri.path().trim_start_matches('/');

    if let Some(content) = WebAssets::get(path) {
        // Get asset metadata (MIME type and hash) with caching
        let metadata = get_asset_metadata_cached(path, &content.data);

        // Check if client has a matching ETag (conditional request)
        if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
            && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
        {
            return StatusCode::NOT_MODIFIED.into_response();
        }

        // Use cached MIME type, only set Content-Type if we have a valid MIME type
        let mut response = (
            [(
                header::CONTENT_TYPE,
                // For unknown types, set to application/octet-stream
                metadata
                    .mime_type
                    .unwrap_or("application/octet-stream".to_string()),
            )],
            content.data,
        )
            .into_response();

        // Set caching headers
        set_caching_headers(&mut response, path, &metadata.hash.quoted());
async fn handle_spa_fallback(uri: Uri, request_headers: HeaderMap) -> axum::response::Response {
    let path = uri.path();

    // Try serving the exact asset (with encoding negotiation)
    if let Some(response) = try_serve_asset_with_encoding(path, &request_headers) {
        return response;
    } else {
        // Any assets that are not found should be treated as a 404, not falling back to the SPA index.html
        if path.starts_with("assets/") {
            return (StatusCode::NOT_FOUND, "Asset not found").into_response();
        }
    }

    // Fall back to the SPA index.html
    match WebAssets::get("index.html") {
        Some(content) => {
            let metadata = get_asset_metadata_cached("index.html", &content.data);
    // SvelteKit assets under _app/ that don't exist are a hard 404
    let trimmed = path.trim_start_matches('/');
    if trimmed.starts_with("_app/") || trimmed.starts_with("assets/") {
        return (StatusCode::NOT_FOUND, "Asset not found").into_response();
    }

            // Check if client has a matching ETag for index.html
            if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
                && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
            {
                return StatusCode::NOT_MODIFIED.into_response();
            }

            let mut response = Html(content.data).into_response();
            set_caching_headers(&mut response, "index.html", &metadata.hash.quoted());
            response
        }
    // SPA fallback: serve index.html with encoding negotiation
    match try_serve_asset_with_encoding("/index.html", &request_headers) {
        Some(response) => response,
        None => (
            StatusCode::INTERNAL_SERVER_ERROR,
            "Failed to load index.html",
@@ -284,24 +288,140 @@ async fn status(State(state): State<AppState>) -> Json<StatusResponse> {
}

/// Metrics endpoint for monitoring
async fn metrics() -> Json<Value> {
    // For now, return basic metrics structure
    Json(json!({
        "banner_api": {
            "status": "connected"
        },
        "timestamp": chrono::Utc::now().to_rfc3339()
    }))
async fn metrics(
    State(state): State<AppState>,
    Query(params): Query<MetricsParams>,
) -> Result<Json<Value>, (AxumStatusCode, String)> {
    let limit = params.limit.clamp(1, 5000);

    // Parse range shorthand, defaulting to 24h
    let range_str = params.range.as_deref().unwrap_or("24h");
    let duration = match range_str {
        "1h" => chrono::Duration::hours(1),
        "6h" => chrono::Duration::hours(6),
        "24h" => chrono::Duration::hours(24),
        "7d" => chrono::Duration::days(7),
        "30d" => chrono::Duration::days(30),
        _ => {
            return Err((
                AxumStatusCode::BAD_REQUEST,
                format!("Invalid range '{range_str}'. Valid: 1h, 6h, 24h, 7d, 30d"),
            ));
        }
    };
    let since = chrono::Utc::now() - duration;

    // Resolve course_id: explicit param takes priority, then term+crn lookup
    let course_id = if let Some(id) = params.course_id {
        Some(id)
    } else if let (Some(term), Some(crn)) = (params.term.as_deref(), params.crn.as_deref()) {
        let row: Option<(i32,)> =
            sqlx::query_as("SELECT id FROM courses WHERE term_code = $1 AND crn = $2")
                .bind(term)
                .bind(crn)
                .fetch_optional(&state.db_pool)
                .await
                .map_err(|e| {
                    tracing::error!(error = %e, "Course lookup for metrics failed");
                    (
                        AxumStatusCode::INTERNAL_SERVER_ERROR,
                        "Course lookup failed".to_string(),
                    )
                })?;
        row.map(|(id,)| id)
    } else {
        None
    };

    // Build query dynamically based on filters
    let metrics: Vec<(i32, i32, chrono::DateTime<chrono::Utc>, i32, i32, i32)> =
        if let Some(cid) = course_id {
            sqlx::query_as(
                "SELECT id, course_id, timestamp, enrollment, wait_count, seats_available \
                 FROM course_metrics \
                 WHERE course_id = $1 AND timestamp >= $2 \
                 ORDER BY timestamp DESC \
                 LIMIT $3",
            )
            .bind(cid)
            .bind(since)
            .bind(limit)
            .fetch_all(&state.db_pool)
            .await
        } else {
            sqlx::query_as(
                "SELECT id, course_id, timestamp, enrollment, wait_count, seats_available \
                 FROM course_metrics \
                 WHERE timestamp >= $1 \
                 ORDER BY timestamp DESC \
                 LIMIT $2",
            )
            .bind(since)
            .bind(limit)
            .fetch_all(&state.db_pool)
            .await
        }
        .map_err(|e| {
            tracing::error!(error = %e, "Metrics query failed");
            (
                AxumStatusCode::INTERNAL_SERVER_ERROR,
                "Metrics query failed".to_string(),
            )
        })?;

    let count = metrics.len();
    let metrics_json: Vec<Value> = metrics
        .into_iter()
        .map(
            |(id, course_id, timestamp, enrollment, wait_count, seats_available)| {
                json!({
                    "id": id,
                    "courseId": course_id,
                    "timestamp": timestamp.to_rfc3339(),
                    "enrollment": enrollment,
                    "waitCount": wait_count,
                    "seatsAvailable": seats_available,
                })
            },
        )
        .collect();

    Ok(Json(json!({
        "metrics": metrics_json,
        "count": count,
        "timestamp": chrono::Utc::now().to_rfc3339(),
    })))
}
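
An example round-trip through the new endpoint (all values illustrative):

// GET /api/metrics?term=202610&crn=12345&range=7d&limit=100
// resolves the course id from term+crn, then returns newest-first rows:
// {
//   "metrics": [{ "id": 1, "courseId": 42, "timestamp": "2025-09-01T15:00:00+00:00",
//                 "enrollment": 28, "waitCount": 3, "seatsAvailable": 2 }],
//   "count": 1,
//   "timestamp": "2025-09-08T15:00:00+00:00"
// }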

// ============================================================
// Course search & detail API
// ============================================================

#[derive(Deserialize)]
struct MetricsParams {
    course_id: Option<i32>,
    term: Option<String>,
    crn: Option<String>,
    /// Shorthand durations: "1h", "6h", "24h", "7d", "30d"
    range: Option<String>,
    #[serde(default = "default_metrics_limit")]
    limit: i32,
}

fn default_metrics_limit() -> i32 {
    500
}

#[derive(Deserialize)]
struct SubjectsParams {
    term: String,
}

#[derive(Deserialize)]
struct SearchParams {
    term: String,
    subject: Option<String>,
    #[serde(default)]
    subject: Vec<String>,
    q: Option<String>,
    course_number_low: Option<i32>,
    course_number_high: Option<i32>,
@@ -317,59 +437,12 @@ struct SearchParams {
    sort_dir: Option<SortDirection>,
}
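
Background note (not part of the diff): `subject` becoming a `Vec<String>` makes the filter repeatable, e.g. `GET /api/courses/search?term=202610&subject=CS&subject=MAT`. Plain `axum::extract::Query` (backed by serde_urlencoded) cannot collect repeated keys into a Vec, which is presumably why the handler below switches to `axum_extra::extract::Query` (backed by serde_html_form).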

#[derive(Debug, Clone, Copy, Deserialize)]
#[serde(rename_all = "snake_case")]
enum SortColumn {
    CourseCode,
    Title,
    Instructor,
    Time,
    Seats,
}

#[derive(Debug, Clone, Copy, Deserialize)]
#[serde(rename_all = "snake_case")]
enum SortDirection {
    Asc,
    Desc,
}
use crate::data::courses::{SortColumn, SortDirection};

fn default_limit() -> i32 {
    25
}

/// Build a safe ORDER BY clause from the validated sort column and direction.
fn sort_clause(column: Option<SortColumn>, direction: Option<SortDirection>) -> String {
    let dir = match direction.unwrap_or(SortDirection::Asc) {
        SortDirection::Asc => "ASC",
        SortDirection::Desc => "DESC",
    };

    match column {
        Some(SortColumn::CourseCode) => {
            format!("subject {dir}, course_number {dir}, sequence_number {dir}")
        }
        Some(SortColumn::Title) => format!("title {dir}"),
        Some(SortColumn::Instructor) => {
            // Sort by primary instructor display name via a subquery
            format!(
                "(SELECT i.display_name FROM course_instructors ci \
                 JOIN instructors i ON i.banner_id = ci.instructor_id \
                 WHERE ci.course_id = courses.id AND ci.is_primary = true \
                 LIMIT 1) {dir} NULLS LAST"
            )
        }
        Some(SortColumn::Time) => {
            // Sort by first meeting time's begin_time via JSONB
            format!("(meeting_times->0->>'begin_time') {dir} NULLS LAST")
        }
        Some(SortColumn::Seats) => {
            format!("(max_enrollment - enrollment) {dir}")
        }
        None => "subject ASC, course_number ASC, sequence_number ASC".to_string(),
    }
}

#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
@@ -404,12 +477,14 @@ pub struct CourseResponse {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorResponse {
    instructor_id: i32,
    banner_id: String,
    display_name: String,
    email: Option<String>,
    email: String,
    is_primary: bool,
    rmp_rating: Option<f32>,
    rmp_num_ratings: Option<i32>,
    rmp_legacy_id: Option<i32>,
}

#[derive(Serialize, TS)]
@@ -430,27 +505,23 @@ pub struct CodeDescription {
    description: String,
}

/// Build a `CourseResponse` from a DB course, fetching its instructors.
async fn build_course_response(
/// Build a `CourseResponse` from a DB course with pre-fetched instructor details.
fn build_course_response(
    course: &crate::data::models::Course,
    db_pool: &sqlx::PgPool,
    instructors: Vec<crate::data::models::CourseInstructorDetail>,
) -> CourseResponse {
    let instructors = crate::data::courses::get_course_instructors(db_pool, course.id)
        .await
        .unwrap_or_default()
    let instructors = instructors
        .into_iter()
        .map(
            |(banner_id, display_name, email, is_primary, rmp_rating, rmp_num_ratings)| {
                InstructorResponse {
                    banner_id,
                    display_name,
                    email,
                    is_primary,
                    rmp_rating,
                    rmp_num_ratings,
                }
            },
        )
        .map(|i| InstructorResponse {
            instructor_id: i.instructor_id,
            banner_id: i.banner_id,
            display_name: i.display_name,
            email: i.email,
            is_primary: i.is_primary,
            rmp_rating: i.avg_rating,
            rmp_num_ratings: i.num_ratings,
            rmp_legacy_id: i.rmp_legacy_id,
        })
        .collect();

    CourseResponse {
@@ -484,17 +555,19 @@ async fn build_course_response(
/// `GET /api/courses/search`
async fn search_courses(
    State(state): State<AppState>,
    Query(params): Query<SearchParams>,
    axum_extra::extract::Query(params): axum_extra::extract::Query<SearchParams>,
) -> Result<Json<SearchResponse>, (AxumStatusCode, String)> {
    let limit = params.limit.clamp(1, 100);
    let offset = params.offset.max(0);

    let order_by = sort_clause(params.sort_by, params.sort_dir);

    let (courses, total_count) = crate::data::courses::search_courses(
        &state.db_pool,
        &params.term,
        params.subject.as_deref(),
        if params.subject.is_empty() {
            None
        } else {
            Some(&params.subject)
        },
        params.q.as_deref(),
        params.course_number_low,
        params.course_number_high,
@@ -503,7 +576,8 @@ async fn search_courses(
        params.campus.as_deref(),
        limit,
        offset,
        &order_by,
        params.sort_by,
        params.sort_dir,
    )
    .await
    .map_err(|e| {
@@ -514,10 +588,20 @@ async fn search_courses(
        )
    })?;

    let mut course_responses = Vec::with_capacity(courses.len());
    for course in &courses {
        course_responses.push(build_course_response(course, &state.db_pool).await);
    }
    // Batch-fetch all instructors in a single query instead of N+1
    let course_ids: Vec<i32> = courses.iter().map(|c| c.id).collect();
    let mut instructor_map =
        crate::data::courses::get_instructors_for_courses(&state.db_pool, &course_ids)
            .await
            .unwrap_or_default();

    let course_responses: Vec<CourseResponse> = courses
        .iter()
        .map(|course| {
            let instructors = instructor_map.remove(&course.id).unwrap_or_default();
            build_course_response(course, instructors)
        })
        .collect();

    Ok(Json(SearchResponse {
        courses: course_responses,
@@ -543,7 +627,10 @@ async fn get_course(
    })?
    .ok_or_else(|| (AxumStatusCode::NOT_FOUND, "Course not found".to_string()))?;

    Ok(Json(build_course_response(&course, &state.db_pool).await))
    let instructors = crate::data::courses::get_course_instructors(&state.db_pool, course.id)
        .await
        .unwrap_or_default();
    Ok(Json(build_course_response(&course, instructors)))
}

/// `GET /api/terms`
@@ -575,19 +662,24 @@ async fn get_terms(
    Ok(Json(terms))
}

/// `GET /api/subjects?term=202420`
/// `GET /api/subjects?term=202620`
async fn get_subjects(
    State(state): State<AppState>,
    Query(params): Query<SubjectsParams>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
    let cache = state.reference_cache.read().await;
    let entries = cache.entries_for_category("subject");
    let rows = crate::data::courses::get_subjects_by_enrollment(&state.db_pool, &params.term)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "Failed to get subjects");
            (
                AxumStatusCode::INTERNAL_SERVER_ERROR,
                "Failed to get subjects".to_string(),
            )
        })?;

    let subjects: Vec<CodeDescription> = entries
    let subjects: Vec<CodeDescription> = rows
        .into_iter()
        .map(|(code, description)| CodeDescription {
            code: code.to_string(),
            description: description.to_string(),
        })
        .map(|(code, description, _enrollment)| CodeDescription { code, description })
        .collect();

    Ok(Json(subjects))

@@ -0,0 +1,443 @@
//! ISR-style schedule cache for timeline enrollment queries.
//!
//! Loads all courses with their meeting times from the database, parses the
//! JSONB meeting times into a compact in-memory representation, and caches
//! the result. The cache is refreshed in the background every hour using a
//! stale-while-revalidate pattern with singleflight deduplication — readers
//! always get the current cached value instantly, never blocking on a refresh.

use chrono::NaiveDate;
use serde_json::Value;
use sqlx::PgPool;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use tokio::sync::watch;
use tracing::{debug, error, info};

/// How often the cache is considered fresh (1 hour).
const REFRESH_INTERVAL: std::time::Duration = std::time::Duration::from_secs(60 * 60);

// ── Compact schedule representation ─────────────────────────────────

/// A single meeting time block, pre-parsed for fast filtering.
#[derive(Debug, Clone)]
pub(crate) struct ParsedSchedule {
    /// Bitmask of days: bit 0 = Monday, bit 6 = Sunday.
    days: u8,
    /// Minutes since midnight for start (e.g. 600 = 10:00).
    begin_minutes: u16,
    /// Minutes since midnight for end (e.g. 650 = 10:50).
    end_minutes: u16,
    /// First day the meeting pattern is active.
    start_date: NaiveDate,
    /// Last day the meeting pattern is active.
    end_date: NaiveDate,
}

/// A course with its enrollment and pre-parsed schedule blocks.
#[derive(Debug, Clone)]
pub(crate) struct CachedCourse {
    pub(crate) subject: String,
    pub(crate) enrollment: i32,
    pub(crate) schedules: Vec<ParsedSchedule>,
}

/// The immutable snapshot of all courses, swapped atomically on refresh.
#[derive(Debug, Clone)]
pub(crate) struct ScheduleSnapshot {
    pub(crate) courses: Vec<CachedCourse>,
    refreshed_at: std::time::Instant,
}

// ── Cache handle ────────────────────────────────────────────────────

/// Shared schedule cache. Clone-cheap (all `Arc`-wrapped internals).
#[derive(Clone)]
pub struct ScheduleCache {
    /// Current snapshot, updated via `watch` channel for lock-free reads.
    rx: watch::Receiver<Arc<ScheduleSnapshot>>,
    /// Sender side, held to push new snapshots.
    tx: Arc<watch::Sender<Arc<ScheduleSnapshot>>>,
    /// Singleflight guard — true while a refresh task is in flight.
    refreshing: Arc<AtomicBool>,
    /// Database pool for refresh queries.
    pool: PgPool,
}

impl ScheduleCache {
    /// Create a new cache with an empty initial snapshot.
    pub(crate) fn new(pool: PgPool) -> Self {
        let empty = Arc::new(ScheduleSnapshot {
            courses: Vec::new(),
            refreshed_at: std::time::Instant::now(),
        });
        let (tx, rx) = watch::channel(empty);
        Self {
            rx,
            tx: Arc::new(tx),
            refreshing: Arc::new(AtomicBool::new(false)),
            pool,
        }
    }

    /// Get the current snapshot. Never blocks on refresh.
    pub(crate) fn snapshot(&self) -> Arc<ScheduleSnapshot> {
        self.rx.borrow().clone()
    }

    /// Check freshness and trigger a background refresh if stale.
    /// Always returns immediately — the caller uses the current snapshot.
    pub(crate) fn ensure_fresh(&self) {
        let snap = self.rx.borrow();
        if snap.refreshed_at.elapsed() < REFRESH_INTERVAL {
            return;
        }
        // Singleflight: only one refresh at a time.
        if self
            .refreshing
            .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
            .is_err()
        {
            debug!("Schedule cache refresh already in flight, skipping");
            return;
        }
        let cache = self.clone();
        tokio::spawn(async move {
            match load_snapshot(&cache.pool).await {
                Ok(snap) => {
                    let count = snap.courses.len();
                    let _ = cache.tx.send(Arc::new(snap));
                    info!(courses = count, "Schedule cache refreshed");
                }
                Err(e) => {
                    error!(error = %e, "Failed to refresh schedule cache");
                }
            }
            cache.refreshing.store(false, Ordering::Release);
        });
    }

    /// Force an initial load (blocking). Call once at startup.
    pub(crate) async fn load(&self) -> anyhow::Result<()> {
        let snap = load_snapshot(&self.pool).await?;
        let count = snap.courses.len();
        let _ = self.tx.send(Arc::new(snap));
        info!(courses = count, "Schedule cache initially loaded");
        Ok(())
    }
}
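
The typical read path, as a sketch (function name illustrative; real callers presumably live in the timeline handler, and this must run inside the Tokio runtime since a stale cache spawns its refresh task):

fn total_enrollment(cache: &ScheduleCache) -> i64 {
    cache.ensure_fresh(); // non-blocking; may kick off a background reload
    let snap = cache.snapshot(); // lock-free read of the current snapshot
    snap.courses.iter().map(|c| c.enrollment as i64).sum()
}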
|
||||
|
||||
// ── Database loading ────────────────────────────────────────────────
|
||||
|
||||
/// Row returned from the lightweight schedule query.
|
||||
#[derive(sqlx::FromRow)]
|
||||
struct ScheduleRow {
|
||||
subject: String,
|
||||
enrollment: i32,
|
||||
meeting_times: Value,
|
||||
}
|
||||
|
||||
/// Load all courses and parse their meeting times into a snapshot.
|
||||
async fn load_snapshot(pool: &PgPool) -> anyhow::Result<ScheduleSnapshot> {
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
let rows: Vec<ScheduleRow> =
|
||||
sqlx::query_as("SELECT subject, enrollment, meeting_times FROM courses")
|
||||
.fetch_all(pool)
|
||||
.await?;
|
||||
|
||||
let courses: Vec<CachedCourse> = rows
|
||||
.into_iter()
|
||||
.map(|row| {
|
||||
let schedules = parse_meeting_times(&row.meeting_times);
|
||||
CachedCourse {
|
||||
subject: row.subject,
|
||||
enrollment: row.enrollment,
|
||||
schedules,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
debug!(
|
||||
courses = courses.len(),
|
||||
elapsed_ms = start.elapsed().as_millis(),
|
||||
"Schedule snapshot built"
|
||||
);
|
||||
|
||||
Ok(ScheduleSnapshot {
|
||||
courses,
|
||||
refreshed_at: std::time::Instant::now(),
|
||||
})
|
||||
}
|
||||
|
||||
// ── Meeting time parsing ────────────────────────────────────────────
|
||||
|
||||
/// Parse the JSONB `meeting_times` array into compact `ParsedSchedule` values.
|
||||
fn parse_meeting_times(value: &Value) -> Vec<ParsedSchedule> {
|
||||
let Value::Array(arr) = value else {
|
||||
return Vec::new();
|
||||
};
|
||||
|
||||
arr.iter().filter_map(parse_one_meeting).collect()
|
||||
}
|
||||
|
||||
fn parse_one_meeting(mt: &Value) -> Option<ParsedSchedule> {
|
||||
let begin_time = mt.get("begin_time")?.as_str()?;
|
||||
let end_time = mt.get("end_time")?.as_str()?;
|
||||
|
||||
let begin_minutes = parse_hhmm(begin_time)?;
|
||||
let end_minutes = parse_hhmm(end_time)?;
|
||||
|
||||
if end_minutes <= begin_minutes {
|
||||
return None;
|
||||
}
|
||||
|
||||
let start_date = parse_date(mt.get("start_date")?.as_str()?)?;
|
||||
let end_date = parse_date(mt.get("end_date")?.as_str()?)?;
|
||||
|
||||
const DAY_KEYS: [&str; 7] = [
|
||||
"monday",
|
||||
"tuesday",
|
||||
"wednesday",
|
||||
"thursday",
|
||||
"friday",
|
||||
"saturday",
|
||||
"sunday",
|
||||
];
|
||||
let mut days: u8 = 0;
|
||||
for (bit, key) in DAY_KEYS.iter().enumerate() {
|
||||
if mt.get(*key).and_then(Value::as_bool).unwrap_or(false) {
|
||||
days |= 1 << bit;
|
||||
}
|
||||
}
|
||||
|
||||
// Skip meetings with no days (online async, etc.)
|
||||
if days == 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(ParsedSchedule {
|
||||
days,
|
||||
begin_minutes,
|
||||
end_minutes,
|
||||
start_date,
|
||||
end_date,
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse "HHMM" → minutes since midnight.
|
||||
fn parse_hhmm(s: &str) -> Option<u16> {
|
||||
if s.len() != 4 {
|
||||
return None;
|
||||
}
|
||||
let hours: u16 = s[..2].parse().ok()?;
|
||||
let mins: u16 = s[2..].parse().ok()?;
|
||||
if hours >= 24 || mins >= 60 {
|
||||
return None;
|
||||
}
|
||||
Some(hours * 60 + mins)
|
||||
}
|
||||
|
||||
/// Parse "MM/DD/YYYY" → NaiveDate.
|
||||
fn parse_date(s: &str) -> Option<NaiveDate> {
|
||||
NaiveDate::parse_from_str(s, "%m/%d/%Y").ok()
|
||||
}
|
||||
|
||||
// ── Slot matching ───────────────────────────────────────────────────
|
||||
|
||||
/// Day-of-week as our bitmask index (Monday = 0 .. Sunday = 6).
|
||||
/// Chrono's `weekday().num_days_from_monday()` already gives 0=Mon..6=Sun.
|
||||
pub(crate) fn weekday_bit(day: chrono::Weekday) -> u8 {
|
||||
1 << day.num_days_from_monday()
|
||||
}

impl ParsedSchedule {
    /// Check if this schedule is active during a given slot.
    ///
    /// `slot_date` is the calendar date of the slot, and `slot_weekday_bit`
    /// its precomputed `weekday_bit`. `slot_start_minutes` / `slot_end_minutes`
    /// are minutes since midnight for the 15-min window.
    #[inline]
    pub(crate) fn active_during(
        &self,
        slot_date: NaiveDate,
        slot_weekday_bit: u8,
        slot_start_minutes: u16,
        slot_end_minutes: u16,
    ) -> bool {
        // Day-of-week check
        if self.days & slot_weekday_bit == 0 {
            return false;
        }
        // Date range check
        if slot_date < self.start_date || slot_date > self.end_date {
            return false;
        }
        // Time overlap: meeting [begin, end) overlaps slot [start, end)
        self.begin_minutes < slot_end_minutes && self.end_minutes > slot_start_minutes
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use chrono::NaiveDate;
    use serde_json::json;

    #[test]
    fn parse_hhmm_valid() {
        assert_eq!(parse_hhmm("0000"), Some(0));
        assert_eq!(parse_hhmm("0930"), Some(570));
        assert_eq!(parse_hhmm("1350"), Some(830));
        assert_eq!(parse_hhmm("2359"), Some(1439));
    }

    #[test]
    fn parse_hhmm_invalid() {
        assert_eq!(parse_hhmm(""), None);
        assert_eq!(parse_hhmm("abc"), None);
        assert_eq!(parse_hhmm("2500"), None);
        assert_eq!(parse_hhmm("0060"), None);
    }

    #[test]
    fn parse_date_valid() {
        assert_eq!(
            parse_date("08/26/2025"),
            Some(NaiveDate::from_ymd_opt(2025, 8, 26).unwrap())
        );
    }

    #[test]
    fn parse_meeting_times_basic() {
        let json = json!([{
            "begin_time": "1000",
            "end_time": "1050",
            "start_date": "08/26/2025",
            "end_date": "12/13/2025",
            "monday": true,
            "tuesday": false,
            "wednesday": true,
            "thursday": false,
            "friday": true,
            "saturday": false,
            "sunday": false,
            "building": "NPB",
            "building_description": "North Paseo Building",
            "room": "1.238",
            "campus": "11",
            "meeting_type": "FF",
            "meeting_schedule_type": "AFF"
        }]);

        let schedules = parse_meeting_times(&json);
        assert_eq!(schedules.len(), 1);

        let s = &schedules[0];
        assert_eq!(s.begin_minutes, 600); // 10:00
        assert_eq!(s.end_minutes, 650); // 10:50
        assert_eq!(s.days, 0b0010101); // Mon, Wed, Fri
    }

    #[test]
    fn parse_meeting_times_skips_null_times() {
        let json = json!([{
            "begin_time": null,
            "end_time": null,
            "start_date": "08/26/2025",
            "end_date": "12/13/2025",
            "monday": false,
            "tuesday": false,
            "wednesday": false,
            "thursday": false,
            "friday": false,
            "saturday": false,
            "sunday": false,
            "meeting_type": "OS",
            "meeting_schedule_type": "AFF"
        }]);

        let schedules = parse_meeting_times(&json);
        assert!(schedules.is_empty());
    }

    #[test]
    fn active_during_matching_slot() {
        let sched = ParsedSchedule {
            days: 0b0000001, // Monday
            begin_minutes: 600,
            end_minutes: 650,
            start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
            end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
        };

        // Monday Sept 1 2025, 10:00-10:15 slot
        let date = NaiveDate::from_ymd_opt(2025, 9, 1).unwrap();
        assert!(sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 600, 615));
    }

    #[test]
    fn active_during_wrong_day() {
        let sched = ParsedSchedule {
            days: 0b0000001, // Monday only
            begin_minutes: 600,
            end_minutes: 650,
            start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
            end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
        };

        // Tuesday Sept 2 2025
        let date = NaiveDate::from_ymd_opt(2025, 9, 2).unwrap();
        assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Tue), 600, 615));
    }

    #[test]
    fn active_during_no_time_overlap() {
        let sched = ParsedSchedule {
            days: 0b0000001,
            begin_minutes: 600, // 10:00
            end_minutes: 650,   // 10:50
            start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
            end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
        };

        let date = NaiveDate::from_ymd_opt(2025, 9, 1).unwrap(); // Monday
        // Slot 11:00-11:15 — after the meeting ends
        assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 660, 675));
        // Slot 9:45-10:00 — just before meeting starts (end=600, begin=600 → no overlap)
        assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 585, 600));
    }

    #[test]
    fn active_during_outside_date_range() {
        let sched = ParsedSchedule {
            days: 0b0000001,
            begin_minutes: 600,
            end_minutes: 650,
            start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
            end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
        };

        // Monday Jan 6 2025 — before semester
        let date = NaiveDate::from_ymd_opt(2025, 1, 6).unwrap();
        assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 600, 615));
    }

    #[test]
    fn active_during_edge_overlap() {
        let sched = ParsedSchedule {
            days: 0b0000001,
            begin_minutes: 600,
            end_minutes: 650,
            start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
            end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
        };

        let date = NaiveDate::from_ymd_opt(2025, 9, 1).unwrap();
        // Slot 10:45-11:00 — overlaps last 5 minutes of meeting
        assert!(sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 645, 660));
        // Slot 9:45-10:00 — ends exactly when meeting starts, no overlap
        assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 585, 600));
        // Slot 10:50-11:05 — starts exactly when meeting ends, no overlap
        assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 650, 665));
    }
}
@@ -0,0 +1,187 @@
//! In-memory caches for session resolution and OAuth CSRF state.

use chrono::{DateTime, Utc};
use dashmap::DashMap;
use rand::Rng;
use sqlx::PgPool;
use std::sync::Arc;
use std::time::{Duration, Instant};

use crate::data::models::User;

/// Cached session entry with TTL.
#[derive(Debug, Clone)]
struct CachedSession {
    user: User,
    session_expires_at: DateTime<Utc>,
    cached_at: Instant,
}

/// In-memory session cache backed by PostgreSQL.
///
/// Provides fast session resolution without a DB round-trip on every request.
/// Cache entries expire after a configurable TTL (default 5 minutes).
#[derive(Clone)]
pub struct SessionCache {
    cache: Arc<DashMap<String, CachedSession>>,
    db_pool: PgPool,
    cache_ttl: Duration,
}

impl SessionCache {
    /// Create a new session cache with a 5-minute default TTL.
    pub fn new(db_pool: PgPool) -> Self {
        Self {
            cache: Arc::new(DashMap::new()),
            db_pool,
            cache_ttl: Duration::from_secs(5 * 60),
        }
    }

    /// Resolve a session token to a [`User`], using the cache when possible.
    ///
    /// On cache hit (entry present, not stale, session not expired), returns the
    /// cached user immediately. On miss or stale entry, queries the database for
    /// the session and user, populates the cache, and fire-and-forgets a
    /// `touch_session` call to update `last_active_at`.
    pub async fn get_user(&self, token: &str) -> Option<User> {
        // Check cache first
        if let Some(entry) = self.cache.get(token) {
            let now_instant = Instant::now();
            let now_utc = Utc::now();

            let cache_fresh = entry.cached_at + self.cache_ttl > now_instant;
            let session_valid = entry.session_expires_at > now_utc;

            if cache_fresh && session_valid {
                return Some(entry.user.clone());
            }

            // Stale or expired — drop the ref before removing
            drop(entry);
            self.cache.remove(token);
        }

        // Cache miss — query DB
        let session = crate::data::sessions::get_session(&self.db_pool, token)
            .await
            .ok()
            .flatten()?;

        let user = crate::data::users::get_user(&self.db_pool, session.user_id)
            .await
            .ok()
            .flatten()?;

        self.cache.insert(
            token.to_owned(),
            CachedSession {
                user: user.clone(),
                session_expires_at: session.expires_at,
                cached_at: Instant::now(),
            },
        );

        // Fire-and-forget touch to update last_active_at
        let pool = self.db_pool.clone();
        let token_owned = token.to_owned();
        tokio::spawn(async move {
            if let Err(e) = crate::data::sessions::touch_session(&pool, &token_owned).await {
                tracing::warn!(error = %e, "failed to touch session");
            }
        });

        Some(user)
    }

    /// Remove a single session from the cache (e.g. on logout).
    pub fn evict(&self, token: &str) {
        self.cache.remove(token);
    }

    /// Remove all cached sessions belonging to a user.
    pub fn evict_user(&self, discord_id: i64) {
        self.cache
            .retain(|_, entry| entry.user.discord_id != discord_id);
    }

    /// Delete expired sessions from the database and sweep the in-memory cache.
    ///
    /// Returns the number of sessions deleted from the database.
    pub async fn cleanup_expired(&self) -> anyhow::Result<u64> {
        let deleted = crate::data::sessions::cleanup_expired(&self.db_pool).await?;

        let now = Utc::now();
        self.cache.retain(|_, entry| entry.session_expires_at > now);

        Ok(deleted)
    }
}
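
// A minimal usage sketch of the cache above (how the bearer token reaches
// this point is assumed; only `get_user` / `evict` are exercised here).
async fn _session_cache_sketch(cache: &SessionCache, bearer_token: &str) {
    if let Some(user) = cache.get_user(bearer_token).await {
        tracing::debug!(discord_id = user.discord_id, "session resolved");
    }
    // On logout, evict immediately so a stale entry cannot outlive the
    // database row for up to one TTL.
    cache.evict(bearer_token);
}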

/// Data stored alongside each OAuth CSRF state token.
struct OAuthStateEntry {
    created_at: Instant,
    /// The browser origin that initiated the login flow, so the callback
    /// can reconstruct the exact redirect_uri Discord expects.
    origin: String,
}

/// Ephemeral store for OAuth CSRF state tokens.
///
/// Tokens are stored with creation time and expire after a configurable TTL.
/// Each token is single-use: validation consumes it.
#[derive(Clone)]
pub struct OAuthStateStore {
    states: Arc<DashMap<String, OAuthStateEntry>>,
    ttl: Duration,
}

impl Default for OAuthStateStore {
    fn default() -> Self {
        Self::new()
    }
}

impl OAuthStateStore {
    /// Create a new store with a 10-minute TTL.
    pub fn new() -> Self {
        Self {
            states: Arc::new(DashMap::new()),
            ttl: Duration::from_secs(10 * 60),
        }
    }

    /// Generate a random 16-byte hex CSRF token, store it with the given
    /// origin, and return the token.
    pub fn generate(&self, origin: String) -> String {
        let bytes: [u8; 16] = rand::rng().random();
        let token: String = bytes.iter().map(|b| format!("{b:02x}")).collect();
        self.states.insert(
            token.clone(),
            OAuthStateEntry {
                created_at: Instant::now(),
                origin,
            },
        );
        token
    }

    /// Validate and consume a CSRF token. Returns the stored origin if the
    /// token was present and not expired.
    pub fn validate(&self, state: &str) -> Option<String> {
        let (_, entry) = self.states.remove(state)?;
        if entry.created_at.elapsed() < self.ttl {
            Some(entry.origin)
        } else {
            None
        }
    }

    /// Remove all expired entries from the store.
    #[allow(dead_code)] // Intended for periodic cleanup task (not yet wired)
    pub fn cleanup(&self) {
        let ttl = self.ttl;
        self.states
            .retain(|_, entry| entry.created_at.elapsed() < ttl);
    }
}
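
// Round-trip sketch of the single-use semantics (the origin value is made
// up): a state token validates exactly once, then is consumed, so a second
// `validate` call returns `None` even within the TTL.
fn _oauth_state_sketch() {
    let store = OAuthStateStore::new();
    let token = store.generate("https://banner.example.com".to_string());
    assert_eq!(
        store.validate(&token).as_deref(),
        Some("https://banner.example.com")
    );
    assert!(store.validate(&token).is_none());
}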
@@ -0,0 +1,258 @@
//! Timeline API endpoint for enrollment aggregation by subject over time.
//!
//! Accepts multiple time ranges, merges overlaps, aligns to 15-minute
//! slot boundaries, and returns per-subject enrollment totals for each slot.
//! Only courses whose meeting times overlap a given slot contribute to that
//! slot's totals — so the chart reflects the actual class schedule rhythm.
//!
//! Course data is served from an ISR-style in-memory cache (see
//! [`ScheduleCache`]) that refreshes hourly in the background with
//! stale-while-revalidate semantics.

use axum::{
    extract::State,
    http::StatusCode,
    response::{IntoResponse, Json, Response},
};
use chrono::{DateTime, Datelike, Duration, NaiveTime, Timelike, Utc};
use chrono_tz::US::Central;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, BTreeSet};
use ts_rs::TS;

use crate::state::AppState;
use crate::web::schedule_cache::weekday_bit;

/// 15 minutes in seconds, matching the frontend `SLOT_INTERVAL_MS`.
const SLOT_SECONDS: i64 = 15 * 60;
const SLOT_MINUTES: u16 = 15;

/// Maximum number of ranges in a single request.
const MAX_RANGES: usize = 20;

/// Maximum span of a single range (72 hours).
const MAX_RANGE_SPAN: Duration = Duration::hours(72);

/// Maximum total span across all ranges to prevent excessive queries.
const MAX_TOTAL_SPAN: Duration = Duration::hours(168); // 1 week

// ── Request / Response types ────────────────────────────────────────

#[derive(Debug, Deserialize)]
pub(crate) struct TimelineRequest {
    ranges: Vec<TimeRange>,
}

#[derive(Debug, Deserialize)]
pub(crate) struct TimeRange {
    start: DateTime<Utc>,
    end: DateTime<Utc>,
}
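
// Wire-shape sketch for the request body (values invented): timestamps are
// RFC 3339 strings, which is what chrono's `DateTime<Utc>` deserializer
// expects.
fn _timeline_request_sketch() {
    let body =
        r#"{"ranges":[{"start":"2025-09-01T14:00:00Z","end":"2025-09-01T20:00:00Z"}]}"#;
    let req: TimelineRequest = serde_json::from_str(body).unwrap();
    assert_eq!(req.ranges.len(), 1);
}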

#[derive(Debug, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TimelineResponse {
    /// 15-minute slots with per-subject enrollment totals, sorted by time.
    slots: Vec<TimelineSlot>,
    /// All subject codes present in the returned data.
    subjects: Vec<String>,
}

#[derive(Debug, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TimelineSlot {
    /// ISO-8601 timestamp at the start of this 15-minute bucket.
    time: DateTime<Utc>,
    /// Subject code → total enrollment in this slot.
    subjects: BTreeMap<String, i64>,
}

// ── Error type ──────────────────────────────────────────────────────

pub(crate) struct TimelineError {
    status: StatusCode,
    message: String,
}

impl TimelineError {
    fn bad_request(msg: impl Into<String>) -> Self {
        Self {
            status: StatusCode::BAD_REQUEST,
            message: msg.into(),
        }
    }
}

impl IntoResponse for TimelineError {
    fn into_response(self) -> Response {
        (
            self.status,
            Json(serde_json::json!({ "error": self.message })),
        )
            .into_response()
    }
}

// ── Alignment helpers ───────────────────────────────────────────────

/// Floor a timestamp to the nearest 15-minute boundary.
fn align_floor(ts: DateTime<Utc>) -> DateTime<Utc> {
    let secs = ts.timestamp();
    let aligned = (secs / SLOT_SECONDS) * SLOT_SECONDS;
    DateTime::from_timestamp(aligned, 0).unwrap_or(ts)
}

/// Ceil a timestamp to the nearest 15-minute boundary.
fn align_ceil(ts: DateTime<Utc>) -> DateTime<Utc> {
    let secs = ts.timestamp();
    let aligned = ((secs + SLOT_SECONDS - 1) / SLOT_SECONDS) * SLOT_SECONDS;
    DateTime::from_timestamp(aligned, 0).unwrap_or(ts)
}
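
// Worked example for the two helpers above (date invented): 12:07:30 floors
// to 12:00 and ceils to 12:15 on the 900-second grid.
fn _alignment_sketch() {
    use chrono::TimeZone;
    let ts = Utc.with_ymd_and_hms(2025, 9, 1, 12, 7, 30).unwrap();
    assert_eq!(
        align_floor(ts),
        Utc.with_ymd_and_hms(2025, 9, 1, 12, 0, 0).unwrap()
    );
    assert_eq!(
        align_ceil(ts),
        Utc.with_ymd_and_hms(2025, 9, 1, 12, 15, 0).unwrap()
    );
}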

// ── Range merging ───────────────────────────────────────────────────

/// Aligned, validated range.
#[derive(Debug, Clone, Copy)]
struct AlignedRange {
    start: DateTime<Utc>,
    end: DateTime<Utc>,
}

/// Merge overlapping/adjacent ranges into a minimal set.
fn merge_ranges(mut ranges: Vec<AlignedRange>) -> Vec<AlignedRange> {
    if ranges.is_empty() {
        return ranges;
    }
    ranges.sort_by_key(|r| r.start);
    let mut merged: Vec<AlignedRange> = vec![ranges[0]];
    for r in &ranges[1..] {
        let last = merged.last_mut().unwrap();
        if r.start <= last.end {
            last.end = last.end.max(r.end);
        } else {
            merged.push(*r);
        }
    }
    merged
}
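
// Sketch of the merge semantics (times invented): overlapping and exactly
// adjacent ranges collapse (`r.start <= last.end`), disjoint ones survive.
fn _merge_ranges_sketch() {
    use chrono::TimeZone;
    let t = |h: u32| Utc.with_ymd_and_hms(2025, 9, 1, h, 0, 0).unwrap();
    let merged = merge_ranges(vec![
        AlignedRange { start: t(9), end: t(11) },
        AlignedRange { start: t(11), end: t(12) }, // adjacent → merged
        AlignedRange { start: t(14), end: t(15) }, // disjoint → kept
    ]);
    assert_eq!(merged.len(), 2);
    assert_eq!((merged[0].start, merged[0].end), (t(9), t(12)));
}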

/// Generate all aligned slot timestamps within the merged ranges.
fn generate_slots(merged: &[AlignedRange]) -> BTreeSet<DateTime<Utc>> {
    let mut slots = BTreeSet::new();
    for range in merged {
        let mut t = range.start;
        while t < range.end {
            slots.insert(t);
            t += Duration::seconds(SLOT_SECONDS);
        }
    }
    slots
}

// ── Handler ─────────────────────────────────────────────────────────

/// `POST /api/timeline`
///
/// Accepts a JSON body with multiple time ranges. Returns per-subject
/// enrollment totals bucketed into 15-minute slots. Only courses whose
/// meeting schedule overlaps a slot contribute to that slot's count.
pub(crate) async fn timeline(
    State(state): State<AppState>,
    Json(body): Json<TimelineRequest>,
) -> Result<Json<TimelineResponse>, TimelineError> {
    // ── Validate ────────────────────────────────────────────────────
    if body.ranges.is_empty() {
        return Err(TimelineError::bad_request("At least one range is required"));
    }
    if body.ranges.len() > MAX_RANGES {
        return Err(TimelineError::bad_request(format!(
            "Too many ranges (max {MAX_RANGES})"
        )));
    }

    let mut aligned: Vec<AlignedRange> = Vec::with_capacity(body.ranges.len());
    for r in &body.ranges {
        if r.end <= r.start {
            return Err(TimelineError::bad_request(format!(
                "Range end ({}) must be after start ({})",
                r.end, r.start
            )));
        }
        let span = r.end - r.start;
        if span > MAX_RANGE_SPAN {
            return Err(TimelineError::bad_request(format!(
                "Range span ({} hours) exceeds maximum ({} hours)",
                span.num_hours(),
                MAX_RANGE_SPAN.num_hours()
            )));
        }
        aligned.push(AlignedRange {
            start: align_floor(r.start),
            end: align_ceil(r.end),
        });
    }

    let merged = merge_ranges(aligned);

    // Validate total span
    let total_span: Duration = merged.iter().map(|r| r.end - r.start).sum();
    if total_span > MAX_TOTAL_SPAN {
        return Err(TimelineError::bad_request(format!(
            "Total time span ({} hours) exceeds maximum ({} hours)",
            total_span.num_hours(),
            MAX_TOTAL_SPAN.num_hours()
        )));
    }

    // ── Get cached schedule data (ISR: stale-while-revalidate) ───────
    state.schedule_cache.ensure_fresh();
    let snapshot = state.schedule_cache.snapshot();

    // ── Build per-slot enrollment by filtering on meeting times ──────
    let slot_times = generate_slots(&merged);
    let mut all_subjects: BTreeSet<String> = BTreeSet::new();

    let slots: Vec<TimelineSlot> = slot_times
        .into_iter()
        .map(|utc_time| {
            // Convert UTC slot to Central time for local day-of-week and time-of-day
            let local = utc_time.with_timezone(&Central);
            let local_date = local.date_naive();
            let local_time = local.time();
            let weekday = local.weekday();
            let wday_bit = weekday_bit(weekday);
            let slot_start_minutes = time_to_minutes(local_time);
            let slot_end_minutes = slot_start_minutes + SLOT_MINUTES;

            let mut subject_totals: BTreeMap<String, i64> = BTreeMap::new();

            for course in &snapshot.courses {
                let active = course.schedules.iter().any(|s| {
                    s.active_during(local_date, wday_bit, slot_start_minutes, slot_end_minutes)
                });
                if active {
                    *subject_totals.entry(course.subject.clone()).or_default() +=
                        course.enrollment as i64;
                }
            }

            all_subjects.extend(subject_totals.keys().cloned());

            TimelineSlot {
                time: utc_time,
                subjects: subject_totals,
            }
        })
        .collect();

    let subjects: Vec<String> = all_subjects.into_iter().collect();

    Ok(Json(TimelineResponse { slots, subjects }))
}

/// Convert a `NaiveTime` to minutes since midnight.
fn time_to_minutes(t: NaiveTime) -> u16 {
    (t.hour() * 60 + t.minute()) as u16
}
+205
@@ -0,0 +1,205 @@
//! WebSocket event types and handler for real-time scrape job updates.

use axum::{
    extract::{
        State,
        ws::{Message, WebSocket, WebSocketUpgrade},
    },
    response::IntoResponse,
};
use futures::{SinkExt, StreamExt};
use serde::Serialize;
use sqlx::PgPool;
use tokio::sync::broadcast;
use tracing::debug;

use crate::data::models::{ScrapeJob, ScrapeJobStatus};
use crate::state::AppState;
use crate::web::extractors::AdminUser;

/// A serializable DTO for `ScrapeJob` with computed `status`.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ScrapeJobDto {
    pub id: i32,
    pub target_type: String,
    pub target_payload: serde_json::Value,
    pub priority: String,
    pub execute_at: String,
    pub created_at: String,
    pub locked_at: Option<String>,
    pub retry_count: i32,
    pub max_retries: i32,
    pub queued_at: String,
    pub status: ScrapeJobStatus,
}

impl From<&ScrapeJob> for ScrapeJobDto {
    fn from(job: &ScrapeJob) -> Self {
        Self {
            id: job.id,
            target_type: format!("{:?}", job.target_type),
            target_payload: job.target_payload.clone(),
            priority: format!("{:?}", job.priority),
            execute_at: job.execute_at.to_rfc3339(),
            created_at: job.created_at.to_rfc3339(),
            locked_at: job.locked_at.map(|t| t.to_rfc3339()),
            retry_count: job.retry_count,
            max_retries: job.max_retries,
            queued_at: job.queued_at.to_rfc3339(),
            status: job.status(),
        }
    }
}

/// Events broadcast when scrape job state changes.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum ScrapeJobEvent {
    Init {
        jobs: Vec<ScrapeJobDto>,
    },
    JobCreated {
        job: ScrapeJobDto,
    },
    JobLocked {
        id: i32,
        locked_at: String,
        status: ScrapeJobStatus,
    },
    JobCompleted {
        id: i32,
    },
    JobRetried {
        id: i32,
        retry_count: i32,
        queued_at: String,
        status: ScrapeJobStatus,
    },
    JobExhausted {
        id: i32,
    },
    JobDeleted {
        id: i32,
    },
}
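
// Wire-shape sketch: with an internally tagged enum, serde camelCases the
// variant *tag* (from the enum-level `rename_all`) while each variant's own
// fields keep their declared names.
fn _event_wire_shape_sketch() {
    let json = serde_json::to_string(&ScrapeJobEvent::JobDeleted { id: 42 }).unwrap();
    assert_eq!(json, r#"{"type":"jobDeleted","id":42}"#);
}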

/// Fetch current scrape jobs from the DB and build an `Init` event.
async fn build_init_event(db_pool: &PgPool) -> Result<ScrapeJobEvent, sqlx::Error> {
    let rows = sqlx::query_as::<_, ScrapeJob>(
        "SELECT * FROM scrape_jobs ORDER BY priority DESC, execute_at ASC LIMIT 100",
    )
    .fetch_all(db_pool)
    .await?;

    let jobs = rows.iter().map(ScrapeJobDto::from).collect();
    Ok(ScrapeJobEvent::Init { jobs })
}

/// WebSocket endpoint for real-time scrape job updates.
///
/// Auth is checked via `AdminUser` before the upgrade occurs — if rejected,
/// a 401/403 is returned and the upgrade never happens.
pub async fn scrape_jobs_ws(
    ws: WebSocketUpgrade,
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
) -> impl IntoResponse {
    ws.on_upgrade(|socket| handle_scrape_jobs_ws(socket, state))
}

/// Serialize an event and send it over the WebSocket sink.
/// Returns `true` if the message was sent, `false` if the client disconnected.
async fn send_event(
    sink: &mut futures::stream::SplitSink<WebSocket, Message>,
    event: &ScrapeJobEvent,
) -> bool {
    let Ok(json) = serde_json::to_string(event) else {
        return true; // serialization failed, but connection is still alive
    };
    sink.send(Message::Text(json.into())).await.is_ok()
}

async fn handle_scrape_jobs_ws(socket: WebSocket, state: AppState) {
    debug!("scrape-jobs WebSocket connected");

    let (mut sink, mut stream) = socket.split();

    // Send initial state
    let init_event = match build_init_event(&state.db_pool).await {
        Ok(event) => event,
        Err(e) => {
            debug!(error = %e, "failed to build init event, closing WebSocket");
            return;
        }
    };
    if !send_event(&mut sink, &init_event).await {
        debug!("client disconnected during init send");
        return;
    }

    // Subscribe to broadcast events
    let mut rx = state.scrape_job_events();

    loop {
        tokio::select! {
            result = rx.recv() => {
                match result {
                    Ok(ref event) => {
                        if !send_event(&mut sink, event).await {
                            debug!("client disconnected during event send");
                            break;
                        }
                    }
                    Err(broadcast::error::RecvError::Lagged(n)) => {
                        debug!(missed = n, "broadcast lagged, resyncing");
                        match build_init_event(&state.db_pool).await {
                            Ok(ref event) => {
                                if !send_event(&mut sink, event).await {
                                    debug!("client disconnected during resync send");
                                    break;
                                }
                            }
                            Err(e) => {
                                debug!(error = %e, "failed to build resync init event");
                            }
                        }
                    }
                    Err(broadcast::error::RecvError::Closed) => {
                        debug!("broadcast channel closed");
                        break;
                    }
                }
            }
            msg = stream.next() => {
                match msg {
                    Some(Ok(Message::Text(text))) => {
                        if let Ok(parsed) = serde_json::from_str::<serde_json::Value>(&text)
                            && parsed.get("type").and_then(|t| t.as_str()) == Some("resync")
                        {
                            debug!("client requested resync");
                            match build_init_event(&state.db_pool).await {
                                Ok(ref event) => {
                                    if !send_event(&mut sink, event).await {
                                        debug!("client disconnected during resync send");
                                        break;
                                    }
                                }
                                Err(e) => {
                                    debug!(error = %e, "failed to build resync init event");
                                }
                            }
                        }
                    }
                    Some(Ok(Message::Close(_))) | None => {
                        debug!("client disconnected");
                        break;
                    }
                    _ => {}
                }
            }
        }
    }

    debug!("scrape-jobs WebSocket disconnected");
}
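
// The only client→server frame the loop above acts on is a resync request;
// anything else falls through to `_ => {}`. The frame, as parsed above:
//
//     {"type":"resync"}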
@@ -0,0 +1,103 @@
#[allow(dead_code)]
mod helpers;

use banner::data::rmp::unmatch_instructor;
use sqlx::PgPool;

/// Test that unmatching an instructor resets accepted candidates back to pending.
///
/// When a user unmatches an instructor, accepted candidates should be reset to
/// 'pending' so they can be re-matched later. This prevents the bug where
/// candidates remain 'accepted' but have no corresponding link.
#[sqlx::test]
async fn unmatch_resets_accepted_candidates_to_pending(pool: PgPool) {
    // ARRANGE: Create an instructor
    let (instructor_id,): (i32,) = sqlx::query_as(
        "INSERT INTO instructors (display_name, email)
         VALUES ('Test, Instructor', 'test@utsa.edu')
         RETURNING id",
    )
    .fetch_one(&pool)
    .await
    .expect("failed to create instructor");

    // ARRANGE: Create an RMP professor
    let (rmp_legacy_id,): (i32,) = sqlx::query_as(
        "INSERT INTO rmp_professors (legacy_id, graphql_id, first_name, last_name, num_ratings)
         VALUES (9999999, 'test-graphql-id', 'Test', 'Professor', 10)
         RETURNING legacy_id",
    )
    .fetch_one(&pool)
    .await
    .expect("failed to create rmp professor");

    // ARRANGE: Create a match candidate with 'accepted' status
    sqlx::query(
        "INSERT INTO rmp_match_candidates (instructor_id, rmp_legacy_id, score, status)
         VALUES ($1, $2, 0.85, 'accepted')",
    )
    .bind(instructor_id)
    .bind(rmp_legacy_id)
    .execute(&pool)
    .await
    .expect("failed to create candidate");

    // ARRANGE: Create a link in instructor_rmp_links
    sqlx::query(
        "INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, source)
         VALUES ($1, $2, 'manual')",
    )
    .bind(instructor_id)
    .bind(rmp_legacy_id)
    .execute(&pool)
    .await
    .expect("failed to create link");

    // ARRANGE: Update instructor status to 'confirmed'
    sqlx::query("UPDATE instructors SET rmp_match_status = 'confirmed' WHERE id = $1")
        .bind(instructor_id)
        .execute(&pool)
        .await
        .expect("failed to update instructor status");

    // ACT: Unmatch the specific RMP profile
    unmatch_instructor(&pool, instructor_id, Some(rmp_legacy_id))
        .await
        .expect("unmatch should succeed");

    // ASSERT: Candidate should be reset to pending
    let (candidate_status,): (String,) = sqlx::query_as(
        "SELECT status FROM rmp_match_candidates
         WHERE instructor_id = $1 AND rmp_legacy_id = $2",
    )
    .bind(instructor_id)
    .bind(rmp_legacy_id)
    .fetch_one(&pool)
    .await
    .expect("failed to fetch candidate status");
    assert_eq!(
        candidate_status, "pending",
        "candidate should be reset to pending after unmatch"
    );

    // ASSERT: Link should be deleted
    let (link_count,): (i64,) =
        sqlx::query_as("SELECT COUNT(*) FROM instructor_rmp_links WHERE instructor_id = $1")
            .bind(instructor_id)
            .fetch_one(&pool)
            .await
            .expect("failed to count links");
    assert_eq!(link_count, 0, "link should be deleted");

    // ASSERT: Instructor status should be unmatched
    let (instructor_status,): (String,) =
        sqlx::query_as("SELECT rmp_match_status FROM instructors WHERE id = $1")
            .bind(instructor_id)
            .fetch_one(&pool)
            .await
            .expect("failed to fetch instructor status");
    assert_eq!(
        instructor_status, "unmatched",
        "instructor should be unmatched"
    );
}

@@ -1,3 +1,4 @@
#[allow(dead_code)]
mod helpers;

use banner::data::batch::batch_upsert_courses;
@@ -210,3 +211,116 @@ async fn test_batch_upsert_unique_constraint_crn_term(pool: PgPool) {
    assert_eq!(rows[1].0, "202520");
    assert_eq!(rows[1].1, 10);
}

#[sqlx::test]
async fn test_batch_upsert_creates_audit_and_metric_entries(pool: PgPool) {
    // Insert initial data — should NOT create audits/metrics (it's a fresh insert)
    let initial = vec![helpers::make_course(
        "50001",
        "202510",
        "CS",
        "3443",
        "App Programming",
        10,
        35,
        0,
        5,
    )];
    batch_upsert_courses(&initial, &pool).await.unwrap();

    let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(
        audit_count, 0,
        "initial insert should not create audit entries"
    );

    let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(
        metric_count, 0,
        "initial insert should not create metric entries"
    );

    // Update enrollment and wait_count
    let updated = vec![helpers::make_course(
        "50001",
        "202510",
        "CS",
        "3443",
        "App Programming",
        20,
        35,
        2,
        5,
    )];
    batch_upsert_courses(&updated, &pool).await.unwrap();

    // Should have audit entries for enrollment and wait_count changes
    let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert!(
        audit_count >= 2,
        "should have audit entries for enrollment and wait_count changes, got {audit_count}"
    );

    // Should have exactly 1 metric entry
    let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(metric_count, 1, "should have 1 metric snapshot");

    // Verify metric values
    let (enrollment, wait_count, seats): (i32, i32, i32) = sqlx::query_as(
        "SELECT enrollment, wait_count, seats_available FROM course_metrics LIMIT 1",
    )
    .fetch_one(&pool)
    .await
    .unwrap();
    assert_eq!(enrollment, 20);
    assert_eq!(wait_count, 2);
    assert_eq!(seats, 15); // 35 - 20
}

#[sqlx::test]
async fn test_batch_upsert_no_change_no_audit(pool: PgPool) {
    // Insert then re-insert identical data — should produce zero audits/metrics
    let course = vec![helpers::make_course(
        "60001",
        "202510",
        "CS",
        "1083",
        "Intro to CS",
        25,
        30,
        0,
        5,
    )];
    batch_upsert_courses(&course, &pool).await.unwrap();
    batch_upsert_courses(&course, &pool).await.unwrap();

    let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(
        audit_count, 0,
        "identical re-upsert should not create audit entries"
    );

    let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(
        metric_count, 0,
        "identical re-upsert should not create metric entries"
    );
}

+18
-11
@@ -1,3 +1,4 @@
#[allow(dead_code)]
mod helpers;

use banner::data::models::{ScrapePriority, TargetType};
@@ -217,10 +218,13 @@ async fn unlock_and_increment_retry_has_retries_remaining(pool: PgPool) {
    )
    .await;

    let has_retries = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
    let result = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
        .await
        .unwrap();
    assert!(has_retries, "should have retries remaining (0→1, max=3)");
    assert!(
        result.is_some(),
        "should have retries remaining (0→1, max=3)"
    );

    // Verify state in DB
    let (retry_count, locked_at): (i32, Option<chrono::DateTime<chrono::Utc>>) =
@@ -241,17 +245,17 @@ async fn unlock_and_increment_retry_exhausted(pool: PgPool) {
        json!({"subject": "CS"}),
        ScrapePriority::Medium,
        true,
        2, // retry_count
        3, // retry_count (already used all 3 retries)
        3, // max_retries
    )
    .await;

    let has_retries = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
    let result = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
        .await
        .unwrap();
    assert!(
        !has_retries,
        "should NOT have retries remaining (2→3, max=3)"
        result.is_none(),
        "should NOT have retries remaining (3→4, max=3)"
    );

    let (retry_count,): (i32,) =
@@ -260,7 +264,7 @@ async fn unlock_and_increment_retry_exhausted(pool: PgPool) {
        .fetch_one(&pool)
        .await
        .unwrap();
    assert_eq!(retry_count, 3);
    assert_eq!(retry_count, 4);
}

#[sqlx::test]
@@ -276,11 +280,11 @@ async fn unlock_and_increment_retry_already_exceeded(pool: PgPool) {
    )
    .await;

    let has_retries = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
    let result = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
        .await
        .unwrap();
    assert!(
        !has_retries,
        result.is_none(),
        "should NOT have retries remaining (5→6, max=3)"
    );

@@ -346,7 +350,7 @@ async fn find_existing_payloads_returns_matching(pool: PgPool) {
}

#[sqlx::test]
async fn find_existing_payloads_ignores_locked(pool: PgPool) {
async fn find_existing_payloads_includes_locked(pool: PgPool) {
    let payload = json!({"subject": "CS"});

    helpers::insert_scrape_job(
@@ -365,7 +369,10 @@ async fn find_existing_payloads_ignores_locked(pool: PgPool) {
    .await
    .unwrap();

    assert!(existing.is_empty(), "locked jobs should be ignored");
    assert!(
        existing.contains(&payload.to_string()),
        "locked jobs should be included in deduplication"
    );
}

#[sqlx::test]
+1
-1
@@ -7,7 +7,7 @@
  },
  "files": {
    "ignoreUnknown": false,
    "ignore": ["dist/", "node_modules/", ".svelte-kit/"]
    "ignore": ["dist/", "node_modules/", ".svelte-kit/", "src/lib/bindings/"]
  },
  "formatter": {
    "enabled": true,
+241
@@ -5,6 +5,12 @@
|
||||
"": {
|
||||
"name": "banner-web",
|
||||
"dependencies": {
|
||||
"@icons-pack/svelte-simple-icons": "^6.5.0",
|
||||
"d3-scale": "^4.0.2",
|
||||
"d3-shape": "^3.2.0",
|
||||
"d3-time-format": "^4.1.0",
|
||||
"date-fns": "^4.1.0",
|
||||
"layerchart": "^1.0.13",
|
||||
"overlayscrollbars": "^2.14.0",
|
||||
"overlayscrollbars-svelte": "^0.5.5",
|
||||
},
|
||||
@@ -17,6 +23,9 @@
|
||||
"@sveltejs/vite-plugin-svelte": "^5.0.3",
|
||||
"@tailwindcss/vite": "^4.0.0",
|
||||
"@tanstack/table-core": "^8.21.3",
|
||||
"@types/d3-scale": "^4.0.9",
|
||||
"@types/d3-shape": "^3.1.8",
|
||||
"@types/d3-time-format": "^4.0.3",
|
||||
"@types/node": "^25.1.0",
|
||||
"bits-ui": "^1.3.7",
|
||||
"clsx": "^2.1.1",
|
||||
@@ -32,6 +41,8 @@
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="],
|
||||
|
||||
"@asamuzakjp/css-color": ["@asamuzakjp/css-color@3.2.0", "", { "dependencies": { "@csstools/css-calc": "^2.1.3", "@csstools/css-color-parser": "^3.0.9", "@csstools/css-parser-algorithms": "^3.0.4", "@csstools/css-tokenizer": "^3.0.3", "lru-cache": "^10.4.3" } }, "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw=="],
|
||||
|
||||
"@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", "@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="],
|
||||
@@ -62,6 +73,10 @@
|
||||
|
||||
"@csstools/css-tokenizer": ["@csstools/css-tokenizer@3.0.4", "", {}, "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw=="],
|
||||
|
||||
"@dagrejs/dagre": ["@dagrejs/dagre@1.1.8", "", { "dependencies": { "@dagrejs/graphlib": "2.2.4" } }, "sha512-5SEDlndt4W/LaVzPYJW+bSmSEZc9EzTf8rJ20WCKvjS5EAZAN0b+x0Yww7VMT4R3Wootkg+X9bUfUxazYw6Blw=="],
|
||||
|
||||
"@dagrejs/graphlib": ["@dagrejs/graphlib@2.2.4", "", {}, "sha512-mepCf/e9+SKYy1d02/UkvSy6+6MoyXhVxP8lLDfA7BPE1X1d4dR0sZznmbM8/XVJ1GPM+Svnx7Xj6ZweByWUkw=="],
|
||||
|
||||
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA=="],
|
||||
|
||||
"@esbuild/android-arm": ["@esbuild/android-arm@0.25.12", "", { "os": "android", "cpu": "arm" }, "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg=="],
|
||||
@@ -122,6 +137,8 @@
|
||||
|
||||
"@fontsource-variable/inter": ["@fontsource-variable/inter@5.2.8", "", {}, "sha512-kOfP2D+ykbcX/P3IFnokOhVRNoTozo5/JxhAIVYLpea/UBmCQ/YWPBfWIDuBImXX/15KH+eKh4xpEUyS2sQQGQ=="],
|
||||
|
||||
"@icons-pack/svelte-simple-icons": ["@icons-pack/svelte-simple-icons@6.5.0", "", { "peerDependencies": { "@sveltejs/kit": "^2.5.0", "svelte": "^4.2.0 || ^5.0.0" } }, "sha512-Xj3PTioiV3TJ1NTKsXY95NFG8FUqw90oeyDZIlslWHs1KkuCheu1HOPrlHb0/IM0b4cldPgx/0TldzxzBlM8Cw=="],
|
||||
|
||||
"@internationalized/date": ["@internationalized/date@3.10.1", "", { "dependencies": { "@swc/helpers": "^0.5.0" } }, "sha512-oJrXtQiAXLvT9clCf1K4kxp3eKsQhIaZqxEyowkBcsvZDdZkbWrVmnGknxs5flTD0VGsxrxKgBCZty1EzoiMzA=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
|
||||
@@ -134,8 +151,22 @@
|
||||
|
||||
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
|
||||
|
||||
"@layerstack/svelte-actions": ["@layerstack/svelte-actions@1.0.1", "", { "dependencies": { "@floating-ui/dom": "^1.6.13", "@layerstack/utils": "1.0.1", "d3-array": "^3.2.4", "d3-scale": "^4.0.2", "date-fns": "^4.1.0", "lodash-es": "^4.17.21" } }, "sha512-Tv8B3TeT7oaghx0R0I4avnSdfAT6GxEK+StL8k/hEaa009iNOIGFl3f76kfvNvPioQHAMFGtnWGLPHfsfD41nQ=="],
|
||||
|
||||
"@layerstack/svelte-stores": ["@layerstack/svelte-stores@1.0.2", "", { "dependencies": { "@layerstack/utils": "1.0.1", "d3-array": "^3.2.4", "date-fns": "^4.1.0", "immer": "^10.1.1", "lodash-es": "^4.17.21", "zod": "^3.24.2" } }, "sha512-IxK0UKD0PVxg1VsyaR+n7NyJ+NlvyqvYYAp+J10lkjDQxm0yx58CaF2LBV08T22C3aY1iTlqJaatn/VHV4SoQg=="],
|
||||
|
||||
"@layerstack/tailwind": ["@layerstack/tailwind@1.0.1", "", { "dependencies": { "@layerstack/utils": "^1.0.1", "clsx": "^2.1.1", "culori": "^4.0.1", "d3-array": "^3.2.4", "date-fns": "^4.1.0", "lodash-es": "^4.17.21", "tailwind-merge": "^2.5.4", "tailwindcss": "^3.4.15" } }, "sha512-nlshEkUCfaV0zYzrFXVVYRnS8bnBjs4M7iui6l/tu6NeBBlxDivIyRraJkdYGCSL1lZHi6FqacLQ3eerHtz90A=="],
|
||||
|
||||
"@layerstack/utils": ["@layerstack/utils@1.0.1", "", { "dependencies": { "d3-array": "^3.2.4", "date-fns": "^4.1.0", "lodash-es": "^4.17.21" } }, "sha512-sWP9b+SFMkJYMZyYFI01aLxbg2ZUrix6Tv+BCDmeOrcLNxtWFsMYAomMhALzTMHbb+Vis/ua5vXhpdNXEw8a2Q=="],
|
||||
|
||||
"@lucide/svelte": ["@lucide/svelte@0.563.1", "", { "peerDependencies": { "svelte": "^5" } }, "sha512-Kt+MbnE5D9RsuI/csmf7M+HWxALe57x3A0DhQ8pPnnUpneh7zuldrYjlT+veWtk+tVnp5doQtaAAxLujzIlhBw=="],
|
||||
|
||||
"@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="],
|
||||
|
||||
"@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="],
|
||||
|
||||
"@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="],
|
||||
|
||||
"@polka/url": ["@polka/url@1.0.0-next.29", "", {}, "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww=="],
|
||||
|
||||
"@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.57.0", "", { "os": "android", "cpu": "arm" }, "sha512-tPgXB6cDTndIe1ah7u6amCI1T0SsnlOuKgg10Xh3uizJk4e5M1JGaUMk7J4ciuAUcFpbOiNhm2XIjP9ON0dUqA=="],
|
||||
@@ -238,6 +269,16 @@
|
||||
|
||||
"@types/cookie": ["@types/cookie@0.6.0", "", {}, "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA=="],
|
||||
|
||||
"@types/d3-path": ["@types/d3-path@3.1.1", "", {}, "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg=="],
|
||||
|
||||
"@types/d3-scale": ["@types/d3-scale@4.0.9", "", { "dependencies": { "@types/d3-time": "*" } }, "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw=="],
|
||||
|
||||
"@types/d3-shape": ["@types/d3-shape@3.1.8", "", { "dependencies": { "@types/d3-path": "*" } }, "sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w=="],
|
||||
|
||||
"@types/d3-time": ["@types/d3-time@3.0.4", "", {}, "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g=="],
|
||||
|
||||
"@types/d3-time-format": ["@types/d3-time-format@4.0.3", "", {}, "sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg=="],
|
||||
|
||||
"@types/deep-eql": ["@types/deep-eql@4.0.2", "", {}, "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw=="],
|
||||
|
||||
"@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
|
||||
@@ -262,16 +303,28 @@
|
||||
|
||||
"agent-base": ["agent-base@7.1.4", "", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="],
|
||||
|
||||
"any-promise": ["any-promise@1.3.0", "", {}, "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A=="],
|
||||
|
||||
"anymatch": ["anymatch@3.1.3", "", { "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="],
|
||||
|
||||
"arg": ["arg@5.0.2", "", {}, "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg=="],
|
||||
|
||||
"aria-query": ["aria-query@5.3.2", "", {}, "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw=="],
|
||||
|
||||
"assertion-error": ["assertion-error@2.0.1", "", {}, "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA=="],
|
||||
|
||||
"axobject-query": ["axobject-query@4.1.0", "", {}, "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="],
|
||||
|
||||
"binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="],
|
||||
|
||||
"bits-ui": ["bits-ui@1.8.0", "", { "dependencies": { "@floating-ui/core": "^1.6.4", "@floating-ui/dom": "^1.6.7", "@internationalized/date": "^3.5.6", "css.escape": "^1.5.1", "esm-env": "^1.1.2", "runed": "^0.23.2", "svelte-toolbelt": "^0.7.1", "tabbable": "^6.2.0" }, "peerDependencies": { "svelte": "^5.11.0" } }, "sha512-CXD6Orp7l8QevNDcRPLXc/b8iMVgxDWT2LyTwsdLzJKh9CxesOmPuNePSPqAxKoT59FIdU4aFPS1k7eBdbaCxg=="],
|
||||
|
||||
"braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="],
|
||||
|
||||
"cac": ["cac@6.7.14", "", {}, "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ=="],
|
||||
|
||||
"camelcase-css": ["camelcase-css@2.0.1", "", {}, "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA=="],
|
||||
|
||||
"chai": ["chai@5.3.3", "", { "dependencies": { "assertion-error": "^2.0.1", "check-error": "^2.1.1", "deep-eql": "^5.0.1", "loupe": "^3.1.0", "pathval": "^2.0.0" } }, "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw=="],
|
||||
|
||||
"check-error": ["check-error@2.1.3", "", {}, "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA=="],
|
||||
@@ -280,14 +333,70 @@
|
||||
|
||||
"clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="],
|
||||
|
||||
"commander": ["commander@7.2.0", "", {}, "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw=="],
|
||||
|
||||
"cookie": ["cookie@0.6.0", "", {}, "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw=="],
|
||||
|
||||
"css.escape": ["css.escape@1.5.1", "", {}, "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg=="],
|
||||
|
||||
"cssesc": ["cssesc@3.0.0", "", { "bin": { "cssesc": "bin/cssesc" } }, "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg=="],
|
||||
|
||||
"cssstyle": ["cssstyle@4.6.0", "", { "dependencies": { "@asamuzakjp/css-color": "^3.2.0", "rrweb-cssom": "^0.8.0" } }, "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg=="],
|
||||
|
||||
"culori": ["culori@4.0.2", "", {}, "sha512-1+BhOB8ahCn4O0cep0Sh2l9KCOfOdY+BXJnKMHFFzDEouSr/el18QwXEMRlOj9UY5nCeA8UN3a/82rUWRBeyBw=="],
|
||||
|
||||
"d3-array": ["d3-array@3.2.4", "", { "dependencies": { "internmap": "1 - 2" } }, "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg=="],
|
||||
|
||||
"d3-color": ["d3-color@3.1.0", "", {}, "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA=="],
|
||||
|
||||
"d3-delaunay": ["d3-delaunay@6.0.4", "", { "dependencies": { "delaunator": "5" } }, "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A=="],
|
||||
|
||||
"d3-dispatch": ["d3-dispatch@3.0.1", "", {}, "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg=="],
|
||||
|
||||
"d3-dsv": ["d3-dsv@3.0.1", "", { "dependencies": { "commander": "7", "iconv-lite": "0.6", "rw": "1" }, "bin": { "csv2json": "bin/dsv2json.js", "csv2tsv": "bin/dsv2dsv.js", "dsv2dsv": "bin/dsv2dsv.js", "dsv2json": "bin/dsv2json.js", "json2csv": "bin/json2dsv.js", "json2dsv": "bin/json2dsv.js", "json2tsv": "bin/json2dsv.js", "tsv2csv": "bin/dsv2dsv.js", "tsv2json": "bin/dsv2json.js" } }, "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q=="],
|
||||
|
||||
"d3-force": ["d3-force@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-quadtree": "1 - 3", "d3-timer": "1 - 3" } }, "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg=="],
|
||||
|
||||
"d3-format": ["d3-format@3.1.2", "", {}, "sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg=="],
|
||||
|
||||
"d3-geo": ["d3-geo@3.1.1", "", { "dependencies": { "d3-array": "2.5.0 - 3" } }, "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q=="],
|
||||
|
||||
"d3-geo-voronoi": ["d3-geo-voronoi@2.1.0", "", { "dependencies": { "d3-array": "3", "d3-delaunay": "6", "d3-geo": "3", "d3-tricontour": "1" } }, "sha512-kqE4yYuOjPbKdBXG0xztCacPwkVSK2REF1opSNrnqqtXJmNcM++UbwQ8SxvwP6IQTj9RvIjjK4qeiVsEfj0Z2Q=="],
|
||||
|
||||
"d3-hierarchy": ["d3-hierarchy@3.1.2", "", {}, "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA=="],
|
||||
|
||||
"d3-interpolate": ["d3-interpolate@3.0.1", "", { "dependencies": { "d3-color": "1 - 3" } }, "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g=="],
|
||||
|
||||
"d3-interpolate-path": ["d3-interpolate-path@2.3.0", "", {}, "sha512-tZYtGXxBmbgHsIc9Wms6LS5u4w6KbP8C09a4/ZYc4KLMYYqub57rRBUgpUr2CIarIrJEpdAWWxWQvofgaMpbKQ=="],
|
||||
|
||||
"d3-path": ["d3-path@3.1.0", "", {}, "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ=="],
|
||||
|
||||
"d3-quadtree": ["d3-quadtree@3.0.1", "", {}, "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw=="],
|
||||
|
||||
"d3-random": ["d3-random@3.0.1", "", {}, "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ=="],
|
||||
|
||||
"d3-sankey": ["d3-sankey@0.12.3", "", { "dependencies": { "d3-array": "1 - 2", "d3-shape": "^1.2.0" } }, "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ=="],
|
||||
|
||||
"d3-scale": ["d3-scale@4.0.2", "", { "dependencies": { "d3-array": "2.10.0 - 3", "d3-format": "1 - 3", "d3-interpolate": "1.2.0 - 3", "d3-time": "2.1.1 - 3", "d3-time-format": "2 - 4" } }, "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ=="],
|
||||
|
||||
"d3-scale-chromatic": ["d3-scale-chromatic@3.1.0", "", { "dependencies": { "d3-color": "1 - 3", "d3-interpolate": "1 - 3" } }, "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ=="],
"d3-shape": ["d3-shape@3.2.0", "", { "dependencies": { "d3-path": "^3.1.0" } }, "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA=="],
"d3-tile": ["d3-tile@1.0.0", "", {}, "sha512-79fnTKpPMPDS5xQ0xuS9ir0165NEwwkFpe/DSOmc2Gl9ldYzKKRDWogmTTE8wAJ8NA7PMapNfEcyKhI9Lxdu5Q=="],
"d3-time": ["d3-time@3.1.0", "", { "dependencies": { "d3-array": "2 - 3" } }, "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q=="],
"d3-time-format": ["d3-time-format@4.1.0", "", { "dependencies": { "d3-time": "1 - 3" } }, "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg=="],
"d3-timer": ["d3-timer@3.0.1", "", {}, "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA=="],
"d3-tricontour": ["d3-tricontour@1.1.0", "", { "dependencies": { "d3-delaunay": "6", "d3-scale": "4" } }, "sha512-G7gHKj89n2owmkGb6WX6ixcnQ0Kf/0wpa9VIh9DGdbHu8wdrlaHU4ir3/bFNERl8N8nn4G7e7qbtBG8N9caihQ=="],
"data-urls": ["data-urls@5.0.0", "", { "dependencies": { "whatwg-mimetype": "^4.0.0", "whatwg-url": "^14.0.0" } }, "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg=="],
"date-fns": ["date-fns@4.1.0", "", {}, "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg=="],
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
"decimal.js": ["decimal.js@10.6.0", "", {}, "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg=="],
@@ -296,10 +405,16 @@
"deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="],
"delaunator": ["delaunator@5.0.1", "", { "dependencies": { "robust-predicates": "^3.0.2" } }, "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw=="],
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"devalue": ["devalue@5.6.2", "", {}, "sha512-nPRkjWzzDQlsejL1WVifk5rvcFi/y1onBRxjaFMjZeR9mFpqu2gmAZ9xUB9/IEanEP/vBtGeGganC/GO1fmufg=="],
"didyoumean": ["didyoumean@1.2.2", "", {}, "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw=="],
"dlv": ["dlv@1.1.3", "", {}, "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA=="],
"enhanced-resolve": ["enhanced-resolve@5.18.4", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q=="],
"entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="],
@@ -316,12 +431,24 @@
"expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
"fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="],
"fastq": ["fastq@1.20.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw=="],
"fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
"fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
"glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="],
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
"hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
"html-encoding-sniffer": ["html-encoding-sniffer@4.0.0", "", { "dependencies": { "whatwg-encoding": "^3.1.1" } }, "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ=="],
"http-proxy-agent": ["http-proxy-agent@7.0.2", "", { "dependencies": { "agent-base": "^7.1.0", "debug": "^4.3.4" } }, "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="],
@@ -330,8 +457,22 @@
"iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"immer": ["immer@10.2.0", "", {}, "sha512-d/+XTN3zfODyjr89gM3mPq1WNX2B8pYsu7eORitdwyA2sBubnTl3laYlBk4sXY5FUa5qTZGBDPJICVbvqzjlbw=="],
"inline-style-parser": ["inline-style-parser@0.2.7", "", {}, "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA=="],
"internmap": ["internmap@2.0.3", "", {}, "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg=="],
"is-binary-path": ["is-binary-path@2.1.0", "", { "dependencies": { "binary-extensions": "^2.0.0" } }, "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw=="],
"is-core-module": ["is-core-module@2.16.1", "", { "dependencies": { "hasown": "^2.0.2" } }, "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w=="],
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
"is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
"is-potential-custom-element-name": ["is-potential-custom-element-name@1.0.1", "", {}, "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ=="],
"is-reference": ["is-reference@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.6" } }, "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw=="],
@@ -344,6 +485,10 @@
"kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
"layercake": ["layercake@8.4.3", "", { "dependencies": { "d3-array": "^3.2.4", "d3-color": "^3.1.0", "d3-scale": "^4.0.2", "d3-shape": "^3.2.0" }, "peerDependencies": { "svelte": "3 - 5 || >=5.0.0-next.120", "typescript": "^5.0.2" } }, "sha512-PZDduaPFxgHHkxlmsz5MVBECf6ZCT39DI3LgMVvuMwrmlrtlXwXUM/elJp46zHYzCE1j+cGyDuBDxnANv94tOQ=="],
"layerchart": ["layerchart@1.0.13", "", { "dependencies": { "@dagrejs/dagre": "^1.1.4", "@layerstack/svelte-actions": "^1.0.1", "@layerstack/svelte-stores": "^1.0.2", "@layerstack/tailwind": "^1.0.1", "@layerstack/utils": "^1.0.1", "d3-array": "^3.2.4", "d3-color": "^3.1.0", "d3-delaunay": "^6.0.4", "d3-dsv": "^3.0.1", "d3-force": "^3.0.0", "d3-geo": "^3.1.1", "d3-geo-voronoi": "^2.1.0", "d3-hierarchy": "^3.1.2", "d3-interpolate": "^3.0.1", "d3-interpolate-path": "^2.3.0", "d3-path": "^3.1.0", "d3-quadtree": "^3.0.1", "d3-random": "^3.0.1", "d3-sankey": "^0.12.3", "d3-scale": "^4.0.2", "d3-scale-chromatic": "^3.1.0", "d3-shape": "^3.2.0", "d3-tile": "^1.0.0", "d3-time": "^3.1.0", "date-fns": "^4.1.0", "layercake": "8.4.3", "lodash-es": "^4.17.21" }, "peerDependencies": { "svelte": "^3.56.0 || ^4.0.0 || ^5.0.0" } }, "sha512-bjcrfyTdHtfYZn7yj26dvA1qUjM+R6+akp2VeBJ4JWKmDGhb5WvT9nMCs52Rb+gSd/omFq5SjZLz49MqlVljZw=="],
"lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="],
"lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="],
@@ -368,30 +513,50 @@
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="],
"lilconfig": ["lilconfig@3.1.3", "", {}, "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw=="],
"lines-and-columns": ["lines-and-columns@1.2.4", "", {}, "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg=="],
"locate-character": ["locate-character@3.0.0", "", {}, "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA=="],
"lodash-es": ["lodash-es@4.17.23", "", {}, "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg=="],
"loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="],
"lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
"merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],
"micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="],
"mri": ["mri@1.2.0", "", {}, "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA=="],
"mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
"mz": ["mz@2.7.0", "", { "dependencies": { "any-promise": "^1.0.0", "object-assign": "^4.0.1", "thenify-all": "^1.0.0" } }, "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q=="],
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
"normalize-path": ["normalize-path@3.0.0", "", {}, "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="],
"nwsapi": ["nwsapi@2.2.23", "", {}, "sha512-7wfH4sLbt4M0gCDzGE6vzQBo0bfTKjU7Sfpqy/7gs1qBfYz2vEJH6vXcBKpO3+6Yu1telwd0t9HpyOoLEQQbIQ=="],
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
"object-hash": ["object-hash@3.0.0", "", {}, "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw=="],
"overlayscrollbars": ["overlayscrollbars@2.14.0", "", {}, "sha512-RjV0pqc79kYhQLC3vTcLRb5GLpI1n6qh0Oua3g+bGH4EgNOJHVBGP7u0zZtxoAa0dkHlAqTTSYRb9MMmxNLjig=="],
"overlayscrollbars-svelte": ["overlayscrollbars-svelte@0.5.5", "", { "peerDependencies": { "overlayscrollbars": "^2.0.0", "svelte": "^5.0.0" } }, "sha512-+dRW3YZSvFbKi5vDCpnUOHuoPLLSdu0BUVVMYZdmfVghu7XkafDRebG2y91/ImPqj6YDAUsz1rcWVYhCJSS/pQ=="],
"parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="],
"path-parse": ["path-parse@1.0.7", "", {}, "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="],
"pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
"pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="],
@@ -400,18 +565,48 @@
"picomatch": ["picomatch@4.0.3", "", {}, "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="],
"pify": ["pify@2.3.0", "", {}, "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog=="],
"pirates": ["pirates@4.0.7", "", {}, "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA=="],
"postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
"postcss-import": ["postcss-import@15.1.0", "", { "dependencies": { "postcss-value-parser": "^4.0.0", "read-cache": "^1.0.0", "resolve": "^1.1.7" }, "peerDependencies": { "postcss": "^8.0.0" } }, "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew=="],
"postcss-js": ["postcss-js@4.1.0", "", { "dependencies": { "camelcase-css": "^2.0.1" }, "peerDependencies": { "postcss": "^8.4.21" } }, "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw=="],
"postcss-load-config": ["postcss-load-config@6.0.1", "", { "dependencies": { "lilconfig": "^3.1.1" }, "peerDependencies": { "jiti": ">=1.21.0", "postcss": ">=8.0.9", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["jiti", "postcss", "tsx", "yaml"] }, "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g=="],
"postcss-nested": ["postcss-nested@6.2.0", "", { "dependencies": { "postcss-selector-parser": "^6.1.1" }, "peerDependencies": { "postcss": "^8.2.14" } }, "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ=="],
"postcss-selector-parser": ["postcss-selector-parser@6.1.2", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg=="],
"postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="],
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
"read-cache": ["read-cache@1.0.0", "", { "dependencies": { "pify": "^2.3.0" } }, "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA=="],
"readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
"resolve": ["resolve@1.22.11", "", { "dependencies": { "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ=="],
"reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="],
"robust-predicates": ["robust-predicates@3.0.2", "", {}, "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg=="],
"rollup": ["rollup@4.57.0", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.57.0", "@rollup/rollup-android-arm64": "4.57.0", "@rollup/rollup-darwin-arm64": "4.57.0", "@rollup/rollup-darwin-x64": "4.57.0", "@rollup/rollup-freebsd-arm64": "4.57.0", "@rollup/rollup-freebsd-x64": "4.57.0", "@rollup/rollup-linux-arm-gnueabihf": "4.57.0", "@rollup/rollup-linux-arm-musleabihf": "4.57.0", "@rollup/rollup-linux-arm64-gnu": "4.57.0", "@rollup/rollup-linux-arm64-musl": "4.57.0", "@rollup/rollup-linux-loong64-gnu": "4.57.0", "@rollup/rollup-linux-loong64-musl": "4.57.0", "@rollup/rollup-linux-ppc64-gnu": "4.57.0", "@rollup/rollup-linux-ppc64-musl": "4.57.0", "@rollup/rollup-linux-riscv64-gnu": "4.57.0", "@rollup/rollup-linux-riscv64-musl": "4.57.0", "@rollup/rollup-linux-s390x-gnu": "4.57.0", "@rollup/rollup-linux-x64-gnu": "4.57.0", "@rollup/rollup-linux-x64-musl": "4.57.0", "@rollup/rollup-openbsd-x64": "4.57.0", "@rollup/rollup-openharmony-arm64": "4.57.0", "@rollup/rollup-win32-arm64-msvc": "4.57.0", "@rollup/rollup-win32-ia32-msvc": "4.57.0", "@rollup/rollup-win32-x64-gnu": "4.57.0", "@rollup/rollup-win32-x64-msvc": "4.57.0", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-e5lPJi/aui4TO1LpAXIRLySmwXSE8k3b9zoGfd42p67wzxog4WHjiZF3M2uheQih4DGyc25QEV4yRBbpueNiUA=="],
"rrweb-cssom": ["rrweb-cssom@0.8.0", "", {}, "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw=="],
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
"runed": ["runed@0.23.4", "", { "dependencies": { "esm-env": "^1.0.0" }, "peerDependencies": { "svelte": "^5.7.0" } }, "sha512-9q8oUiBYeXIDLWNK5DfCWlkL0EW3oGbk845VdKlPeia28l751VpfesaB/+7pI6rnbx1I6rqoZ2fZxptOJLxILA=="],
"rw": ["rw@1.3.3", "", {}, "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ=="],
"sade": ["sade@1.8.1", "", { "dependencies": { "mri": "^1.1.0" } }, "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A=="],
"safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
@@ -434,6 +629,10 @@
"style-to-object": ["style-to-object@1.0.14", "", { "dependencies": { "inline-style-parser": "0.2.7" } }, "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw=="],
"sucrase": ["sucrase@3.35.1", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.2", "commander": "^4.0.0", "lines-and-columns": "^1.1.6", "mz": "^2.7.0", "pirates": "^4.0.1", "tinyglobby": "^0.2.11", "ts-interface-checker": "^0.1.9" }, "bin": { "sucrase": "bin/sucrase", "sucrase-node": "bin/sucrase-node" } }, "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw=="],
"supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="],
"svelte": ["svelte@5.49.0", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "devalue": "^5.6.2", "esm-env": "^1.2.1", "esrap": "^2.2.1", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-Fn2mCc3XX0gnnbBYzWOTrZHi5WnF9KvqmB1+KGlUWoJkdioPmFYtg2ALBr6xl2dcnFTz3Vi7/mHpbKSVg/imVg=="],
"svelte-check": ["svelte-check@4.3.5", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-e4VWZETyXaKGhpkxOXP+B/d0Fp/zKViZoJmneZWe/05Y2aqSKj3YN2nLfYPJBQ87WEiY4BQCQ9hWGu9mPT1a1Q=="],
@@ -450,6 +649,10 @@
"tapable": ["tapable@2.3.0", "", {}, "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="],
"thenify": ["thenify@3.3.1", "", { "dependencies": { "any-promise": "^1.0.0" } }, "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw=="],
"thenify-all": ["thenify-all@1.6.0", "", { "dependencies": { "thenify": ">= 3.1.0 < 4" } }, "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA=="],
"tinybench": ["tinybench@2.9.0", "", {}, "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg=="],
"tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="],
@@ -466,18 +669,24 @@
"tldts-core": ["tldts-core@6.1.86", "", {}, "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA=="],
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
"totalist": ["totalist@3.0.1", "", {}, "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ=="],
"tough-cookie": ["tough-cookie@5.1.2", "", { "dependencies": { "tldts": "^6.1.32" } }, "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A=="],
"tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="],
"ts-interface-checker": ["ts-interface-checker@0.1.13", "", {}, "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="],
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
"vite": ["vite@6.4.1", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g=="],
"vite-node": ["vite-node@3.2.4", "", { "dependencies": { "cac": "^6.7.14", "debug": "^4.4.1", "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" } }, "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg=="],
@@ -506,6 +715,12 @@
"zimmerframe": ["zimmerframe@1.1.4", "", {}, "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ=="],
"zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
"@layerstack/tailwind/tailwind-merge": ["tailwind-merge@2.6.0", "", {}, "sha512-P+Vu1qXfzediirmHOC3xKGAYeZtPcV9g76X+xg2FD4tYgR71ewMA35Y3sCz3zhiN/dwefRpJX0yBcgwi1fXNQA=="],
"@layerstack/tailwind/tailwindcss": ["tailwindcss@3.4.19", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", "chokidar": "^3.6.0", "didyoumean": "^1.2.2", "dlv": "^1.1.3", "fast-glob": "^3.3.2", "glob-parent": "^6.0.2", "is-glob": "^4.0.3", "jiti": "^1.21.7", "lilconfig": "^3.1.3", "micromatch": "^4.0.8", "normalize-path": "^3.0.0", "object-hash": "^3.0.0", "picocolors": "^1.1.1", "postcss": "^8.4.47", "postcss-import": "^15.1.0", "postcss-js": "^4.0.1", "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", "postcss-nested": "^6.2.0", "postcss-selector-parser": "^6.1.2", "resolve": "^1.22.8", "sucrase": "^3.35.0" }, "bin": { "tailwind": "lib/cli.js", "tailwindcss": "lib/cli.js" } }, "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.8.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.8.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg=="],
@@ -517,5 +732,31 @@
"@tailwindcss/oxide-wasm32-wasi/@tybys/wasm-util": ["@tybys/wasm-util@0.10.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg=="],
"@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"anymatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"d3-sankey/d3-array": ["d3-array@2.12.1", "", { "dependencies": { "internmap": "^1.0.0" } }, "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ=="],
"d3-sankey/d3-shape": ["d3-shape@1.3.7", "", { "dependencies": { "d3-path": "1" } }, "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw=="],
"fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"sucrase/commander": ["commander@4.1.1", "", {}, "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA=="],
"@layerstack/tailwind/tailwindcss/chokidar": ["chokidar@3.6.0", "", { "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="],
"@layerstack/tailwind/tailwindcss/jiti": ["jiti@1.21.7", "", { "bin": { "jiti": "bin/jiti.js" } }, "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A=="],
"d3-sankey/d3-array/internmap": ["internmap@1.0.1", "", {}, "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw=="],
"d3-sankey/d3-shape/d3-path": ["d3-path@1.0.9", "", {}, "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg=="],
"@layerstack/tailwind/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"@layerstack/tailwind/tailwindcss/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="],
"@layerstack/tailwind/tailwindcss/chokidar/readdirp/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
}
}
@@ -20,6 +20,9 @@
    "@sveltejs/vite-plugin-svelte": "^5.0.3",
    "@tailwindcss/vite": "^4.0.0",
    "@tanstack/table-core": "^8.21.3",
    "@types/d3-scale": "^4.0.9",
    "@types/d3-shape": "^3.1.8",
    "@types/d3-time-format": "^4.0.3",
    "@types/node": "^25.1.0",
    "bits-ui": "^1.3.7",
    "clsx": "^2.1.1",
@@ -33,6 +36,12 @@
    "vitest": "^3.0.5"
  },
  "dependencies": {
    "@icons-pack/svelte-simple-icons": "^6.5.0",
    "d3-scale": "^4.0.2",
    "d3-shape": "^3.2.0",
    "d3-time-format": "^4.1.0",
    "date-fns": "^4.1.0",
    "layerchart": "^1.0.13",
    "overlayscrollbars": "^2.14.0",
    "overlayscrollbars-svelte": "^0.5.5"
  }
@@ -0,0 +1,148 @@
#!/usr/bin/env bun
/**
 * Pre-compress static assets with maximum compression levels.
 * Run after `bun run build`.
 *
 * Generates .gz, .br, .zst variants for compressible files ≥ MIN_SIZE bytes.
 * These are embedded alongside originals by rust-embed and served via
 * content negotiation in src/web/assets.rs.
 */
import { readdir, stat, readFile, writeFile } from "fs/promises";
import { join, extname } from "path";
import { gzipSync, brotliCompressSync, constants } from "zlib";
import { $ } from "bun";

// Must match COMPRESSION_MIN_SIZE in src/web/encoding.rs
const MIN_SIZE = 512;

const COMPRESSIBLE_EXTENSIONS = new Set([
  ".js",
  ".css",
  ".html",
  ".json",
  ".svg",
  ".txt",
  ".xml",
  ".map",
]);

// Check if zstd CLI is available
let hasZstd = false;
try {
  await $`which zstd`.quiet();
  hasZstd = true;
} catch {
  console.warn("Warning: zstd not found, skipping .zst generation");
}

async function* walkDir(dir: string): AsyncGenerator<string> {
  try {
    const entries = await readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const path = join(dir, entry.name);
      if (entry.isDirectory()) {
        yield* walkDir(path);
      } else if (entry.isFile()) {
        yield path;
      }
    }
  } catch {
    // Directory doesn't exist, skip
  }
}

async function compressFile(path: string): Promise<void> {
  const ext = extname(path);

  if (!COMPRESSIBLE_EXTENSIONS.has(ext)) return;
  if (path.endsWith(".br") || path.endsWith(".gz") || path.endsWith(".zst")) return;

  const stats = await stat(path);
  if (stats.size < MIN_SIZE) return;

  // Skip when every required variant already exists. A missing zstd CLI
  // counts the .zst variant as satisfied, so .br/.gz presence still gates the skip.
  const variantsExist = await Promise.all([
    stat(`${path}.br`).then(
      () => true,
      () => false
    ),
    stat(`${path}.gz`).then(
      () => true,
      () => false
    ),
    hasZstd
      ? stat(`${path}.zst`).then(
          () => true,
          () => false
        )
      : Promise.resolve(true),
  ]);

  if (variantsExist.every(Boolean)) {
    return;
  }

  const content = await readFile(path);
  const originalSize = content.length;

  // Brotli (maximum quality = 11)
  const brContent = brotliCompressSync(content, {
    params: {
      [constants.BROTLI_PARAM_QUALITY]: 11,
    },
  });
  await writeFile(`${path}.br`, brContent);

  // Gzip (level 9)
  const gzContent = gzipSync(content, { level: 9 });
  await writeFile(`${path}.gz`, gzContent);

  // Zstd (level 19 - maximum)
  if (hasZstd) {
    try {
      await $`zstd -19 -q -f -o ${path}.zst ${path}`.quiet();
    } catch (e) {
      console.warn(`Warning: Failed to compress ${path} with zstd: ${e}`);
    }
  }

  const brRatio = ((brContent.length / originalSize) * 100).toFixed(1);
  const gzRatio = ((gzContent.length / originalSize) * 100).toFixed(1);
  console.log(`Compressed: ${path} (br: ${brRatio}%, gz: ${gzRatio}%, ${originalSize} bytes)`);
}

async function main() {
  console.log("Pre-compressing static assets...");

  // Banner uses adapter-static with output in dist/
  const dirs = ["dist"];
  let scannedFiles = 0;
  let compressedFiles = 0;

  for (const dir of dirs) {
    for await (const file of walkDir(dir)) {
      const ext = extname(file);
      scannedFiles++;

      if (
        COMPRESSIBLE_EXTENSIONS.has(ext) &&
        !file.endsWith(".br") &&
        !file.endsWith(".gz") &&
        !file.endsWith(".zst")
      ) {
        const stats = await stat(file);
        if (stats.size >= MIN_SIZE) {
          await compressFile(file);
          compressedFiles++;
        }
      }
    }
  }

  console.log(`Done! Scanned ${scannedFiles} files, compressed ${compressedFiles} files.`);
}

main().catch((e) => {
  console.error("Compression failed:", e);
  process.exit(1);
});
+2
-19
@@ -11,23 +11,6 @@ describe("BannerApiClient", () => {
    vi.clearAllMocks();
  });

  it("should fetch health data", async () => {
    const mockHealth = {
      status: "healthy",
      timestamp: "2024-01-01T00:00:00Z",
    };

    vi.mocked(fetch).mockResolvedValueOnce({
      ok: true,
      json: () => Promise.resolve(mockHealth),
    } as Response);

    const result = await apiClient.getHealth();

    expect(fetch).toHaveBeenCalledWith("/api/health");
    expect(result).toEqual(mockHealth);
  });

  it("should fetch status data", async () => {
    const mockStatus = {
      status: "active" as const,
@@ -57,7 +40,7 @@ describe("BannerApiClient", () => {
      statusText: "Internal Server Error",
    } as Response);

    await expect(apiClient.getHealth()).rejects.toThrow(
    await expect(apiClient.getStatus()).rejects.toThrow(
      "API request failed: 500 Internal Server Error"
    );
  });
@@ -77,7 +60,7 @@ describe("BannerApiClient", () => {

    const result = await apiClient.searchCourses({
      term: "202420",
      subject: "CS",
      subjects: ["CS"],
      q: "data",
      open_only: true,
      limit: 25,
+331
-22
@@ -1,25 +1,59 @@
import { authStore } from "$lib/auth.svelte";
import type {
  CandidateResponse,
  CodeDescription,
  CourseResponse,
  DbMeetingTime,
  InstructorDetail,
  InstructorDetailResponse,
  InstructorListItem,
  InstructorResponse,
  InstructorStats,
  LinkedRmpProfile,
  ListInstructorsResponse,
  RescoreResponse,
  ScraperStatsResponse,
  SearchResponse as SearchResponseGenerated,
  ServiceInfo,
  ServiceStatus,
  StatusResponse,
  SubjectDetailResponse,
  SubjectResultEntry,
  SubjectSummary,
  SubjectsResponse,
  TimeseriesPoint,
  TimeseriesResponse,
  TopCandidateResponse,
  User,
} from "$lib/bindings";

const API_BASE_URL = "/api";

// Re-export generated types under their canonical names
export type {
  CandidateResponse,
  CodeDescription,
  CourseResponse,
  DbMeetingTime,
  InstructorDetail,
  InstructorDetailResponse,
  InstructorListItem,
  InstructorResponse,
  InstructorStats,
  LinkedRmpProfile,
  ListInstructorsResponse,
  RescoreResponse,
  ScraperStatsResponse,
  ServiceInfo,
  ServiceStatus,
  StatusResponse,
  SubjectDetailResponse,
  SubjectResultEntry,
  SubjectSummary,
  SubjectsResponse,
  TimeseriesPoint,
  TimeseriesResponse,
  TopCandidateResponse,
};

// Semantic aliases — these all share the CodeDescription shape
@@ -30,26 +64,104 @@ export type ReferenceEntry = CodeDescription;
// SearchResponse re-exported (aliased to strip the "Generated" suffix)
export type SearchResponse = SearchResponseGenerated;

// Health/metrics endpoints return ad-hoc JSON — keep manual types
export interface HealthResponse {
  status: string;
  timestamp: string;
}

export interface MetricsResponse {
  banner_api: {
    status: string;
  };
  timestamp: string;
}
export type ScraperPeriod = "1h" | "6h" | "24h" | "7d" | "30d";

// Client-side only — not generated from Rust
export type SortColumn = "course_code" | "title" | "instructor" | "time" | "seats";
export type SortDirection = "asc" | "desc";

export interface AdminStatus {
  userCount: number;
  sessionCount: number;
  courseCount: number;
  scrapeJobCount: number;
  services: { name: string; status: string }[];
}

export interface ScrapeJob {
  id: number;
  targetType: string;
  targetPayload: unknown;
  priority: string;
  executeAt: string;
  createdAt: string;
  lockedAt: string | null;
  retryCount: number;
  maxRetries: number;
  queuedAt: string;
  status: "processing" | "staleLock" | "exhausted" | "scheduled" | "pending";
}

export interface ScrapeJobsResponse {
  jobs: ScrapeJob[];
}

export interface AuditLogEntry {
  id: number;
  courseId: number;
  timestamp: string;
  fieldChanged: string;
  oldValue: string;
  newValue: string;
  subject: string | null;
  courseNumber: string | null;
  crn: string | null;
  courseTitle: string | null;
}

export interface AuditLogResponse {
  entries: AuditLogEntry[];
}

export interface MetricEntry {
  id: number;
  courseId: number;
  timestamp: string;
  enrollment: number;
  waitCount: number;
  seatsAvailable: number;
}

export interface MetricsResponse {
  metrics: MetricEntry[];
  count: number;
  timestamp: string;
}

export interface MetricsParams {
  course_id?: number;
  term?: string;
  crn?: string;
  range?: "1h" | "6h" | "24h" | "7d" | "30d";
  limit?: number;
}

/** A time range for timeline queries (ISO-8601 strings). */
export interface TimelineRange {
  start: string;
  end: string;
}

/** Request body for POST /api/timeline. */
export interface TimelineRequest {
  ranges: TimelineRange[];
}

/** A single 15-minute slot returned by the timeline API. */
export interface TimelineSlot {
  time: string;
  subjects: Record<string, number>;
}

/** Response from POST /api/timeline. */
export interface TimelineResponse {
  slots: TimelineSlot[];
  subjects: string[];
}

export interface SearchParams {
  term: string;
  subject?: string;
  subjects?: string[];
  q?: string;
  open_only?: boolean;
  limit?: number;
@@ -58,6 +170,15 @@ export interface SearchParams {
  sort_dir?: SortDirection;
}

// Admin instructor query params (client-only, not generated)
export interface AdminInstructorListParams {
  status?: string;
  search?: string;
  page?: number;
  per_page?: number;
  sort?: string;
}

export class BannerApiClient {
  private baseUrl: string;
  private fetchFn: typeof fetch;
@@ -67,8 +188,34 @@ export class BannerApiClient {
    this.fetchFn = fetchFn;
  }

  private async request<T>(endpoint: string): Promise<T> {
    const response = await this.fetchFn(`${this.baseUrl}${endpoint}`);
  private buildInit(options?: { method?: string; body?: unknown }): RequestInit | undefined {
    if (!options) return undefined;
    const init: RequestInit = {};
    if (options.method) {
      init.method = options.method;
    }
    if (options.body !== undefined) {
      init.headers = { "Content-Type": "application/json" };
      init.body = JSON.stringify(options.body);
    } else if (options.method) {
      init.headers = { "Content-Type": "application/json" };
    }
    return Object.keys(init).length > 0 ? init : undefined;
  }

  private async request<T>(
    endpoint: string,
    options?: { method?: string; body?: unknown }
  ): Promise<T> {
    const init = this.buildInit(options);
    const args: [string, RequestInit?] = [`${this.baseUrl}${endpoint}`];
    if (init) args.push(init);

    const response = await this.fetchFn(...args);

    if (response.status === 401) {
      authStore.handleUnauthorized();
    }

    if (!response.ok) {
      throw new Error(`API request failed: ${response.status} ${response.statusText}`);
@@ -77,22 +224,37 @@ export class BannerApiClient {
    return (await response.json()) as T;
  }

  async getHealth(): Promise<HealthResponse> {
    return this.request<HealthResponse>("/health");
  private async requestVoid(
    endpoint: string,
    options?: { method?: string; body?: unknown }
  ): Promise<void> {
    const init = this.buildInit(options);
    const args: [string, RequestInit?] = [`${this.baseUrl}${endpoint}`];
    if (init) args.push(init);

    const response = await this.fetchFn(...args);

    if (response.status === 401) {
      authStore.handleUnauthorized();
    }

    if (!response.ok) {
      throw new Error(`API request failed: ${response.status} ${response.statusText}`);
    }
  }

  async getStatus(): Promise<StatusResponse> {
    return this.request<StatusResponse>("/status");
  }

  async getMetrics(): Promise<MetricsResponse> {
    return this.request<MetricsResponse>("/metrics");
  }

  async searchCourses(params: SearchParams): Promise<SearchResponse> {
    const query = new URLSearchParams();
    query.set("term", params.term);
    if (params.subject) query.set("subject", params.subject);
    if (params.subjects) {
      for (const s of params.subjects) {
        query.append("subject", s);
      }
    }
    if (params.q) query.set("q", params.q);
    if (params.open_only) query.set("open_only", "true");
    if (params.limit !== undefined) query.set("limit", String(params.limit));
@@ -113,6 +275,153 @@
  async getReference(category: string): Promise<ReferenceEntry[]> {
    return this.request<ReferenceEntry[]>(`/reference/${encodeURIComponent(category)}`);
  }

  // Admin endpoints
  async getAdminStatus(): Promise<AdminStatus> {
    return this.request<AdminStatus>("/admin/status");
  }

  async getAdminUsers(): Promise<User[]> {
    return this.request<User[]>("/admin/users");
  }

  async setUserAdmin(discordId: string, isAdmin: boolean): Promise<User> {
    return this.request<User>(`/admin/users/${discordId}/admin`, {
      method: "PUT",
      body: { is_admin: isAdmin },
    });
  }

  async getAdminScrapeJobs(): Promise<ScrapeJobsResponse> {
    return this.request<ScrapeJobsResponse>("/admin/scrape-jobs");
  }

  /**
   * Fetch the audit log with conditional request support.
   *
   * Returns `null` when the server responds 304 (data unchanged).
   * Stores and sends `Last-Modified` / `If-Modified-Since` automatically.
   */
  async getAdminAuditLog(): Promise<AuditLogResponse | null> {
    const headers: Record<string, string> = {};
    if (this._auditLastModified) {
      headers["If-Modified-Since"] = this._auditLastModified;
    }

    const response = await this.fetchFn(`${this.baseUrl}/admin/audit-log`, { headers });

    if (response.status === 304) {
      return null;
    }

    if (!response.ok) {
      throw new Error(`API request failed: ${response.status} ${response.statusText}`);
    }

    const lastMod = response.headers.get("Last-Modified");
    if (lastMod) {
      this._auditLastModified = lastMod;
    }

    return (await response.json()) as AuditLogResponse;
  }

  /** Stored `Last-Modified` value for audit log conditional requests. */
  private _auditLastModified: string | null = null;

  async getTimeline(ranges: TimelineRange[]): Promise<TimelineResponse> {
    return this.request<TimelineResponse>("/timeline", {
      method: "POST",
      body: { ranges } satisfies TimelineRequest,
    });
  }

  async getMetrics(params?: MetricsParams): Promise<MetricsResponse> {
    const query = new URLSearchParams();
    if (params?.course_id !== undefined) query.set("course_id", String(params.course_id));
    if (params?.term) query.set("term", params.term);
    if (params?.crn) query.set("crn", params.crn);
    if (params?.range) query.set("range", params.range);
    if (params?.limit !== undefined) query.set("limit", String(params.limit));
    const qs = query.toString();
    return this.request<MetricsResponse>(`/metrics${qs ? `?${qs}` : ""}`);
  }

  // Admin instructor endpoints

  async getAdminInstructors(params?: AdminInstructorListParams): Promise<ListInstructorsResponse> {
    const query = new URLSearchParams();
    if (params?.status) query.set("status", params.status);
    if (params?.search) query.set("search", params.search);
    if (params?.page !== undefined) query.set("page", String(params.page));
    if (params?.per_page !== undefined) query.set("per_page", String(params.per_page));
    if (params?.sort) query.set("sort", params.sort);
    const qs = query.toString();
    return this.request<ListInstructorsResponse>(`/admin/instructors${qs ? `?${qs}` : ""}`);
  }

  async getAdminInstructor(id: number): Promise<InstructorDetailResponse> {
    return this.request<InstructorDetailResponse>(`/admin/instructors/${id}`);
  }

  async matchInstructor(id: number, rmpLegacyId: number): Promise<InstructorDetailResponse> {
    return this.request<InstructorDetailResponse>(`/admin/instructors/${id}/match`, {
      method: "POST",
      body: { rmpLegacyId },
    });
  }

  async rejectCandidate(id: number, rmpLegacyId: number): Promise<void> {
    return this.requestVoid(`/admin/instructors/${id}/reject-candidate`, {
      method: "POST",
      body: { rmpLegacyId },
    });
  }

  async rejectAllCandidates(id: number): Promise<void> {
    return this.requestVoid(`/admin/instructors/${id}/reject-all`, {
      method: "POST",
    });
  }

  async unmatchInstructor(id: number, rmpLegacyId?: number): Promise<void> {
    return this.requestVoid(`/admin/instructors/${id}/unmatch`, {
      method: "POST",
      ...(rmpLegacyId !== undefined ? { body: { rmpLegacyId } } : {}),
    });
  }

  async rescoreInstructors(): Promise<RescoreResponse> {
    return this.request<RescoreResponse>("/admin/rmp/rescore", {
      method: "POST",
    });
  }

  // Scraper analytics endpoints

  async getScraperStats(period?: ScraperPeriod): Promise<ScraperStatsResponse> {
    const qs = period ? `?period=${period}` : "";
    return this.request<ScraperStatsResponse>(`/admin/scraper/stats${qs}`);
  }

  async getScraperTimeseries(period?: ScraperPeriod, bucket?: string): Promise<TimeseriesResponse> {
    const query = new URLSearchParams();
    if (period) query.set("period", period);
    if (bucket) query.set("bucket", bucket);
    const qs = query.toString();
    return this.request<TimeseriesResponse>(`/admin/scraper/timeseries${qs ? `?${qs}` : ""}`);
  }

  async getScraperSubjects(): Promise<SubjectsResponse> {
    return this.request<SubjectsResponse>("/admin/scraper/subjects");
  }

  async getScraperSubjectDetail(subject: string, limit?: number): Promise<SubjectDetailResponse> {
    const qs = limit !== undefined ? `?limit=${limit}` : "";
    return this.request<SubjectDetailResponse>(
      `/admin/scraper/subjects/${encodeURIComponent(subject)}${qs}`
    );
  }
}

export const client = new BannerApiClient();
@@ -0,0 +1,84 @@
import type { User } from "$lib/bindings";

type AuthState =
  | { mode: "loading" }
  | { mode: "authenticated"; user: User }
  | { mode: "unauthenticated" };

class AuthStore {
  state = $state<AuthState>({ mode: "loading" });

  get user(): User | null {
    return this.state.mode === "authenticated" ? this.state.user : null;
  }

  get isAdmin(): boolean {
    return this.user?.isAdmin ?? false;
  }

  get isLoading(): boolean {
    return this.state.mode === "loading";
  }

  get isAuthenticated(): boolean {
    return this.state.mode === "authenticated";
  }

  /**
   * Attempt to load the current user session from the backend.
   * Only transitions to "unauthenticated" on a definitive 401/403.
   * Retries indefinitely on transient failures (network errors, 5xx)
   * so that a slow backend startup doesn't kick the user to login.
   */
  async init() {
    const MAX_DELAY_MS = 7_000;
    let delayMs = 500;

    for (;;) {
      try {
        const response = await fetch("/api/auth/me");

        if (response.ok) {
          const user: User = await response.json();
          this.state = { mode: "authenticated", user };
          return;
        }

        // Definitive rejection — no session or not authorized
        if (response.status === 401 || response.status === 403) {
          this.state = { mode: "unauthenticated" };
          return;
        }

        // Server error (5xx) or unexpected status — retry
      } catch {
        // Network error (backend not up yet) — retry
      }

      await new Promise((r) => setTimeout(r, delayMs));
      delayMs = Math.min(delayMs * 2, MAX_DELAY_MS);
    }
  }

  /** Idempotently mark the session as lost. Called by apiFetch on 401. */
  handleUnauthorized() {
    if (this.state.mode !== "unauthenticated") {
      this.state = { mode: "unauthenticated" };
    }
  }

  login() {
    window.location.href = "/api/auth/login";
  }

  async logout() {
    try {
      await fetch("/api/auth/logout", { method: "POST" });
    } finally {
      this.state = { mode: "unauthenticated" };
      window.location.href = "/";
    }
  }
}

export const authStore = new AuthStore();
@@ -0,0 +1,26 @@
export type { CandidateResponse } from "./CandidateResponse";
export type { CodeDescription } from "./CodeDescription";
export type { CourseResponse } from "./CourseResponse";
export type { DbMeetingTime } from "./DbMeetingTime";
export type { InstructorDetail } from "./InstructorDetail";
export type { InstructorDetailResponse } from "./InstructorDetailResponse";
export type { InstructorListItem } from "./InstructorListItem";
export type { InstructorResponse } from "./InstructorResponse";
export type { InstructorStats } from "./InstructorStats";
export type { LinkedRmpProfile } from "./LinkedRmpProfile";
export type { ListInstructorsResponse } from "./ListInstructorsResponse";
export type { OkResponse } from "./OkResponse";
export type { RescoreResponse } from "./RescoreResponse";
export type { ScraperStatsResponse } from "./ScraperStatsResponse";
export type { SearchResponse } from "./SearchResponse";
export type { ServiceInfo } from "./ServiceInfo";
export type { ServiceStatus } from "./ServiceStatus";
export type { StatusResponse } from "./StatusResponse";
export type { SubjectDetailResponse } from "./SubjectDetailResponse";
export type { SubjectResultEntry } from "./SubjectResultEntry";
export type { SubjectSummary } from "./SubjectSummary";
export type { SubjectsResponse } from "./SubjectsResponse";
export type { TimeseriesPoint } from "./TimeseriesPoint";
export type { TimeseriesResponse } from "./TimeseriesResponse";
export type { TopCandidateResponse } from "./TopCandidateResponse";
export type { User } from "./User";
@@ -7,251 +7,346 @@ import {
  formatMeetingDaysLong,
  isMeetingTimeTBA,
  isTimeTBA,
  ratingStyle,
  rmpUrl,
  RMP_CONFIDENCE_THRESHOLD,
} from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { cn, tooltipContentClass, formatNumber } from "$lib/utils";
import { Tooltip } from "bits-ui";
import { Info, Copy, Check } from "@lucide/svelte";
import SimpleTooltip from "./SimpleTooltip.svelte";
import {
  Info,
  Copy,
  Check,
  Star,
  Triangle,
  ExternalLink,
  Calendar,
  Download,
} from "@lucide/svelte";

let { course }: { course: CourseResponse } = $props();

let copiedEmail: string | null = $state(null);

async function copyEmail(email: string, event: MouseEvent) {
  event.stopPropagation();
  await navigator.clipboard.writeText(email);
  copiedEmail = email;
  setTimeout(() => {
    copiedEmail = null;
  }, 2000);
}
const clipboard = useClipboard();
</script>

<div class="bg-muted/60 p-5 text-sm border-b border-border">
<div class="grid grid-cols-1 sm:grid-cols-2 gap-5">
<!-- Instructors -->
<div>
<h4 class="text-sm text-foreground mb-2">
Instructors
</h4>
{#if course.instructors.length > 0}
<div class="flex flex-wrap gap-1.5">
{#each course.instructors as instructor}
<Tooltip.Root delayDuration={200}>
<Tooltip.Trigger>
<span
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
{instructor.displayName}
{#if 'rmpRating' in instructor && instructor.rmpRating}
{@const rating = instructor.rmpRating as number}
<span
class="text-[10px] font-semibold {rating >= 4.0 ? 'text-status-green' : rating >= 3.0 ? 'text-yellow-500' : 'text-status-red'}"
>{rating.toFixed(1)}★</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
class="bg-card text-card-foreground text-xs border border-border rounded-md px-3 py-2 shadow-md max-w-72"
>
<div class="space-y-1.5">
<div class="font-medium">{instructor.displayName}</div>
{#if instructor.isPrimary}
<div class="text-muted-foreground">Primary instructor</div>
{/if}
{#if 'rmpRating' in instructor && instructor.rmpRating}
<div class="text-muted-foreground">
{(instructor.rmpRating as number).toFixed(1)}/5 ({(instructor as any).rmpNumRatings ?? 0} ratings)
</div>
{/if}
{#if instructor.email}
<button
onclick={(e) => copyEmail(instructor.email!, e)}
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
>
{#if copiedEmail === instructor.email}
<Check class="size-3" />
<span>Copied!</span>
{:else}
<Copy class="size-3" />
<span>{instructor.email}</span>
{/if}
</button>
{/if}
<div class="grid grid-cols-1 sm:grid-cols-2 gap-5">
<!-- Instructors -->
<div>
<h4 class="text-sm text-foreground mb-2">Instructors</h4>
{#if course.instructors.length > 0}
<div class="flex flex-wrap gap-1.5">
{#each course.instructors as instructor}
<Tooltip.Root delayDuration={200}>
<Tooltip.Trigger>
<span
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
{instructor.displayName}
{#if instructor.rmpRating != null}
{@const rating = instructor.rmpRating}
{@const lowConfidence =
(instructor.rmpNumRatings ?? 0) <
RMP_CONFIDENCE_THRESHOLD}
<span
class="text-[10px] font-semibold inline-flex items-center gap-0.5"
style={ratingStyle(
rating,
themeStore.isDark,
)}
>
{rating.toFixed(1)}
{#if lowConfidence}
<Triangle
class="size-2 fill-current"
/>
{:else}
<Star
class="size-2.5 fill-current"
/>
{/if}
</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
sideOffset={6}
class={cn(tooltipContentClass, "px-3 py-2")}
>
<div class="space-y-1.5">
<div class="font-medium">
{instructor.displayName}
</div>
{#if instructor.isPrimary}
<div class="text-muted-foreground">
Primary instructor
</div>
{/if}
{#if instructor.rmpRating != null}
<div class="text-muted-foreground">
{instructor.rmpRating.toFixed(1)}/5
· {instructor.rmpNumRatings ?? 0} ratings
{#if (instructor.rmpNumRatings ?? 0) < RMP_CONFIDENCE_THRESHOLD}
(low)
{/if}
</div>
{/if}
{#if instructor.rmpLegacyId != null}
<a
href={rmpUrl(
instructor.rmpLegacyId,
)}
target="_blank"
rel="noopener"
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors"
>
<ExternalLink class="size-3" />
<span>View on RMP</span>
</a>
{/if}
{#if instructor.email}
<button
onclick={(e) =>
clipboard.copy(
instructor.email!,
e,
)}
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
>
{#if clipboard.copiedValue === instructor.email}
<Check class="size-3" />
<span>Copied!</span>
{:else}
<Copy class="size-3" />
<span>{instructor.email}</span>
{/if}
</button>
{/if}
</div>
</Tooltip.Content>
</Tooltip.Root>
{/each}
</div>
</Tooltip.Content>
</Tooltip.Root>
{/each}
</div>
{:else}
<span class="text-muted-foreground italic">Staff</span>
{/if}
</div>

<!-- Meeting Times -->
<div>
<h4 class="text-sm text-foreground mb-2">
Meeting Times
</h4>
{#if course.meetingTimes.length > 0}
<ul class="space-y-2">
{#each course.meetingTimes as mt}
<li>
{#if isMeetingTimeTBA(mt) && isTimeTBA(mt)}
<span class="italic text-muted-foreground">TBA</span>
{:else}
<div class="flex items-baseline gap-1.5">
{#if !isMeetingTimeTBA(mt)}
<span class="font-medium text-foreground">
{formatMeetingDaysLong(mt)}
</span>
{/if}
{#if !isTimeTBA(mt)}
<span class="text-muted-foreground">
{formatTime(mt.begin_time)}–{formatTime(mt.end_time)}
</span>
{:else}
<span class="italic text-muted-foreground">Time TBA</span>
{/if}
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
{#if mt.building || mt.room}
|
||||
<div class="text-xs text-muted-foreground mt-0.5">
|
||||
{mt.building_description ?? mt.building}{mt.room ? ` ${mt.room}` : ""}
|
||||
</div>
|
||||
{/if}
|
||||
<div class="text-xs text-muted-foreground/70 mt-0.5">
|
||||
{formatDate(mt.start_date)} – {formatDate(mt.end_date)}
|
||||
</div>
|
||||
</li>
|
||||
{/each}
|
||||
</ul>
|
||||
{:else}
|
||||
<span class="italic text-muted-foreground">TBA</span>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<!-- Delivery -->
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
<span class="inline-flex items-center gap-1">
|
||||
Delivery
|
||||
<Tooltip.Root delayDuration={100}>
|
||||
<Tooltip.Trigger>
|
||||
<Info class="size-3 text-muted-foreground/50" />
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content
|
||||
class="bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-md max-w-72"
|
||||
>
|
||||
How the course is taught: in-person, online, hybrid, etc.
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</span>
|
||||
</h4>
|
||||
<span class="text-foreground">
|
||||
{course.instructionalMethod ?? "—"}
|
||||
{#if course.campus}
|
||||
<span class="text-muted-foreground"> · {course.campus}</span>
|
||||
{/if}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<!-- Credits -->
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
Credits
|
||||
</h4>
|
||||
<span class="text-foreground">{formatCreditHours(course)}</span>
|
||||
</div>
|
||||
|
||||
<!-- Attributes -->
|
||||
{#if course.attributes.length > 0}
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
<span class="inline-flex items-center gap-1">
|
||||
Attributes
|
||||
<Tooltip.Root delayDuration={100}>
|
||||
<Tooltip.Trigger>
|
||||
<Info class="size-3 text-muted-foreground/50" />
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content
|
||||
class="bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-md max-w-72"
|
||||
>
|
||||
Course flags for degree requirements, core curriculum, or special designations
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</span>
|
||||
</h4>
|
||||
<div class="flex flex-wrap gap-1.5">
|
||||
{#each course.attributes as attr}
|
||||
<Tooltip.Root delayDuration={100}>
|
||||
<Tooltip.Trigger>
|
||||
<span
|
||||
class="inline-flex text-xs font-medium bg-card border border-border rounded-md px-2 py-0.5 text-muted-foreground hover:text-foreground hover:border-foreground/20 transition-colors"
|
||||
>
|
||||
{attr}
|
||||
</span>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content
|
||||
class="bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-md max-w-64"
|
||||
>
|
||||
Course attribute code
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- Cross-list -->
|
||||
{#if course.crossList}
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
<span class="inline-flex items-center gap-1">
|
||||
Cross-list
|
||||
<Tooltip.Root delayDuration={100}>
|
||||
<Tooltip.Trigger>
|
||||
<Info class="size-3 text-muted-foreground/50" />
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content
|
||||
class="bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-md max-w-72"
|
||||
>
|
||||
Cross-listed sections share enrollment across multiple course numbers. Students in any linked section attend the same class.
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</span>
|
||||
</h4>
|
||||
<Tooltip.Root delayDuration={100}>
|
||||
<Tooltip.Trigger>
|
||||
<span class="inline-flex items-center gap-1.5 text-foreground font-mono">
|
||||
<span class="bg-card border border-border rounded-md px-2 py-0.5 text-xs font-medium">
|
||||
{course.crossList}
|
||||
</span>
|
||||
{#if course.crossListCount != null && course.crossListCapacity != null}
|
||||
<span class="text-muted-foreground text-xs">
|
||||
{course.crossListCount}/{course.crossListCapacity}
|
||||
</span>
|
||||
{/if}
|
||||
</span>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content
|
||||
class="bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-md max-w-72"
|
||||
>
|
||||
Group <span class="font-mono font-medium">{course.crossList}</span>
|
||||
{#if course.crossListCount != null && course.crossListCapacity != null}
|
||||
— {course.crossListCount} enrolled across {course.crossListCapacity} shared seats
|
||||
{:else}
|
||||
<span class="text-muted-foreground italic">Staff</span>
|
||||
{/if}
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<!-- Waitlist -->
|
||||
{#if course.waitCapacity > 0}
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
Waitlist
|
||||
</h4>
|
||||
<span class="text-foreground">{course.waitCount} / {course.waitCapacity}</span>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
<!-- Meeting Times -->
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">Meeting Times</h4>
|
||||
{#if course.meetingTimes.length > 0}
|
||||
<ul class="space-y-2">
|
||||
{#each course.meetingTimes as mt}
|
||||
<li>
|
||||
{#if isMeetingTimeTBA(mt) && isTimeTBA(mt)}
|
||||
<span class="italic text-muted-foreground"
|
||||
>TBA</span
|
||||
>
|
||||
{:else}
|
||||
<div class="flex items-baseline gap-1.5">
|
||||
{#if !isMeetingTimeTBA(mt)}
|
||||
<span
|
||||
class="font-medium text-foreground"
|
||||
>
|
||||
{formatMeetingDaysLong(mt)}
|
||||
</span>
|
||||
{/if}
|
||||
{#if !isTimeTBA(mt)}
|
||||
<span class="text-muted-foreground">
|
||||
{formatTime(
|
||||
mt.begin_time,
|
||||
)}–{formatTime(mt.end_time)}
|
||||
</span>
|
||||
{:else}
|
||||
<span
|
||||
class="italic text-muted-foreground"
|
||||
>Time TBA</span
|
||||
>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
{#if mt.building || mt.room}
|
||||
<div
|
||||
class="text-xs text-muted-foreground mt-0.5"
|
||||
>
|
||||
{mt.building_description ??
|
||||
mt.building}{mt.room
|
||||
? ` ${mt.room}`
|
||||
: ""}
|
||||
</div>
|
||||
{/if}
|
||||
<div
|
||||
class="text-xs text-muted-foreground/70 mt-0.5"
|
||||
>
|
||||
{formatDate(mt.start_date)} – {formatDate(
|
||||
mt.end_date,
|
||||
)}
|
||||
</div>
|
||||
</li>
|
||||
{/each}
|
||||
</ul>
|
||||
{:else}
|
||||
<span class="italic text-muted-foreground">TBA</span>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<!-- Delivery -->
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
<span class="inline-flex items-center gap-1">
|
||||
Delivery
|
||||
<SimpleTooltip
|
||||
text="How the course is taught: in-person, online, hybrid, etc."
|
||||
delay={150}
|
||||
passthrough
|
||||
>
|
||||
<Info class="size-3 text-muted-foreground/50" />
|
||||
</SimpleTooltip>
|
||||
</span>
|
||||
</h4>
|
||||
<span class="text-foreground">
|
||||
{course.instructionalMethod ?? "—"}
|
||||
{#if course.campus}
|
||||
<span class="text-muted-foreground">
|
||||
· {course.campus}
|
||||
</span>
|
||||
{/if}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<!-- Credits -->
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">Credits</h4>
|
||||
<span class="text-foreground">{formatCreditHours(course)}</span>
|
||||
</div>
|
||||
|
||||
<!-- Attributes -->
|
||||
{#if course.attributes.length > 0}
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
<span class="inline-flex items-center gap-1">
|
||||
Attributes
|
||||
<SimpleTooltip
|
||||
text="Course flags for degree requirements, core curriculum, or special designations"
|
||||
delay={150}
|
||||
passthrough
|
||||
>
|
||||
<Info class="size-3 text-muted-foreground/50" />
|
||||
</SimpleTooltip>
|
||||
</span>
|
||||
</h4>
|
||||
<div class="flex flex-wrap gap-1.5">
|
||||
{#each course.attributes as attr}
|
||||
<SimpleTooltip
|
||||
text="Course attribute code"
|
||||
delay={150}
|
||||
passthrough
|
||||
>
|
||||
<span
|
||||
class="inline-flex text-xs font-medium bg-card border border-border rounded-md px-2 py-0.5 text-muted-foreground hover:text-foreground hover:border-foreground/20 transition-colors"
|
||||
>
|
||||
{attr}
|
||||
</span>
|
||||
</SimpleTooltip>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- Cross-list -->
|
||||
{#if course.crossList}
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
<span class="inline-flex items-center gap-1">
|
||||
Cross-list
|
||||
<SimpleTooltip
|
||||
text="Cross-listed sections share enrollment across multiple course numbers. Students in any linked section attend the same class."
|
||||
delay={150}
|
||||
passthrough
|
||||
>
|
||||
<Info class="size-3 text-muted-foreground/50" />
|
||||
</SimpleTooltip>
|
||||
</span>
|
||||
</h4>
|
||||
<Tooltip.Root delayDuration={150} disableHoverableContent>
|
||||
<Tooltip.Trigger>
|
||||
<span
|
||||
class="inline-flex items-center gap-1.5 text-foreground font-mono"
|
||||
>
|
||||
<span
|
||||
class="bg-card border border-border rounded-md px-2 py-0.5 text-xs font-medium"
|
||||
>
|
||||
{course.crossList}
|
||||
</span>
|
||||
{#if course.crossListCount != null && course.crossListCapacity != null}
|
||||
<span class="text-muted-foreground text-xs">
|
||||
{formatNumber(course.crossListCount)}/{formatNumber(course.crossListCapacity)}
|
||||
</span>
|
||||
{/if}
|
||||
</span>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content sideOffset={6} class={tooltipContentClass}>
|
||||
Group <span class="font-mono font-medium"
|
||||
>{course.crossList}</span
|
||||
>
|
||||
{#if course.crossListCount != null && course.crossListCapacity != null}
|
||||
— {formatNumber(course.crossListCount)} enrolled across {formatNumber(course.crossListCapacity)}
|
||||
shared seats
|
||||
{/if}
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- Waitlist -->
|
||||
{#if course.waitCapacity > 0}
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">Waitlist</h4>
|
||||
<span class="text-2foreground"
|
||||
>{formatNumber(course.waitCount)} / {formatNumber(course.waitCapacity)}</span
|
||||
>
|
||||
</div>
{/if}

<!-- Calendar Export -->
{#if course.meetingTimes.length > 0}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Calendar
<SimpleTooltip
text="Export this course schedule to your calendar app"
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<div class="flex flex-wrap gap-1.5">
<a
href="/api/courses/{course.termCode}/{course.crn}/calendar.ics"
download
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
<Download class="size-3.5" />
ICS File
</a>
<a
href="/api/courses/{course.termCode}/{course.crn}/gcal"
target="_blank"
rel="noopener"
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
<Calendar class="size-3.5" />
Google Calendar
</a>
</div>
</div>
{/if}
</div>
</div>

@@ -2,14 +2,29 @@
import type { CourseResponse } from "$lib/api";
import {
abbreviateInstructor,
formatTime,
concernAccentColor,
formatLocationDisplay,
formatLocationTooltip,
formatMeetingDays,
formatLocation,
formatMeetingTimesTooltip,
formatTimeRange,
getDeliveryConcern,
getPrimaryInstructor,
isMeetingTimeTBA,
isTimeTBA,
openSeats,
seatsColor,
seatsDotColor,
ratingStyle,
rmpUrl,
RMP_CONFIDENCE_THRESHOLD,
} from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
import CourseDetail from "./CourseDetail.svelte";
import { fade, fly, slide } from "svelte/transition";
import { flip } from "svelte/animate";
import { createSvelteTable, FlexRender } from "$lib/components/ui/data-table/index.js";
import {
getCoreRowModel,
@@ -19,9 +34,20 @@ import {
type VisibilityState,
type Updater,
} from "@tanstack/table-core";
import { ArrowUp, ArrowDown, ArrowUpDown, Columns3, Check, RotateCcw } from "@lucide/svelte";
import { DropdownMenu, ContextMenu } from "bits-ui";
import { fade, fly } from "svelte/transition";
import {
ArrowUp,
ArrowDown,
ArrowUpDown,
Columns3,
Check,
RotateCcw,
Star,
Triangle,
ExternalLink,
} from "@lucide/svelte";
import { DropdownMenu, ContextMenu, Tooltip } from "bits-ui";
import { cn, tooltipContentClass, formatNumber } from "$lib/utils";
import SimpleTooltip from "./SimpleTooltip.svelte";

let {
courses,
@@ -29,23 +55,37 @@ let {
sorting = [],
onSortingChange,
manualSorting = false,
subjectMap = {},
}: {
courses: CourseResponse[];
loading: boolean;
sorting?: SortingState;
onSortingChange?: (sorting: SortingState) => void;
manualSorting?: boolean;
subjectMap?: Record<string, string>;
} = $props();

let expandedCrn: string | null = $state(null);
let tableWrapper: HTMLDivElement = undefined!;
const clipboard = useClipboard(1000);

// Collapse expanded row when the dataset changes to avoid stale detail rows
// and FLIP position calculation glitches from lingering expanded content
$effect(() => {
courses; // track dependency
expandedCrn = null;
});

useOverlayScrollbars(() => tableWrapper, {
overflow: { x: "scroll", y: "hidden" },
scrollbars: { autoHide: "never" },
});

// Column visibility state
let columnVisibility: VisibilityState = $state({});

const DEFAULT_VISIBILITY: VisibilityState = {};

function resetColumnVisibility() {
columnVisibility = { ...DEFAULT_VISIBILITY };
columnVisibility = {};
}

function handleVisibilityChange(updater: Updater<VisibilityState>) {
@@ -59,40 +99,22 @@ function toggleRow(crn: string) {
expandedCrn = expandedCrn === crn ? null : crn;
}

function openSeats(course: CourseResponse): number {
return Math.max(0, course.maxEnrollment - course.enrollment);
}

function seatsColor(course: CourseResponse): string {
const open = openSeats(course);
if (open === 0) return "text-status-red";
if (open <= 5) return "text-yellow-500";
return "text-status-green";
}

function seatsDotColor(course: CourseResponse): string {
const open = openSeats(course);
if (open === 0) return "bg-red-500";
if (open <= 5) return "bg-yellow-500";
return "bg-green-500";
}

function primaryInstructorDisplay(course: CourseResponse): string {
const primary = getPrimaryInstructor(course.instructors);
if (!primary) return "Staff";
return abbreviateInstructor(primary.displayName);
}

function ratingColor(rating: number): string {
if (rating >= 4.0) return "text-status-green";
if (rating >= 3.0) return "text-yellow-500";
return "text-status-red";
}

function primaryRating(course: CourseResponse): { rating: number; count: number } | null {
function primaryRating(
course: CourseResponse
): { rating: number; count: number; legacyId: number | null } | null {
const primary = getPrimaryInstructor(course.instructors);
if (!primary?.rmpRating) return null;
return { rating: primary.rmpRating, count: primary.rmpNumRatings ?? 0 };
return {
rating: primary.rmpRating,
count: primary.rmpNumRatings ?? 0,
legacyId: primary.rmpLegacyId ?? null,
};
}

function timeIsTBA(course: CourseResponse): boolean {
@@ -132,14 +154,14 @@ const columns: ColumnDef<CourseResponse, unknown>[] = [
accessorFn: (row) => {
if (row.meetingTimes.length === 0) return "";
const mt = row.meetingTimes[0];
return `${formatMeetingDays(mt)} ${formatTime(mt.begin_time)}`;
return `${formatMeetingDays(mt)} ${formatTimeRange(mt.begin_time, mt.end_time)}`;
},
header: "Time",
enableSorting: true,
},
{
id: "location",
accessorFn: (row) => formatLocation(row) ?? "",
accessorFn: (row) => formatLocationDisplay(row) ?? "",
header: "Location",
enableSorting: false,
},
@@ -167,6 +189,7 @@ const table = createSvelteTable({
get data() {
return courses;
},
getRowId: (row) => String(row.crn),
columns,
state: {
get sorting() {
@@ -189,264 +212,576 @@ const table = createSvelteTable({
});
</script>

{#snippet columnVisibilityItems(variant: "dropdown" | "context")}
{#if variant === "dropdown"}
<DropdownMenu.Group>
<DropdownMenu.GroupHeading class="px-2 py-1.5 text-xs font-medium text-muted-foreground">
Toggle columns
</DropdownMenu.GroupHeading>
{#each columns as col}
{@const id = col.id!}
{@const label = typeof col.header === "string" ? col.header : id}
<DropdownMenu.CheckboxItem
checked={columnVisibility[id] !== false}
closeOnSelect={false}
onCheckedChange={(checked) => {
columnVisibility = { ...columnVisibility, [id]: checked };
}}
class="relative flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground"
{#snippet columnVisibilityGroup(
Group: typeof DropdownMenu.Group,
GroupHeading: typeof DropdownMenu.GroupHeading,
CheckboxItem: typeof DropdownMenu.CheckboxItem,
Separator: typeof DropdownMenu.Separator,
Item: typeof DropdownMenu.Item,
)}
<Group>
<GroupHeading
class="px-2 py-1.5 text-xs font-medium text-muted-foreground"
>
{#snippet children({ checked })}
<span class="flex size-4 items-center justify-center rounded-sm border border-border">
{#if checked}
<Check class="size-3" />
{/if}
</span>
{label}
{/snippet}
</DropdownMenu.CheckboxItem>
{/each}
</DropdownMenu.Group>
Toggle columns
</GroupHeading>
{#each columns as col}
{@const id = col.id!}
{@const label = typeof col.header === "string" ? col.header : id}
<CheckboxItem
checked={columnVisibility[id] !== false}
closeOnSelect={false}
onCheckedChange={(checked) => {
columnVisibility = {
...columnVisibility,
[id]: checked,
};
}}
class="relative flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-highlighted:bg-accent data-highlighted:text-accent-foreground"
>
{#snippet children({ checked })}
<span
class="flex size-4 items-center justify-center rounded-sm border border-border"
>
{#if checked}
<Check class="size-3" />
{/if}
</span>
{label}
{/snippet}
</CheckboxItem>
{/each}
</Group>
{#if hasCustomVisibility}
<DropdownMenu.Separator class="mx-1 my-1 h-px bg-border" />
<DropdownMenu.Item
class="flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground"
onSelect={resetColumnVisibility}
>
<RotateCcw class="size-3.5" />
Reset to default
</DropdownMenu.Item>
{/if}
{:else}
<ContextMenu.Group>
<ContextMenu.GroupHeading class="px-2 py-1.5 text-xs font-medium text-muted-foreground">
Toggle columns
</ContextMenu.GroupHeading>
{#each columns as col}
{@const id = col.id!}
{@const label = typeof col.header === "string" ? col.header : id}
<ContextMenu.CheckboxItem
checked={columnVisibility[id] !== false}
closeOnSelect={false}
onCheckedChange={(checked) => {
columnVisibility = { ...columnVisibility, [id]: checked };
}}
class="relative flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground"
<Separator class="mx-1 my-1 h-px bg-border" />
<Item
class="flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-highlighted:bg-accent data-highlighted:text-accent-foreground"
onSelect={resetColumnVisibility}
>
{#snippet children({ checked })}
<span class="flex size-4 items-center justify-center rounded-sm border border-border">
{#if checked}
<Check class="size-3" />
{/if}
</span>
{label}
{/snippet}
</ContextMenu.CheckboxItem>
{/each}
</ContextMenu.Group>
{#if hasCustomVisibility}
<ContextMenu.Separator class="mx-1 my-1 h-px bg-border" />
<ContextMenu.Item
class="flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground"
onSelect={resetColumnVisibility}
>
<RotateCcw class="size-3.5" />
Reset to default
</ContextMenu.Item>
<RotateCcw class="size-3.5" />
Reset to default
</Item>
{/if}
{/if}
{/snippet}

<!-- Toolbar: View columns button -->
<div class="flex items-center justify-end pb-2">
<DropdownMenu.Root>
<DropdownMenu.Trigger
class="inline-flex items-center gap-1.5 rounded-md border border-border bg-background px-2.5 py-1.5 text-xs font-medium text-muted-foreground hover:bg-accent hover:text-accent-foreground transition-colors cursor-pointer"
>
<Columns3 class="size-3.5" />
View
</DropdownMenu.Trigger>
<DropdownMenu.Portal>
<DropdownMenu.Content
class="z-50 min-w-[160px] rounded-md border border-border bg-card p-1 text-card-foreground shadow-lg"
align="end"
sideOffset={4}
forceMount
>
{#snippet child({ wrapperProps, props, open })}
{#if open}
<div {...wrapperProps}>
<div {...props} transition:fly={{ duration: 150, y: -10 }}>
{@render columnVisibilityItems("dropdown")}
</div>
</div>
{/if}
{/snippet}
</DropdownMenu.Content>
</DropdownMenu.Portal>
</DropdownMenu.Root>
<DropdownMenu.Root>
<DropdownMenu.Trigger
class="inline-flex items-center gap-1.5 rounded-md border border-border bg-background px-2.5 py-1.5 text-xs font-medium text-muted-foreground hover:bg-accent hover:text-accent-foreground transition-colors cursor-pointer"
>
<Columns3 class="size-3.5" />
View
</DropdownMenu.Trigger>
<DropdownMenu.Portal>
<DropdownMenu.Content
class="z-50 min-w-40 rounded-md border border-border bg-card p-1 text-card-foreground shadow-lg"
align="end"
sideOffset={4}
forceMount
>
{#snippet child({ wrapperProps, props, open })}
{#if open}
<div {...wrapperProps}>
<div
{...props}
transition:fly={{ duration: 150, y: -10 }}
>
{@render columnVisibilityGroup(
DropdownMenu.Group,
DropdownMenu.GroupHeading,
DropdownMenu.CheckboxItem,
DropdownMenu.Separator,
DropdownMenu.Item,
)}
</div>
</div>
{/if}
{/snippet}
</DropdownMenu.Content>
</DropdownMenu.Portal>
</DropdownMenu.Root>
</div>

<!-- Table with context menu on header -->
<div class="overflow-x-auto">
<ContextMenu.Root>
<ContextMenu.Trigger class="contents">
<table class="w-full border-collapse text-sm">
<thead>
{#each table.getHeaderGroups() as headerGroup}
<tr class="border-b border-border text-left text-muted-foreground">
{#each headerGroup.headers as header}
{#if header.column.getIsVisible()}
<th
class="py-2 px-2 font-medium {header.id === 'seats' ? 'text-right' : ''}"
class:cursor-pointer={header.column.getCanSort()}
class:select-none={header.column.getCanSort()}
onclick={header.column.getToggleSortingHandler()}
>
{#if header.column.getCanSort()}
<span class="inline-flex items-center gap-1">
{#if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender content={header.column.columnDef.header} context={header.getContext()} />
{/if}
{#if header.column.getIsSorted() === "asc"}
<ArrowUp class="size-3.5" />
{:else if header.column.getIsSorted() === "desc"}
<ArrowDown class="size-3.5" />
{:else}
<ArrowUpDown class="size-3.5 text-muted-foreground/40" />
{/if}
</span>
{:else if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender content={header.column.columnDef.header} context={header.getContext()} />
{/if}
</th>
<div bind:this={tableWrapper} class="overflow-x-auto">
<ContextMenu.Root>
<ContextMenu.Trigger class="contents">
<table class="w-full min-w-160 border-collapse text-sm">
<thead>
{#each table.getHeaderGroups() as headerGroup}
<tr
class="border-b border-border text-left text-muted-foreground"
>
{#each headerGroup.headers as header}
{#if header.column.getIsVisible()}
<th
class="py-2 px-2 font-medium {header.id ===
'seats'
? 'text-right'
: ''}"
class:cursor-pointer={header.column.getCanSort()}
class:select-none={header.column.getCanSort()}
onclick={header.column.getToggleSortingHandler()}
>
{#if header.column.getCanSort()}
<span
class="inline-flex items-center gap-1"
>
{#if typeof header.column.columnDef.header === "string"}
{header.column.columnDef
.header}
{:else}
<FlexRender
content={header.column
.columnDef.header}
context={header.getContext()}
/>
{/if}
{#if header.column.getIsSorted() === "asc"}
<ArrowUp class="size-3.5" />
{:else if header.column.getIsSorted() === "desc"}
<ArrowDown
class="size-3.5"
/>
{:else}
<ArrowUpDown
class="size-3.5 text-muted-foreground/40"
/>
{/if}
</span>
{:else if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender
content={header.column.columnDef
.header}
context={header.getContext()}
/>
{/if}
</th>
{/if}
{/each}
</tr>
{/each}
</thead>
{#if loading && courses.length === 0}
<tbody>
{#each Array(5) as _}
<tr class="border-b border-border">
{#each table.getVisibleLeafColumns() as col}
<td class="py-2.5 px-2">
<div
class="h-4 bg-muted rounded animate-pulse {col.id ===
'seats'
? 'w-14 ml-auto'
: col.id === 'title'
? 'w-40'
: col.id === 'crn'
? 'w-10'
: 'w-20'}"
></div>
</td>
{/each}
</tr>
{/each}
</tbody>
{:else if courses.length === 0}
<tbody>
<tr>
<td
colspan={visibleColumnIds.length}
class="py-12 text-center text-muted-foreground"
>
No courses found. Try adjusting your filters.
</td>
</tr>
</tbody>
{:else}
<!-- No out: transition — Svelte outros break table layout (tbody loses positioning and overlaps) -->
{#each table.getRowModel().rows as row, i (row.id)}
{@const course = row.original}
<tbody
animate:flip={{ duration: 300 }}
in:fade={{
duration: 200,
delay: Math.min(i * 20, 400),
}}
>
<tr
class="border-b border-border cursor-pointer hover:bg-muted/50 transition-colors whitespace-nowrap {expandedCrn ===
course.crn
? 'bg-muted/30'
: ''}"
onclick={() => toggleRow(course.crn)}
>
{#each row.getVisibleCells() as cell (cell.id)}
{@const colId = cell.column.id}
{#if colId === "crn"}
<td class="py-2 px-2 relative">
<button
class="relative inline-flex items-center rounded-full px-2 py-0.5 border border-border/50 bg-muted/20 hover:bg-muted/40 hover:border-foreground/30 transition-colors duration-150 cursor-copy focus-visible:outline-2 focus-visible:outline-offset-1 focus-visible:outline-ring font-mono text-xs text-muted-foreground/70"
onclick={(e) =>
clipboard.copy(
course.crn,
e,
)}
onkeydown={(e) => {
if (
e.key === "Enter" ||
e.key === " "
) {
e.preventDefault();
clipboard.copy(
course.crn,
e,
);
}
}}
aria-label="Copy CRN {course.crn} to clipboard"
>
{course.crn}
{#if clipboard.copiedValue === course.crn}
<span
class="absolute -top-8 left-1/2 -translate-x-1/2 whitespace-nowrap text-xs px-2 py-1 rounded-md bg-green-500/10 border border-green-500/20 text-green-700 dark:text-green-300 pointer-events-none z-10"
in:fade={{
duration: 100,
}}
out:fade={{
duration: 200,
}}
>
Copied!
</span>
{/if}
</button>
</td>
{:else if colId === "course_code"}
{@const subjectDesc =
subjectMap[course.subject]}
<td class="py-2 px-2 whitespace-nowrap">
<SimpleTooltip
text={subjectDesc
? `${subjectDesc} ${course.courseNumber}`
: `${course.subject} ${course.courseNumber}`}
delay={200}
side="bottom"
passthrough
>
<span class="font-semibold"
>{course.subject}
{course.courseNumber}</span
>{#if course.sequenceNumber}<span
class="text-muted-foreground"
>-{course.sequenceNumber}</span
>{/if}
</SimpleTooltip>
</td>
{:else if colId === "title"}
<td
class="py-2 px-2 font-medium max-w-50 truncate"
>
<SimpleTooltip
text={course.title}
delay={200}
side="bottom"
passthrough
>
<span class="block truncate"
>{course.title}</span
>
</SimpleTooltip>
</td>
{:else if colId === "instructor"}
{@const primary = getPrimaryInstructor(
course.instructors,
)}
{@const display =
primaryInstructorDisplay(course)}
{@const commaIdx =
display.indexOf(", ")}
{@const ratingData =
primaryRating(course)}
<td class="py-2 px-2 whitespace-nowrap">
{#if display === "Staff"}
<span
class="text-xs text-muted-foreground/60 uppercase"
>Staff</span
>
{:else}
<SimpleTooltip
text={primary?.displayName ??
"Staff"}
delay={200}
side="bottom"
passthrough
>
{#if commaIdx !== -1}
<span
>{display.slice(
0,
commaIdx,
)},
<span
class="text-muted-foreground"
>{display.slice(
commaIdx +
1,
)}</span
></span
>
{:else}
<span>{display}</span>
{/if}
</SimpleTooltip>
{/if}
{#if ratingData}
{@const lowConfidence =
ratingData.count <
RMP_CONFIDENCE_THRESHOLD}
<Tooltip.Root
delayDuration={150}
>
<Tooltip.Trigger>
<span
class="ml-1 text-xs font-medium inline-flex items-center gap-0.5"
style={ratingStyle(
ratingData.rating,
themeStore.isDark,
)}
>
{ratingData.rating.toFixed(
1,
)}
{#if lowConfidence}
<Triangle
class="size-2 fill-current"
/>
{:else}
<Star
class="size-2.5 fill-current"
/>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
side="bottom"
sideOffset={6}
class={cn(
tooltipContentClass,
"px-2.5 py-1.5",
)}
>
<span
class="inline-flex items-center gap-1.5 text-xs"
>
{ratingData.rating.toFixed(
1,
)}/5 · {formatNumber(ratingData.count)}
ratings
{#if (ratingData.count ?? 0) < RMP_CONFIDENCE_THRESHOLD}
(low)
{/if}
{#if ratingData.legacyId != null}
·
<a
href={rmpUrl(
ratingData.legacyId,
)}
target="_blank"
rel="noopener"
class="inline-flex items-center gap-0.5 text-muted-foreground hover:text-foreground transition-colors"
>
RMP
<ExternalLink
class="size-3"
/>
</a>
{/if}
</span>
</Tooltip.Content>
</Tooltip.Root>
{/if}
</td>
{:else if colId === "time"}
<td class="py-2 px-2 whitespace-nowrap">
<SimpleTooltip
text={formatMeetingTimesTooltip(
course.meetingTimes,
)}
passthrough
>
{#if timeIsTBA(course)}
<span
class="text-xs text-muted-foreground/60"
>TBA</span
>
{:else}
{@const mt =
course.meetingTimes[0]}
<span>
{#if !isMeetingTimeTBA(mt)}
<span
class="font-mono font-medium"
>{formatMeetingDays(
mt,
)}</span
>
{" "}
{/if}
{#if !isTimeTBA(mt)}
<span
class="text-muted-foreground"
>{formatTimeRange(
mt.begin_time,
mt.end_time,
)}</span
>
{:else}
<span
class="text-xs text-muted-foreground/60"
>TBA</span
>
{/if}
{#if course.meetingTimes.length > 1}
<span
class="ml-1 text-xs text-muted-foreground/70 font-medium"
>+{course
.meetingTimes
.length -
1}</span
>
{/if}
</span>
{/if}
</SimpleTooltip>
</td>
{:else if colId === "location"}
{@const concern =
getDeliveryConcern(course)}
{@const accentColor =
concernAccentColor(concern)}
{@const locTooltip =
formatLocationTooltip(course)}
{@const locDisplay =
formatLocationDisplay(course)}
<td class="py-2 px-2 whitespace-nowrap">
{#if locTooltip}
<SimpleTooltip
text={locTooltip}
delay={200}
passthrough
>
<span
class="text-muted-foreground"
class:pl-2={accentColor !==
null}
style:border-left={accentColor
? `2px solid ${accentColor}`
: undefined}
>
{locDisplay ?? "—"}
</span>
</SimpleTooltip>
{:else if locDisplay}
<span
class="text-muted-foreground"
>
{locDisplay}
</span>
{:else}
<span
class="text-xs text-muted-foreground/50"
>—</span
>
{/if}
</td>
{:else if colId === "seats"}
<td
class="py-2 px-2 text-right whitespace-nowrap"
>
<SimpleTooltip
text="{formatNumber(openSeats(
course,
))} of {formatNumber(course.maxEnrollment)} seats open, {formatNumber(course.enrollment)} enrolled{course.waitCount >
0
? `, ${formatNumber(course.waitCount)} waitlisted`
: ''}"
delay={200}
side="left"
passthrough
>
<span
class="inline-flex items-center gap-1.5"
>
<span
class="size-1.5 rounded-full {seatsDotColor(
course,
)} shrink-0"
></span>
<span
class="{seatsColor(
course,
)} font-medium tabular-nums"
>{#if openSeats(course) === 0}Full{:else}{openSeats(
course,
)} open{/if}</span
>
<span
class="text-muted-foreground/60 tabular-nums"
>{formatNumber(course.enrollment)}/{formatNumber(course.maxEnrollment)}{#if course.waitCount > 0}
· WL {formatNumber(course.waitCount)}/{formatNumber(course.waitCapacity)}{/if}</span
>
</span>
</SimpleTooltip>
</td>
{/if}
{/each}
</tr>
{#if expandedCrn === course.crn}
<tr>
<td
colspan={visibleColumnIds.length}
class="p-0"
>
<div
transition:slide={{ duration: 200 }}
>
<CourseDetail {course} />
</div>
</td>
</tr>
{/if}
</tbody>
{/each}
{/if}
{/each}
</tr>
{/each}
</thead>
<tbody>
{#if loading && courses.length === 0}
{#each Array(5) as _}
<tr class="border-b border-border">
{#each table.getVisibleLeafColumns() as col}
<td class="py-2.5 px-2">
<div class="h-4 bg-muted rounded animate-pulse {col.id === 'seats' ? 'w-14 ml-auto' : col.id === 'title' ? 'w-40' : col.id === 'crn' ? 'w-10' : 'w-20'}"></div>
</td>
{/each}
</tr>
{/each}
{:else if courses.length === 0}
<tr>
<td colspan={visibleColumnIds.length} class="py-12 text-center text-muted-foreground">
No courses found. Try adjusting your filters.
</td>
</tr>
{:else}
{#each table.getRowModel().rows as row (row.id)}
{@const course = row.original}
<tr
class="border-b border-border cursor-pointer hover:bg-muted/50 transition-colors whitespace-nowrap {expandedCrn === course.crn ? 'bg-muted/30' : ''}"
onclick={() => toggleRow(course.crn)}
>
{#each row.getVisibleCells() as cell (cell.id)}
{@const colId = cell.column.id}
{#if colId === "crn"}
<td class="py-2 px-2 font-mono text-xs text-muted-foreground/70">{course.crn}</td>
{:else if colId === "course_code"}
<td class="py-2 px-2 whitespace-nowrap">
<span class="font-semibold">{course.subject} {course.courseNumber}</span>{#if course.sequenceNumber}<span class="text-muted-foreground">-{course.sequenceNumber}</span>{/if}
</td>
{:else if colId === "title"}
<td class="py-2 px-2 font-medium">{course.title}</td>
{:else if colId === "instructor"}
<td class="py-2 px-2 whitespace-nowrap">
{primaryInstructorDisplay(course)}
{#if primaryRating(course)}
{@const r = primaryRating(course)!}
<span
class="ml-1 text-xs font-medium {ratingColor(r.rating)}"
title="{r.rating.toFixed(1)}/5 ({r.count} ratings)"
>{r.rating.toFixed(1)}★</span>
{/if}
</td>
{:else if colId === "time"}
<td class="py-2 px-2 whitespace-nowrap">
{#if timeIsTBA(course)}
<span class="text-xs text-muted-foreground/60">TBA</span>
{:else}
{@const mt = course.meetingTimes[0]}
{#if !isMeetingTimeTBA(mt)}
<span class="font-mono font-medium">{formatMeetingDays(mt)}</span>
{" "}
{/if}
{#if !isTimeTBA(mt)}
<span class="text-muted-foreground">{formatTime(mt.begin_time)}–{formatTime(mt.end_time)}</span>
{:else}
<span class="text-xs text-muted-foreground/60">TBA</span>
{/if}
{/if}
</td>
{:else if colId === "location"}
<td class="py-2 px-2 whitespace-nowrap">
{#if formatLocation(course)}
<span class="text-muted-foreground">{formatLocation(course)}</span>
{:else}
<span class="text-xs text-muted-foreground/50">—</span>
{/if}
</td>
{:else if colId === "seats"}
<td class="py-2 px-2 text-right whitespace-nowrap">
<span class="inline-flex items-center gap-1.5">
<span class="size-1.5 rounded-full {seatsDotColor(course)} shrink-0"></span>
<span class="{seatsColor(course)} font-medium tabular-nums">{#if openSeats(course) === 0}Full{:else}{openSeats(course)} open{/if}</span>
<span class="text-muted-foreground/60 tabular-nums">{course.enrollment}/{course.maxEnrollment}{#if course.waitCount > 0} · WL {course.waitCount}/{course.waitCapacity}{/if}</span>
</span>
</td>
{/if}
{/each}
</tr>
{#if expandedCrn === course.crn}
<tr>
<td colspan={visibleColumnIds.length} class="p-0">
<CourseDetail {course} />
</td>
</tr>
{/if}
{/each}
{/if}
</tbody>
</table>
</ContextMenu.Trigger>
<ContextMenu.Portal>
<ContextMenu.Content
class="z-50 min-w-[160px] rounded-md border border-border bg-card p-1 text-card-foreground shadow-lg"
forceMount
>
{#snippet child({ wrapperProps, props, open })}
{#if open}
<div {...wrapperProps}>
<div {...props} in:fade={{ duration: 100 }} out:fade={{ duration: 100 }}>
{@render columnVisibilityItems("context")}
</div>
</div>
{/if}
{/snippet}
</ContextMenu.Content>
</ContextMenu.Portal>
</ContextMenu.Root>
</table>
</ContextMenu.Trigger>
<ContextMenu.Portal>
<ContextMenu.Content
class="z-50 min-w-40 rounded-md border border-border bg-card p-1 text-card-foreground shadow-lg"
forceMount
>
{#snippet child({ wrapperProps, props, open })}
{#if open}
<div {...wrapperProps}>
<div
{...props}
in:fade={{ duration: 100 }}
out:fade={{ duration: 100 }}
>
{@render columnVisibilityGroup(
ContextMenu.Group,
ContextMenu.GroupHeading,
ContextMenu.CheckboxItem,
ContextMenu.Separator,
ContextMenu.Item,
)}
</div>
</div>
{/if}
{/snippet}
</ContextMenu.Content>
</ContextMenu.Portal>
</ContextMenu.Root>
</div>

@@ -0,0 +1,56 @@
<script lang="ts">
import { page } from "$app/state";
import { TriangleAlert, RotateCcw } from "@lucide/svelte";

interface Props {
/** Heading shown in the error card */
title?: string;
/** The error value from svelte:boundary */
error: unknown;
/** Reset callback from svelte:boundary */
reset: () => void;
}

let { title = "Something went wrong", error, reset }: Props = $props();

let errorName = $derived(error instanceof Error ? error.constructor.name : "Error");
let errorMessage = $derived(error instanceof Error ? error.message : String(error));
let errorStack = $derived(error instanceof Error ? error.stack : null);
</script>

<div class="flex items-center justify-center py-16 px-4">
<div class="w-full max-w-lg rounded-lg border border-status-red/25 bg-status-red/5 overflow-hidden text-sm">
<div class="px-4 py-2.5 border-b border-status-red/15 flex items-center justify-between gap-4">
<div class="flex items-center gap-2 text-status-red">
<TriangleAlert size={16} strokeWidth={2.25} />
<span class="font-semibold">{title}</span>
</div>
<span class="text-xs text-muted-foreground font-mono">{page.url.pathname}</span>
</div>

<div class="px-4 py-3 border-b border-status-red/15">
<span class="text-xs text-muted-foreground/70 font-mono">{errorName}</span>
<pre class="mt-1 text-xs text-foreground/80 overflow-auto whitespace-pre-wrap break-words">{errorMessage}</pre>
</div>

{#if errorStack}
<details class="border-b border-status-red/15">
<summary class="px-4 py-2 text-xs text-muted-foreground/70 cursor-pointer hover:text-muted-foreground select-none">
Stack trace
</summary>
<pre class="px-4 py-3 text-xs text-muted-foreground/60 overflow-auto whitespace-pre-wrap break-words max-h-48">{errorStack}</pre>
</details>
{/if}

<div class="px-4 py-2.5 flex items-center justify-end gap-3">
<span class="text-xs text-muted-foreground/60">Retries this section, not the full page</span>
<button
class="shrink-0 cursor-pointer inline-flex items-center gap-1.5 rounded-md bg-status-red px-3 py-1.5 text-sm font-medium text-white hover:brightness-110 transition-all"
onclick={reset}
>
<RotateCcw size={14} strokeWidth={2.25} />
Try again
</button>
</div>
</div>
</div>
@@ -0,0 +1,36 @@
<script lang="ts">
import { cn } from "$lib/utils";

let {
commitHash,
showStatusLink = true,
class: className,
}: {
commitHash?: string | null;
showStatusLink?: boolean;
class?: string;
} = $props();
</script>

<div class={cn("flex justify-center items-center gap-2 mt-auto pt-6 pb-4", className)}>
{#if __APP_VERSION__}
<span class="text-xs text-muted-foreground">v{__APP_VERSION__}</span>
<div class="w-px h-3 bg-muted-foreground opacity-30"></div>
{/if}
<a
href={commitHash
? `https://github.com/Xevion/banner/commit/${commitHash}`
: "https://github.com/Xevion/banner"}
target="_blank"
rel="noopener noreferrer"
class="text-xs text-muted-foreground no-underline hover:underline"
>
GitHub
</a>
{#if showStatusLink}
<div class="w-px h-3 bg-muted-foreground opacity-30"></div>
<a href="/health" class="text-xs text-muted-foreground no-underline hover:underline">
Status
</a>
{/if}
</div>
@@ -0,0 +1,62 @@
<script lang="ts">
import { page } from "$app/state";
import { Search, User, Clock } from "@lucide/svelte";
import { authStore } from "$lib/auth.svelte";
import ThemeToggle from "./ThemeToggle.svelte";

const staticTabs = [
{ href: "/", label: "Search", icon: Search },
{ href: "/timeline", label: "Timeline", icon: Clock },
] as const;

const APP_PREFIXES = ["/profile", "/settings", "/admin"];

let profileTab = $derived(
authStore.isLoading
? { href: "/login" as const, label: null, icon: User }
: {
href: authStore.isAuthenticated ? ("/profile" as const) : ("/login" as const),
label: authStore.isAuthenticated ? "Account" : "Login",
icon: User,
}
);

function isActive(tabHref: string): boolean {
if (tabHref === "/") return page.url.pathname === "/";
if (tabHref === "/profile") {
return APP_PREFIXES.some((p) => page.url.pathname.startsWith(p));
}
return page.url.pathname.startsWith(tabHref);
}
</script>

<nav class="w-full flex justify-center pt-5 px-5">
<div class="w-full max-w-6xl flex items-center justify-between">
<!-- pointer-events-auto: root layout wraps nav in pointer-events-none overlay -->
<div class="flex items-center gap-1 rounded-lg bg-muted p-1 pointer-events-auto">
{#each staticTabs as tab}
<a
href={tab.href}
class="flex items-center gap-1.5 rounded-md px-3 py-1.5 text-sm font-medium transition-colors no-underline
{isActive(tab.href)
? 'bg-background text-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground hover:bg-background/50'}"
>
<tab.icon size={15} strokeWidth={2} />
{tab.label}
</a>
{/each}
<a
href={profileTab.href}
class="flex items-center gap-1.5 rounded-md px-3 py-1.5 text-sm font-medium transition-colors no-underline
{isActive(profileTab.href)
? 'bg-background text-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground hover:bg-background/50'}"
>
<User size={15} strokeWidth={2} />
{#if profileTab.label}{profileTab.label}{/if}
</a>
<ThemeToggle />
</div>
</div>
</nav>
@@ -0,0 +1,76 @@
<script lang="ts">
import { navigationStore } from "$lib/stores/navigation.svelte";
import type { Snippet } from "svelte";
import { cubicOut } from "svelte/easing";
import type { TransitionConfig } from "svelte/transition";

type Axis = "horizontal" | "vertical";

let {
key,
children,
axis = "horizontal",
inDelay = 0,
outDelay = 0,
}: {
key: string;
children: Snippet;
axis?: Axis;
inDelay?: number;
outDelay?: number;
} = $props();

const DURATION = 400;
const OFFSET = 40;

function translate(axis: Axis, value: number): string {
return axis === "vertical" ? `translateY(${value}px)` : `translateX(${value}px)`;
}

function inTransition(_node: HTMLElement): TransitionConfig {
const dir = navigationStore.direction;
if (dir === "fade") {
return {
duration: DURATION,
delay: inDelay,
easing: cubicOut,
css: (t: number) => `opacity: ${t}`,
};
}
const offset = dir === "right" ? OFFSET : -OFFSET;
return {
duration: DURATION,
delay: inDelay,
easing: cubicOut,
css: (t: number) => `opacity: ${t}; transform: ${translate(axis, (1 - t) * offset)}`,
};
}

function outTransition(_node: HTMLElement): TransitionConfig {
const dir = navigationStore.direction;
const base = "position: absolute; top: 0; left: 0; width: 100%; height: 100%";
if (dir === "fade") {
return {
duration: DURATION,
delay: outDelay,
easing: cubicOut,
css: (t: number) => `${base}; opacity: ${t}`,
};
}
const offset = dir === "right" ? -OFFSET : OFFSET;
return {
duration: DURATION,
delay: outDelay,
easing: cubicOut,
css: (t: number) => `${base}; opacity: ${t}; transform: ${translate(axis, (1 - t) * offset)}`,
};
}
</script>

<div class="relative flex flex-1 flex-col">
{#key key}
<div in:inTransition out:outTransition class="flex flex-1 flex-col">
{@render children()}
</div>
{/key}
</div>
@@ -1,4 +1,21 @@
|
||||
<script lang="ts">
|
||||
import { Select } from "bits-ui";
|
||||
import { ChevronUp, ChevronDown } from "@lucide/svelte";
|
||||
import type { Action } from "svelte/action";
|
||||
import { formatNumber } from "$lib/utils";
|
||||
|
||||
const slideIn: Action<HTMLElement, number> = (node, direction) => {
|
||||
if (direction !== 0) {
|
||||
node.animate(
|
||||
[
|
||||
{ transform: `translateX(${direction * 20}px)`, opacity: 0 },
|
||||
{ transform: "translateX(0)", opacity: 1 },
|
||||
],
|
||||
{ duration: 200, easing: "ease-out" }
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
let {
|
||||
totalCount,
|
||||
offset,
|
||||
@@ -11,32 +28,140 @@ let {
|
||||
onPageChange: (newOffset: number) => void;
|
||||
} = $props();
|
||||
|
||||
const currentPage = $derived(Math.floor(offset / limit) + 1);
|
||||
const totalPages = $derived(Math.ceil(totalCount / limit));
|
||||
const start = $derived(offset + 1);
|
||||
const end = $derived(Math.min(offset + limit, totalCount));
|
||||
const hasPrev = $derived(offset > 0);
|
||||
const hasNext = $derived(offset + limit < totalCount);
|

// Track direction for slide animation
let direction = $state(0);

// 5 page slots: current-2, current-1, current, current+1, current+2
const pageSlots = $derived([-2, -1, 0, 1, 2].map((delta) => currentPage + delta));

function isSlotVisible(page: number): boolean {
	return page >= 1 && page <= totalPages;
}

function goToPage(page: number) {
	direction = page > currentPage ? 1 : -1;
	onPageChange((page - 1) * limit);
}

// Build items array for the Select dropdown
const pageItems = $derived(
	Array.from({ length: totalPages }, (_, i) => ({
		value: String(i + 1),
		label: String(i + 1),
	}))
);

const selectValue = $derived(String(currentPage));
</script>

{#if totalCount > 0}
	<div class="flex items-center justify-between text-sm">
		<span class="text-muted-foreground">
			Showing {start}–{end} of {totalCount} courses
		</span>
		<div class="flex gap-2">
			<button
				disabled={!hasPrev}
				onclick={() => onPageChange(offset - limit)}
				class="border border-border bg-card text-foreground rounded-md px-3 py-1.5 text-sm disabled:opacity-40 disabled:cursor-not-allowed hover:bg-muted/50 transition-colors"
			>
				Previous
			</button>
			<button
				disabled={!hasNext}
				onclick={() => onPageChange(offset + limit)}
				class="border border-border bg-card text-foreground rounded-md px-3 py-1.5 text-sm disabled:opacity-40 disabled:cursor-not-allowed hover:bg-muted/50 transition-colors"
			>
				Next
			</button>
{#if totalCount > 0 && totalPages > 1}
	<div class="flex items-start text-xs -mt-3 pl-2">
		<!-- Left zone: result count -->
		<div class="flex-1">
			<span class="text-muted-foreground">
				Showing {formatNumber(start)}–{formatNumber(end)} of {formatNumber(totalCount)} courses
			</span>
		</div>

		<!-- Center zone: page buttons -->
		<div class="flex items-center gap-1">
			{#key currentPage}
				{#each pageSlots as page, i (i)}
					{#if i === 2}
						<!-- Center slot: current page with dropdown trigger -->
						<Select.Root
							type="single"
							value={selectValue}
							onValueChange={(v) => {
								if (v) goToPage(Number(v));
							}}
							items={pageItems}
						>
							<Select.Trigger
								class="inline-flex items-center justify-center gap-1 w-auto min-w-9 h-9 px-2.5
									rounded-md text-sm font-medium tabular-nums
									border border-border bg-card text-foreground
									hover:bg-muted/50 active:bg-muted transition-colors
									cursor-pointer select-none outline-none
									focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background"
								aria-label="Page {currentPage} of {totalPages}, click to select page"
							>
								<span use:slideIn={direction}>{currentPage}</span>
								<ChevronUp class="size-3 text-muted-foreground" />
							</Select.Trigger>
							<Select.Portal>
								<Select.Content
									class="border border-border bg-card shadow-md outline-hidden z-50
										max-h-72 min-w-16 w-auto
										select-none rounded-md p-1
										data-[state=open]:animate-in data-[state=closed]:animate-out
										data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0
										data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95
										data-[side=top]:slide-in-from-bottom-2
										data-[side=bottom]:slide-in-from-top-2"
									side="top"
									sideOffset={6}
								>
									<Select.ScrollUpButton class="flex w-full items-center justify-center py-0.5">
										<ChevronUp class="size-3.5 text-muted-foreground" />
									</Select.ScrollUpButton>
									<Select.Viewport class="p-0.5">
										{#each pageItems as item (item.value)}
											<Select.Item
												class="rounded-sm outline-hidden flex h-8 w-full select-none items-center
													justify-center px-3 text-sm tabular-nums
													data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground
													data-[selected]:font-semibold"
												value={item.value}
												label={item.label}
											>
												{item.label}
											</Select.Item>
										{/each}
									</Select.Viewport>
									<Select.ScrollDownButton class="flex w-full items-center justify-center py-0.5">
										<ChevronDown class="size-3.5 text-muted-foreground" />
									</Select.ScrollDownButton>
								</Select.Content>
							</Select.Portal>
						</Select.Root>
					{:else}
						<!-- Side slot: navigable page button or invisible placeholder -->
						<button
							class="inline-flex items-center justify-center w-9 h-9
								rounded-md text-sm tabular-nums
								text-muted-foreground
								hover:bg-muted/50 hover:text-foreground active:bg-muted transition-colors
								cursor-pointer select-none
								focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background
								{isSlotVisible(page) ? '' : 'invisible pointer-events-none'}"
							onclick={() => goToPage(page)}
							aria-label="Go to page {page}"
							aria-hidden={!isSlotVisible(page)}
							tabindex={isSlotVisible(page) ? 0 : -1}
							disabled={!isSlotVisible(page)}
							use:slideIn={direction}
						>
							{page}
						</button>
					{/if}
				{/each}
			{/key}
		</div>

		<!-- Right zone: spacer for centering -->
		<div class="flex-1"></div>
	</div>
{:else if totalCount > 0}
	<!-- Single page: just show the count, no pagination controls -->
	<div class="flex items-start text-xs -mt-3 pl-2">
		<span class="text-muted-foreground">
			Showing {formatNumber(start)}–{formatNumber(end)} of {formatNumber(totalCount)} courses
		</span>
	</div>
{/if}

@@ -1,52 +1,45 @@
<script lang="ts">
import type { Term, Subject } from "$lib/api";
import SimpleTooltip from "./SimpleTooltip.svelte";
import TermCombobox from "./TermCombobox.svelte";
import SubjectCombobox from "./SubjectCombobox.svelte";

let {
	terms,
	subjects,
	selectedTerm = $bindable(),
	selectedSubject = $bindable(),
	selectedSubjects = $bindable(),
	query = $bindable(),
	openOnly = $bindable(),
}: {
	terms: Term[];
	subjects: Subject[];
	selectedTerm: string;
	selectedSubject: string;
	selectedSubjects: string[];
	query: string;
	openOnly: boolean;
} = $props();
</script>

<div class="flex flex-wrap gap-3 items-center">
	<select
		bind:value={selectedTerm}
		class="border border-border bg-card text-foreground rounded-md px-3 py-1.5 text-sm"
	>
		{#each terms as term (term.code)}
			<option value={term.code}>{term.description}</option>
		{/each}
	</select>
<div class="flex flex-wrap gap-3 items-start">
	<TermCombobox {terms} bind:value={selectedTerm} />

	<select
		bind:value={selectedSubject}
		class="border border-border bg-card text-foreground rounded-md px-3 py-1.5 text-sm"
	>
		<option value="">All Subjects</option>
		{#each subjects as subject (subject.code)}
			<option value={subject.code}>{subject.description}</option>
		{/each}
	</select>
	<SubjectCombobox {subjects} bind:value={selectedSubjects} />

	<input
		type="text"
		placeholder="Search courses..."
		aria-label="Search courses"
		bind:value={query}
		class="border border-border bg-card text-foreground rounded-md px-3 py-1.5 text-sm flex-1 min-w-[200px]"
		class="h-9 border border-border bg-card text-foreground rounded-md px-3 text-sm flex-1 min-w-[200px]
			focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background
			transition-colors"
	/>

	<label class="flex items-center gap-1.5 text-sm text-muted-foreground cursor-pointer">
		<input type="checkbox" bind:checked={openOnly} />
		Open only
	</label>
	<SimpleTooltip text="Show only courses with available seats" delay={200} passthrough>
		<label class="flex items-center gap-1.5 h-9 text-sm text-muted-foreground cursor-pointer">
			<input type="checkbox" bind:checked={openOnly} />
			Open only
		</label>
	</SimpleTooltip>
</div>

@@ -0,0 +1,68 @@
<script lang="ts">
import { onMount } from "svelte";
import SimpleTooltip from "$lib/components/SimpleTooltip.svelte";
import { relativeTime } from "$lib/time";
import { formatNumber } from "$lib/utils";

export interface SearchMeta {
	totalCount: number;
	durationMs: number;
	timestamp: Date;
}

let { meta }: { meta: SearchMeta | null } = $props();

let now = $state(new Date());

let formattedTime = $derived(
	meta
		? meta.timestamp.toLocaleTimeString(undefined, {
				hour: "2-digit",
				minute: "2-digit",
				second: "2-digit",
			})
		: ""
);

let relativeTimeResult = $derived(meta ? relativeTime(meta.timestamp, now) : null);
let relativeTimeText = $derived(relativeTimeResult?.text ?? "");

let countLabel = $derived(meta ? formatNumber(meta.totalCount) : "");
let resultNoun = $derived(meta ? (meta.totalCount !== 1 ? "results" : "result") : "");
let durationLabel = $derived(meta ? `${Math.round(meta.durationMs)}ms` : "");

let tooltipText = $derived(meta ? `${relativeTimeText} · ${formattedTime}` : "");

onMount(() => {
	let nowTimeoutId: ReturnType<typeof setTimeout> | null = null;

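	// Note: re-arms itself with the granularity hint from relativeTime (its
	// nextUpdateMs field), so the "x seconds/minutes ago" label re-renders
	// only when it would actually change; the 1000ms default is a fallback assumption.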
	function scheduleNowTick() {
		const delay = relativeTimeResult?.nextUpdateMs ?? 1000;
		nowTimeoutId = setTimeout(() => {
			now = new Date();
			scheduleNowTick();
		}, delay);
	}
	scheduleNowTick();

	return () => {
		if (nowTimeoutId) clearTimeout(nowTimeoutId);
	};
});
</script>

<SimpleTooltip
	text={tooltipText}
	contentClass="whitespace-nowrap text-[12px] px-2 py-1"
	triggerClass="self-start"
	sideOffset={0}
>
	<span
		class="pl-1 text-xs transition-opacity duration-200"
		style:opacity={meta ? 1 : 0}
	>
		<span class="text-muted-foreground/70">{countLabel}</span>
		<span class="text-muted-foreground/35">{resultNoun} in</span>
		<span class="text-muted-foreground/70">{durationLabel}</span>
	</span>
</SimpleTooltip>
@@ -0,0 +1,42 @@
<script lang="ts">
import { Tooltip } from "bits-ui";
import type { Snippet } from "svelte";
import { cn } from "$lib/utils";

let {
	text,
	delay = 150,
	side = "top",
	passthrough = false,
	triggerClass = "",
	contentClass = "",
	sideOffset = 6,
	children,
}: {
	text: string;
	delay?: number;
	side?: "top" | "bottom" | "left" | "right";
	passthrough?: boolean;
	triggerClass?: string;
	contentClass?: string;
	sideOffset?: number;
	children: Snippet;
} = $props();
</script>

<Tooltip.Root delayDuration={delay} disableHoverableContent={passthrough}>
	<Tooltip.Trigger>
		{#snippet child({ props })}
			<span class={triggerClass} {...props}>
				{@render children()}
			</span>
		{/snippet}
	</Tooltip.Trigger>
	<Tooltip.Content
		{side}
		{sideOffset}
		class={cn("z-50 bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-sm whitespace-pre-line max-w-max text-left", contentClass)}
	>
		{text}
	</Tooltip.Content>
</Tooltip.Root>
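<!-- Usage sketch (illustrative, not part of the diff); `refresh` is a hypothetical handler: -->
<!--
<SimpleTooltip text="Refresh data" side="bottom" delay={200}>
	<button onclick={refresh}>Refresh</button>
</SimpleTooltip>
-->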
@@ -0,0 +1,162 @@
<script lang="ts">
import { Combobox } from "bits-ui";
import { Check, ChevronsUpDown } from "@lucide/svelte";
import { fly } from "svelte/transition";
import type { Subject } from "$lib/api";
import { formatNumber } from "$lib/utils";

let {
	subjects,
	value = $bindable(),
}: {
	subjects: Subject[];
	value: string[];
} = $props();

let open = $state(false);
let searchValue = $state("");
let containerEl = $state<HTMLDivElement>(null!);

const filteredSubjects = $derived.by(() => {
	const query = searchValue.toLowerCase().trim();
	if (query === "") return subjects;

	const exactCode: Subject[] = [];
	const codeStartsWith: Subject[] = [];
	const descriptionMatch: Subject[] = [];

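	// Rank matches: exact code, then code prefix, then substring anywhere
	// in the code or description; buckets are concatenated in that order below.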
	for (const s of subjects) {
		const codeLower = s.code.toLowerCase();
		const descLower = s.description.toLowerCase();

		if (codeLower === query) {
			exactCode.push(s);
		} else if (codeLower.startsWith(query)) {
			codeStartsWith.push(s);
		} else if (descLower.includes(query) || codeLower.includes(query)) {
			descriptionMatch.push(s);
		}
	}

	return [...exactCode, ...codeStartsWith, ...descriptionMatch];
});

const MAX_VISIBLE_CHIPS = 3;
const visibleChips = $derived(value.slice(0, MAX_VISIBLE_CHIPS));
const overflowCount = $derived(Math.max(0, value.length - MAX_VISIBLE_CHIPS));

function removeSubject(code: string) {
	value = value.filter((v) => v !== code);
}

// bits-ui sets the input text to the last selected item's label — clear it
$effect(() => {
	value;
	const input = containerEl?.querySelector("input");
	if (input) {
		input.value = "";
		searchValue = "";
	}
});
</script>

<Combobox.Root
	type="multiple"
	bind:value
	bind:open
	onOpenChange={(o: boolean) => {
		if (!o) searchValue = "";
	}}
>
	<!-- svelte-ignore a11y_click_events_have_key_events -->
	<!-- svelte-ignore a11y_no_static_element_interactions -->
	<div
		class="relative h-9 rounded-md border border-border bg-card
			flex flex-nowrap items-center gap-1 w-56 pr-9 overflow-hidden cursor-pointer
			has-[:focus-visible]:ring-2 has-[:focus-visible]:ring-ring has-[:focus-visible]:ring-offset-2 has-[:focus-visible]:ring-offset-background"
		bind:this={containerEl}
		onclick={() => { containerEl?.querySelector('input')?.focus(); }}
	>
		{#if value.length > 0}
			{#each (open ? value : visibleChips) as code (code)}
				<span
					role="button"
					tabindex="-1"
					onmousedown={(e) => { e.preventDefault(); e.stopPropagation(); }}
					onclick={(e) => { e.stopPropagation(); removeSubject(code); }}
					onkeydown={(e) => { if (e.key === "Enter" || e.key === " ") { e.stopPropagation(); removeSubject(code); } }}
					class="inline-flex items-center rounded bg-muted px-1.5 py-0.5 text-xs font-mono shrink-0
						text-muted-foreground hover:outline hover:outline-1 hover:outline-ring
						cursor-pointer transition-[outline] duration-100 first:ml-2"
				>
					{code}
				</span>
			{/each}
			{#if !open && overflowCount > 0}
				<span class="text-xs text-muted-foreground shrink-0">+{formatNumber(overflowCount)}</span>
			{/if}
		{/if}
		<Combobox.Input
			oninput={(e) => (searchValue = e.currentTarget.value)}
			onfocus={() => { open = true; }}
			class="h-full min-w-0 flex-1 bg-transparent text-muted-foreground text-sm
				placeholder:text-muted-foreground outline-none border-none
				{value.length > 0 ? 'pl-1' : 'pl-3'}"
			placeholder={value.length > 0 ? "Filter..." : "All Subjects"}
			aria-label="Search subjects"
			autocomplete="off"
			autocorrect="off"
			spellcheck={false}
		/>
		<span class="absolute end-2 top-1/2 -translate-y-1/2 text-muted-foreground pointer-events-none">
			<ChevronsUpDown class="size-4" />
		</span>
	</div>
	<Combobox.Portal>
		<Combobox.Content
			customAnchor={containerEl}
			class="border border-border bg-card shadow-md
				outline-hidden z-50
				max-h-72 min-w-[var(--bits-combobox-anchor-width)] w-max max-w-96
				select-none rounded-md p-1
				data-[side=bottom]:translate-y-1 data-[side=top]:-translate-y-1"
			sideOffset={4}
			forceMount
		>
			{#snippet child({ wrapperProps, props, open: isOpen })}
				{#if isOpen}
					<div {...wrapperProps}>
						<div {...props} transition:fly={{ duration: 150, y: -4 }}>
							<Combobox.Viewport class="p-0.5">
								{#each filteredSubjects as subject (subject.code)}
									<Combobox.Item
										class="rounded-sm outline-hidden flex h-8 w-full select-none items-center gap-2 px-2 text-sm whitespace-nowrap
											data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground"
										value={subject.code}
										label={subject.description}
									>
										{#snippet children({ selected })}
											<span class="inline-flex items-center justify-center rounded bg-muted px-1 py-0.5
												text-xs font-mono text-muted-foreground w-10 shrink-0 text-center">
												{subject.code}
											</span>
											<span class="flex-1">{subject.description}</span>
											{#if selected}
												<Check class="ml-auto size-4 shrink-0" />
											{/if}
										{/snippet}
									</Combobox.Item>
								{:else}
									<span class="block px-2 py-2 text-sm text-muted-foreground">
										No subjects found.
									</span>
								{/each}
							</Combobox.Viewport>
						</div>
					</div>
				{/if}
			{/snippet}
		</Combobox.Content>
	</Combobox.Portal>
</Combobox.Root>
@@ -0,0 +1,139 @@
<script lang="ts">
import { Combobox } from "bits-ui";
import { Check, ChevronsUpDown } from "@lucide/svelte";
import { fly } from "svelte/transition";
import type { Term } from "$lib/api";

let {
	terms,
	value = $bindable(),
}: {
	terms: Term[];
	value: string;
} = $props();

let open = $state(false);
let searchValue = $state("");
let containerEl = $state<HTMLDivElement>(null!);

const currentTermCode = $derived(
	terms.find((t) => !t.description.includes("(View Only)"))?.code ?? ""
);

const selectedLabel = $derived(
	terms.find((t) => t.code === value)?.description ?? "Select term..."
);

const filteredTerms = $derived.by(() => {
	const query = searchValue.toLowerCase();
	const matched =
		query === "" ? terms : terms.filter((t) => t.description.toLowerCase().includes(query));

	const current = matched.find((t) => t.code === currentTermCode);
	const rest = matched.filter((t) => t.code !== currentTermCode);
	return current ? [current, ...rest] : rest;
});

// Manage DOM input text: clear when open for searching, restore label when closed
$effect(() => {
	const _open = open;
	void value; // track selection changes
	const _label = selectedLabel;
	const input = containerEl?.querySelector("input");
	if (!input) return;
	if (_open) {
		input.value = "";
		searchValue = "";
	} else {
		input.value = _label;
	}
});
</script>

<Combobox.Root
	type="single"
	bind:value={() => value, (v) => { if (v) value = v; }}
	bind:open
	onOpenChange={(o: boolean) => {
		if (!o) searchValue = "";
	}}
>
	<!-- svelte-ignore a11y_no_static_element_interactions -->
	<div
		class="relative h-9 rounded-md border border-border bg-card
			flex items-center w-40 cursor-pointer
			has-[:focus-visible]:ring-2 has-[:focus-visible]:ring-ring has-[:focus-visible]:ring-offset-2 has-[:focus-visible]:ring-offset-background"
		role="presentation"
		bind:this={containerEl}
		onclick={() => { containerEl?.querySelector('input')?.focus(); }}
		onkeydown={() => { containerEl?.querySelector('input')?.focus(); }}
	>
		<Combobox.Input
			oninput={(e) => (searchValue = e.currentTarget.value)}
			onfocus={() => { open = true; }}
			class="h-full w-full bg-transparent text-muted-foreground text-sm
				placeholder:text-muted-foreground outline-none border-none
				pl-3 pr-9 truncate"
			placeholder="Select term..."
			aria-label="Select term"
			autocomplete="off"
			autocorrect="off"
			spellcheck={false}
		/>
		<span class="absolute end-2 top-1/2 -translate-y-1/2 text-muted-foreground pointer-events-none">
			<ChevronsUpDown class="size-4" />
		</span>
	</div>
	<Combobox.Portal>
		<Combobox.Content
			customAnchor={containerEl}
			class="border border-border bg-card shadow-md
				outline-hidden z-50
				max-h-72 min-w-[var(--bits-combobox-anchor-width)]
				select-none rounded-md p-1
				data-[side=bottom]:translate-y-1 data-[side=top]:-translate-y-1"
			sideOffset={4}
			forceMount
		>
			{#snippet child({ wrapperProps, props, open: isOpen })}
				{#if isOpen}
					<div {...wrapperProps}>
						<div {...props} transition:fly={{ duration: 150, y: -4 }}>
							<Combobox.Viewport class="p-0.5">
								{#each filteredTerms as term, i (term.code)}
									{#if i === 1 && term.code !== currentTermCode && filteredTerms[0]?.code === currentTermCode}
										<div class="mx-2 my-1 h-px bg-border"></div>
									{/if}
									<Combobox.Item
										class="rounded-sm outline-hidden flex h-8 w-full select-none items-center px-2 text-sm
											data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground
											{term.code === value ? 'cursor-default' : 'cursor-pointer'}
											{term.code === currentTermCode ? 'font-medium text-foreground' : 'text-foreground'}"
										value={term.code}
										label={term.description}
									>
										{#snippet children({ selected })}
											<span class="flex-1 truncate">
												{term.description}
												{#if term.code === currentTermCode}
													<span class="ml-1.5 text-xs text-muted-foreground font-normal">current</span>
												{/if}
											</span>
											{#if selected}
												<Check class="ml-2 size-4 shrink-0" />
											{/if}
										{/snippet}
									</Combobox.Item>
								{:else}
									<span class="block px-2 py-2 text-sm text-muted-foreground">
										No terms found.
									</span>
								{/each}
							</Combobox.Viewport>
						</div>
					</div>
				{/if}
			{/snippet}
		</Combobox.Content>
	</Combobox.Portal>
</Combobox.Root>
@@ -2,6 +2,7 @@
import { tick } from "svelte";
import { Moon, Sun } from "@lucide/svelte";
import { themeStore } from "$lib/stores/theme.svelte";
import SimpleTooltip from "./SimpleTooltip.svelte";

/**
 * Theme toggle with View Transitions API circular reveal animation.
@@ -42,25 +43,27 @@ async function handleToggle(event: MouseEvent) {
}
</script>

<button
	type="button"
	onclick={(e) => handleToggle(e)}
	aria-label={themeStore.isDark ? "Switch to light mode" : "Switch to dark mode"}
	class="cursor-pointer border-none rounded-md flex items-center justify-center p-2 scale-125
		text-muted-foreground hover:bg-muted bg-transparent transition-colors"
>
	<div class="relative size-[18px]">
		<Sun
			size={18}
			class="absolute inset-0 transition-all duration-300 {themeStore.isDark
				? 'rotate-90 scale-0 opacity-0'
				: 'rotate-0 scale-100 opacity-100'}"
		/>
		<Moon
			size={18}
			class="absolute inset-0 transition-all duration-300 {themeStore.isDark
				? 'rotate-0 scale-100 opacity-100'
				: '-rotate-90 scale-0 opacity-0'}"
		/>
	</div>
</button>
<SimpleTooltip text={themeStore.isDark ? "Switch to light mode" : "Switch to dark mode"} delay={200} side="bottom" passthrough>
	<button
		type="button"
		onclick={(e) => handleToggle(e)}
		aria-label={themeStore.isDark ? "Switch to light mode" : "Switch to dark mode"}
		class="cursor-pointer border-none rounded-md flex items-center justify-center p-1.5
			text-muted-foreground hover:text-foreground hover:bg-background/50 bg-transparent transition-colors"
	>
		<div class="relative size-[18px]">
			<Sun
				size={18}
				class="absolute inset-0 transition-all duration-300 {themeStore.isDark
					? 'rotate-90 scale-0 opacity-0'
					: 'rotate-0 scale-100 opacity-100'}"
			/>
			<Moon
				size={18}
				class="absolute inset-0 transition-all duration-300 {themeStore.isDark
					? 'rotate-0 scale-100 opacity-100'
					: '-rotate-90 scale-0 opacity-0'}"
			/>
		</div>
	</button>
</SimpleTooltip>

@@ -0,0 +1,676 @@
<script lang="ts">
import { onMount } from "svelte";
import { scaleTime, scaleLinear } from "d3-scale";

import type { TimeSlot, ChartContext } from "$lib/timeline/types";
import {
	PADDING,
	DEFAULT_AXIS_RATIO,
	CHART_HEIGHT_RATIO,
	MIN_SPAN_MS,
	MAX_SPAN_MS,
	DEFAULT_SPAN_MS,
	ZOOM_FACTOR,
	ZOOM_KEY_FACTOR,
	ZOOM_EASE,
	ZOOM_SETTLE_THRESHOLD,
	PAN_FRICTION,
	PAN_STOP_THRESHOLD,
	PAN_STOP_THRESHOLD_Y,
	VELOCITY_SAMPLE_WINDOW,
	VELOCITY_MIN_DT,
	PAN_STEP_RATIO,
	PAN_STEP_CTRL_RATIO,
	PAN_EASE,
	PAN_SETTLE_THRESHOLD_PX,
	YRATIO_STEP,
	YRATIO_MIN,
	YRATIO_MAX,
	YRATIO_SETTLE_THRESHOLD,
	FOLLOW_EASE,
	MIN_MAXY,
	MAX_DT,
	DEFAULT_DT,
	TAP_MAX_DURATION_MS,
	TAP_MAX_DISTANCE_PX,
} from "$lib/timeline/constants";
import { createTimelineStore } from "$lib/timeline/store.svelte";
import {
	createAnimMap,
	syncAnimTargets,
	stepAnimations,
	pruneAnimMap,
} from "$lib/timeline/animation";
import {
	getVisibleSlots,
	findSlotByTime,
	snapToSlot,
	enabledTotalClasses,
} from "$lib/timeline/viewport";
import {
	drawGrid,
	drawHoverColumn,
	drawStackedArea,
	drawNowLine,
	drawTimeAxis,
	stackVisibleSlots,
} from "$lib/timeline/renderer";
import TimelineDrawer from "./TimelineDrawer.svelte";
import TimelineTooltip from "./TimelineTooltip.svelte";

// ── Reactive DOM state ──────────────────────────────────────────────
let canvasEl: HTMLCanvasElement | undefined = $state();
let containerEl: HTMLDivElement | undefined = $state();
let width = $state(800);
let height = $state(400);
let dpr = $state(1);

// ── View window ─────────────────────────────────────────────────────
let viewCenter = $state(Date.now());
let viewSpan = $state(DEFAULT_SPAN_MS);
let viewYRatio = $state(DEFAULT_AXIS_RATIO);

// ── Interaction state ───────────────────────────────────────────────
let isDragging = $state(false);
let dragStartX = $state(0);
let dragStartY = $state(0);
let dragStartCenter = $state(0);
let dragStartYRatio = $state(0);
let followEnabled = $state(true);
let ctrlHeld = $state(false);

// ── Animation state (intentionally non-reactive — updated in rAF) ──
let panVelocity = 0;
let panVelocityY = 0;
let pointerSamples: { time: number; x: number; y: number }[] = [];

// ── Multi-touch / pinch state ────────────────────────────────────────
let activePointers = new Map<number, { x: number; y: number }>();
let isPinching = false;
let pinchStartDist = 0;
let pinchStartSpan = 0;
let pinchAnchorTime = 0;
let pinchAnchorRatio = 0.5;

// ── Tap detection ────────────────────────────────────────────────────
let pointerDownTime = 0;
let pointerDownPos = { x: 0, y: 0 };

let targetSpan = DEFAULT_SPAN_MS;
let zoomAnchorTime = 0;
let zoomAnchorRatio = 0.5;
let isZoomAnimating = false;

let targetCenter = Date.now();
let isPanAnimating = false;
let targetYRatio = DEFAULT_AXIS_RATIO;
let isYPanAnimating = false;

let animationFrameId = 0;
let lastFrameTime = 0;
let animatedMaxY = MIN_MAXY;

const animMap = createAnimMap();

// ── Tooltip + hover ─────────────────────────────────────────────────
let tooltipVisible = $state(false);
let tooltipX = $state(0);
let tooltipY = $state(0);
let tooltipSlot: TimeSlot | null = $state(null);
let hoverSlotTime: number | null = $state(null);
let lastPointerClientX = 0;
let lastPointerClientY = 0;
let pointerOverCanvas = false;

// ── Drawer ──────────────────────────────────────────────────────────
let drawerOpen = $state(false);
// Start with an empty set — subjects are populated dynamically from the API.
let enabledSubjects: Set<string> = $state(new Set());

// ── Data store ──────────────────────────────────────────────────────
const store = createTimelineStore();
let data: TimeSlot[] = $derived(store.data);
let allSubjects: string[] = $derived(store.subjects);

// Auto-enable newly discovered subjects.
$effect(() => {
	const storeSubjects = store.subjects;
	const next = new Set(enabledSubjects);
	let changed = false;
	for (const s of storeSubjects) {
		if (!next.has(s)) {
			next.add(s);
			changed = true;
		}
	}
	if (changed) {
		enabledSubjects = next;
	}
});

let activeSubjects = $derived(allSubjects.filter((s) => enabledSubjects.has(s)));

// ── Derived layout ──────────────────────────────────────────────────
let viewStart = $derived(viewCenter - viewSpan / 2);
let viewEnd = $derived(viewCenter + viewSpan / 2);
let chartHeight = $derived(height * CHART_HEIGHT_RATIO);
let chartBottom = $derived(height * viewYRatio);
let chartTop = $derived(chartBottom - chartHeight);

let xScale = $derived(
	scaleTime()
		.domain([new Date(viewStart), new Date(viewEnd)])
		.range([PADDING.left, width - PADDING.right])
);

// Reused across frames — domain/range updated imperatively in render().
let yScale = scaleLinear()
	.domain([0, MIN_MAXY * 1.1])
	.range([0, 1]);

// ── Subject toggling ────────────────────────────────────────────────
function toggleSubject(subject: string) {
	const next = new Set(enabledSubjects);
	if (next.has(subject)) next.delete(subject);
	else next.add(subject);
	enabledSubjects = next;
}

function enableAll() {
	enabledSubjects = new Set(allSubjects);
}

function disableAll() {
	enabledSubjects = new Set();
}

// ── Rendering ───────────────────────────────────────────────────────
function render() {
	if (!canvasEl) return;
	const ctx = canvasEl.getContext("2d");
	if (!ctx) return;

	// Update yScale in-place (no allocation per frame).
	yScale.domain([0, animatedMaxY * 1.1]).range([chartBottom, chartTop]);

	ctx.save();
	ctx.scale(dpr, dpr);
	ctx.clearRect(0, 0, width, height);

	const chart: ChartContext = {
		ctx,
		xScale,
		yScale,
		width,
		chartTop,
		chartBottom,
		viewSpan,
		viewStart,
		viewEnd,
	};

	const visible = getVisibleSlots(data, viewStart, viewEnd);
	const visibleStack = stackVisibleSlots(visible, allSubjects, enabledSubjects, animMap);

	drawGrid(chart);
	drawHoverColumn(chart, visibleStack, hoverSlotTime);
	drawStackedArea(chart, visibleStack);
	drawNowLine(chart);
	drawTimeAxis(chart);

	ctx.restore();
}

// ── Hover logic ─────────────────────────────────────────────────────
function updateHover() {
	if (!pointerOverCanvas || isDragging || !canvasEl) return;

	const rect = canvasEl.getBoundingClientRect();
	const x = lastPointerClientX - rect.left;
	const y = lastPointerClientY - rect.top;

	if (y < chartTop || y > chartBottom) {
		tooltipVisible = false;
		hoverSlotTime = null;
		return;
	}

	const time = xScale.invert(x);
	const snappedTime = snapToSlot(time.getTime());
	const slot = findSlotByTime(data, snappedTime);
	if (!slot) {
		tooltipVisible = false;
		hoverSlotTime = null;
		return;
	}

	const total = enabledTotalClasses(slot, activeSubjects);
	if (total <= 0) {
		tooltipVisible = false;
		hoverSlotTime = null;
		return;
	}

	// Bypass area overlap check when CTRL is held.
	if (!ctrlHeld) {
		const stackTopY = yScale(total);
		if (y < stackTopY) {
			tooltipVisible = false;
			hoverSlotTime = null;
			return;
		}
	}

	tooltipSlot = slot;
	tooltipX = lastPointerClientX;
	tooltipY = lastPointerClientY;
	tooltipVisible = true;
	hoverSlotTime = snappedTime;
}

// ── Interaction helpers ───────────────────────────────────────────────
function pinchDistance(): number {
	const pts = [...activePointers.values()];
	if (pts.length < 2) return 0;
	const dx = pts[1].x - pts[0].x;
	const dy = pts[1].y - pts[0].y;
	return Math.hypot(dx, dy);
}

function pinchMidpoint(): { x: number; y: number } {
	const pts = [...activePointers.values()];
	if (pts.length < 2) return { x: 0, y: 0 };
	return { x: (pts[0].x + pts[1].x) / 2, y: (pts[0].y + pts[1].y) / 2 };
}

// ── Interaction handlers ────────────────────────────────────────────
function onPointerDown(e: PointerEvent) {
	if (e.button !== 0) return;
	(e.currentTarget as HTMLElement).setPointerCapture(e.pointerId);
	activePointers.set(e.pointerId, { x: e.clientX, y: e.clientY });

	// Two fingers down → start pinch-to-zoom
	if (activePointers.size === 2) {
		isDragging = false;
		isPinching = true;
		pinchStartDist = pinchDistance();
		pinchStartSpan = viewSpan;

		const mid = pinchMidpoint();
		const rect = canvasEl?.getBoundingClientRect();
		const midX = rect ? mid.x - rect.left : mid.x;
		const chartWidth = width - PADDING.left - PADDING.right;

		pinchAnchorTime = xScale.invert(midX).getTime();
		pinchAnchorRatio = (midX - PADDING.left) / chartWidth;
		return;
	}

	// Single finger / mouse → start drag
	isDragging = true;
	dragStartX = e.clientX;
	dragStartY = e.clientY;
	dragStartCenter = viewCenter;
	dragStartYRatio = viewYRatio;
	followEnabled = false;
	panVelocity = 0;
	panVelocityY = 0;
	isZoomAnimating = false;
	isPanAnimating = false;
	isYPanAnimating = false;
	targetSpan = viewSpan;
	tooltipVisible = false;
	hoverSlotTime = null;
	pointerDownTime = performance.now();
	pointerDownPos = { x: e.clientX, y: e.clientY };
	pointerSamples = [{ time: performance.now(), x: e.clientX, y: e.clientY }];
}

function onPointerMove(e: PointerEvent) {
	ctrlHeld = e.ctrlKey || e.metaKey;
	lastPointerClientX = e.clientX;
	lastPointerClientY = e.clientY;
	pointerOverCanvas = true;
	activePointers.set(e.pointerId, { x: e.clientX, y: e.clientY });

	// Pinch-to-zoom (two-finger gesture)
	if (isPinching && activePointers.size >= 2) {
		const dist = pinchDistance();
		if (pinchStartDist > 0) {
			const scale = pinchStartDist / dist; // fingers apart = zoom in
			const newSpan = Math.min(MAX_SPAN_MS, Math.max(MIN_SPAN_MS, pinchStartSpan * scale));
			viewSpan = newSpan;
			targetSpan = newSpan;
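			// Keep the time under the pinch midpoint stationary: re-derive the
			// center from the anchor time and its fractional position in the chart.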
			viewCenter = pinchAnchorTime + (0.5 - pinchAnchorRatio) * viewSpan;
		}
		return;
	}

	if (isDragging) {
		const dx = e.clientX - dragStartX;
		const dy = e.clientY - dragStartY;
		const msPerPx = viewSpan / (width - PADDING.left - PADDING.right);
		viewCenter = dragStartCenter - dx * msPerPx;
		viewYRatio = dragStartYRatio + dy / height;

		const now = performance.now();
		pointerSamples.push({ time: now, x: e.clientX, y: e.clientY });
		const cutoff = now - VELOCITY_SAMPLE_WINDOW;
		pointerSamples = pointerSamples.filter((s) => s.time >= cutoff);
	} else {
		updateHover();
	}
}

function onPointerUp(e: PointerEvent) {
	(e.currentTarget as HTMLElement).releasePointerCapture(e.pointerId);
	activePointers.delete(e.pointerId);

	// End pinch when fewer than 2 fingers remain
	if (isPinching) {
		if (activePointers.size < 2) {
			isPinching = false;
			// If one finger remains, reset drag origin to that finger's position
			if (activePointers.size === 1) {
				const remaining = [...activePointers.values()][0];
				isDragging = true;
				dragStartX = remaining.x;
				dragStartY = remaining.y;
				dragStartCenter = viewCenter;
				dragStartYRatio = viewYRatio;
				pointerSamples = [{ time: performance.now(), x: remaining.x, y: remaining.y }];
			}
		}
		return;
	}

	isDragging = false;

	// Tap detection: short duration + minimal movement → show tooltip
	const elapsed = performance.now() - pointerDownTime;
	const dist = Math.hypot(e.clientX - pointerDownPos.x, e.clientY - pointerDownPos.y);
	if (elapsed < TAP_MAX_DURATION_MS && dist < TAP_MAX_DISTANCE_PX) {
		lastPointerClientX = e.clientX;
		lastPointerClientY = e.clientY;
		pointerOverCanvas = true;
		// Bypass the isDragging guard in updateHover since we just cleared it
		updateHover();
		pointerSamples = [];
		return;
	}

	// Momentum from drag
	if (pointerSamples.length >= 2) {
		const first = pointerSamples[0];
		const last = pointerSamples[pointerSamples.length - 1];
		const dt = last.time - first.time;
		if (dt > VELOCITY_MIN_DT) {
			const pxPerMsX = (last.x - first.x) / dt;
			const msPerPx = viewSpan / (width - PADDING.left - PADDING.right);
			panVelocity = -pxPerMsX * msPerPx;
			panVelocityY = (last.y - first.y) / dt;
		}
	}
	pointerSamples = [];
}

function onPointerLeave() {
	pointerOverCanvas = false;
	tooltipVisible = false;
	hoverSlotTime = null;
}

function onPointerCancel(e: PointerEvent) {
	activePointers.delete(e.pointerId);
	if (activePointers.size < 2) isPinching = false;
	if (activePointers.size === 0) isDragging = false;
}

function onWheel(e: WheelEvent) {
	e.preventDefault();
	if (!canvasEl) return;
	followEnabled = false;
	panVelocity = 0;
	panVelocityY = 0;

	const rect = canvasEl.getBoundingClientRect();
	const mouseX = e.clientX - rect.left;
	const chartWidth = width - PADDING.left - PADDING.right;

	zoomAnchorTime = xScale.invert(mouseX).getTime();
	zoomAnchorRatio = (mouseX - PADDING.left) / chartWidth;

	const zoomIn = e.deltaY < 0;
	const factor = zoomIn ? 1 / ZOOM_FACTOR : ZOOM_FACTOR;
	targetSpan = Math.min(MAX_SPAN_MS, Math.max(MIN_SPAN_MS, targetSpan * factor));
	isZoomAnimating = true;
}

function onKeyDown(e: KeyboardEvent) {
	const wasCtrl = ctrlHeld;
	ctrlHeld = e.ctrlKey || e.metaKey;
	if (ctrlHeld !== wasCtrl) updateHover();

	switch (e.key) {
		case "ArrowLeft":
		case "ArrowRight": {
			e.preventDefault();
			followEnabled = false;
			panVelocity = 0;
			const ratio = e.ctrlKey ? PAN_STEP_CTRL_RATIO : PAN_STEP_RATIO;
			const step = viewSpan * ratio;
			if (!isPanAnimating) targetCenter = viewCenter;
			targetCenter += e.key === "ArrowRight" ? step : -step;
			isPanAnimating = true;
			break;
		}
		case "ArrowUp":
		case "ArrowDown": {
			e.preventDefault();
			if (e.ctrlKey || e.metaKey) {
				const direction = e.key === "ArrowUp" ? -1 : 1;
				if (!isYPanAnimating) targetYRatio = viewYRatio;
				targetYRatio = Math.max(
					YRATIO_MIN,
					Math.min(YRATIO_MAX, targetYRatio + direction * YRATIO_STEP)
				);
				isYPanAnimating = true;
			} else {
				const factor = e.key === "ArrowUp" ? 1 / ZOOM_KEY_FACTOR : ZOOM_KEY_FACTOR;
				followEnabled = false;
				panVelocity = 0;
				zoomAnchorTime = isPanAnimating ? targetCenter : viewCenter;
				zoomAnchorRatio = 0.5;
				targetSpan = Math.min(MAX_SPAN_MS, Math.max(MIN_SPAN_MS, targetSpan * factor));
				isZoomAnimating = true;
			}
			break;
		}
	}
}

function onKeyUp(e: KeyboardEvent) {
	const wasCtrl = ctrlHeld;
	ctrlHeld = e.ctrlKey || e.metaKey;
	if (ctrlHeld !== wasCtrl) updateHover();
}

function onWindowBlur() {
	const wasCtrl = ctrlHeld;
	ctrlHeld = false;
	if (wasCtrl) updateHover();
}

function resumeFollow() {
	panVelocity = 0;
	panVelocityY = 0;
	isPanAnimating = false;
	isYPanAnimating = false;
	targetYRatio = DEFAULT_AXIS_RATIO;
	viewYRatio = DEFAULT_AXIS_RATIO;
	targetSpan = DEFAULT_SPAN_MS;
	isZoomAnimating = true;
	zoomAnchorTime = Date.now();
	zoomAnchorRatio = 0.5;
	followEnabled = true;
}

// ── Resize ──────────────────────────────────────────────────────────
function updateSize() {
	if (!containerEl) return;
	const rect = containerEl.getBoundingClientRect();
	width = rect.width;
	height = rect.height;
	dpr = window.devicePixelRatio || 1;
	if (canvasEl) {
		canvasEl.width = width * dpr;
		canvasEl.height = height * dpr;
	}
}

// ── Animation loop ──────────────────────────────────────────────────
function tick(timestamp: number) {
	const dt = lastFrameTime > 0 ? Math.min(timestamp - lastFrameTime, MAX_DT) : DEFAULT_DT;
	lastFrameTime = timestamp;

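	// Frame-rate-independent decay: normalizing dt to a 16ms frame keeps the
	// friction (and the eased zoom/pan factors below) behaving the same at
	// 60Hz and 144Hz, e.g. two 8ms frames decay exactly like one 16ms frame.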
	const friction = Math.pow(PAN_FRICTION, dt / 16);

	// Momentum panning
	if (
		!isDragging &&
		(Math.abs(panVelocity) > PAN_STOP_THRESHOLD || Math.abs(panVelocityY) > PAN_STOP_THRESHOLD_Y)
	) {
		viewCenter += panVelocity * dt;
		viewYRatio += (panVelocityY * dt) / height;
		panVelocity *= friction;
		panVelocityY *= friction;
		if (Math.abs(panVelocity) < PAN_STOP_THRESHOLD) panVelocity = 0;
		if (Math.abs(panVelocityY) < PAN_STOP_THRESHOLD_Y) panVelocityY = 0;
	}

	// Smooth zoom
	if (isZoomAnimating && !isDragging) {
		const spanDiff = targetSpan - viewSpan;
		if (Math.abs(spanDiff) < ZOOM_SETTLE_THRESHOLD) {
			viewSpan = targetSpan;
			viewCenter = zoomAnchorTime + (0.5 - zoomAnchorRatio) * viewSpan;
			isZoomAnimating = false;
		} else {
			const zf = 1 - Math.pow(1 - ZOOM_EASE, dt / 16);
			viewSpan += spanDiff * zf;
			viewCenter = zoomAnchorTime + (0.5 - zoomAnchorRatio) * viewSpan;
		}
	}

	// Keyboard pan
	if (isPanAnimating && !isDragging) {
		const panDiff = targetCenter - viewCenter;
		const msPerPx = viewSpan / (width - PADDING.left - PADDING.right);
		if (Math.abs(panDiff) < msPerPx * PAN_SETTLE_THRESHOLD_PX) {
			viewCenter = targetCenter;
			isPanAnimating = false;
		} else {
			viewCenter += panDiff * (1 - Math.pow(1 - PAN_EASE, dt / 16));
		}
	}

	// Y-axis pan
	if (isYPanAnimating && !isDragging) {
		const yDiff = targetYRatio - viewYRatio;
		if (Math.abs(yDiff) < YRATIO_SETTLE_THRESHOLD) {
			viewYRatio = targetYRatio;
			isYPanAnimating = false;
		} else {
			viewYRatio += yDiff * (1 - Math.pow(1 - PAN_EASE, dt / 16));
		}
	}

	// Follow mode
	if (followEnabled && !isDragging) {
		const target = Date.now();
		viewCenter += (target - viewCenter) * (1 - Math.pow(1 - FOLLOW_EASE, dt / 16));
	}

	// Step value animations & prune offscreen entries
	const result = stepAnimations(animMap, dt, animatedMaxY);
	animatedMaxY = result.maxY;
	pruneAnimMap(animMap, viewStart, viewEnd, viewSpan);

	render();
	animationFrameId = requestAnimationFrame(tick);
}

// ── Animation sync ──────────────────────────────────────────────────
$effect(() => {
	const slots = data;
	const subs = allSubjects;
	const enabled = enabledSubjects;
	syncAnimTargets(animMap, slots, subs, enabled);
});

// Request data whenever the visible window changes.
$effect(() => {
	store.requestRange(viewStart, viewEnd);
});

// ── Lifecycle ───────────────────────────────────────────────────────
onMount(() => {
	updateSize();

	const ro = new ResizeObserver(updateSize);
	if (containerEl) ro.observe(containerEl);

	window.addEventListener("blur", onWindowBlur);

	viewCenter = Date.now();
	targetCenter = viewCenter;
	targetSpan = viewSpan;
	canvasEl?.focus();
	animationFrameId = requestAnimationFrame(tick);

	return () => {
		cancelAnimationFrame(animationFrameId);
		ro.disconnect();
		window.removeEventListener("blur", onWindowBlur);
		store.dispose();
	};
});
</script>

<div class="absolute inset-0 select-none" bind:this={containerEl}>
	<canvas
		bind:this={canvasEl}
		class="w-full h-full cursor-grab outline-none"
		class:cursor-grabbing={isDragging}
		style="display: block; touch-action: none;"
		tabindex="0"
		aria-label="Interactive enrollment timeline chart"
		onpointerdown={(e) => { canvasEl?.focus(); onPointerDown(e); }}
		onpointermove={onPointerMove}
		onpointerup={onPointerUp}
		onpointerleave={onPointerLeave}
		onpointercancel={onPointerCancel}
		onwheel={onWheel}
		onkeydown={onKeyDown}
		onkeyup={onKeyUp}
	></canvas>

	<TimelineDrawer
		bind:open={drawerOpen}
		subjects={allSubjects}
		{enabledSubjects}
		{followEnabled}
		onToggleSubject={toggleSubject}
		onEnableAll={enableAll}
		onDisableAll={disableAll}
		onResumeFollow={resumeFollow}
	/>

	<TimelineTooltip
		visible={tooltipVisible}
		x={tooltipX}
		y={tooltipY}
		slot={tooltipSlot}
		{activeSubjects}
	/>
</div>
@@ -0,0 +1,138 @@
<script lang="ts">
import { Filter, X } from "@lucide/svelte";
import { getSubjectColor } from "$lib/timeline/data";
import { DRAWER_WIDTH } from "$lib/timeline/constants";

interface Props {
	open: boolean;
	subjects: readonly string[];
	enabledSubjects: Set<string>;
	followEnabled: boolean;
	onToggleSubject: (subject: string) => void;
	onEnableAll: () => void;
	onDisableAll: () => void;
	onResumeFollow: () => void;
}

let {
	open = $bindable(),
	subjects,
	enabledSubjects,
	followEnabled,
	onToggleSubject,
	onEnableAll,
	onDisableAll,
	onResumeFollow,
}: Props = $props();

function onKeyDown(e: KeyboardEvent) {
	if (e.key === "Escape" && open) {
		open = false;
	}
}
</script>

<svelte:window onkeydown={onKeyDown} />

<!-- Filter toggle button — slides out when drawer opens -->
<button
	class="absolute right-3 z-50 p-2 rounded-md
		bg-black text-white dark:bg-white dark:text-black
		hover:bg-neutral-800 dark:hover:bg-neutral-200
		border border-black/20 dark:border-white/20
		shadow-md transition-all duration-200 ease-in-out cursor-pointer
		{open ? 'opacity-0 pointer-events-none' : 'opacity-100'}"
	style="top: 20%; transform: translateX({open ? '60px' : '0'});"
	onclick={() => (open = true)}
	aria-label="Open filters"
>
	<Filter size={18} strokeWidth={2} />
</button>

<!-- Drawer panel -->
<div
	class="absolute right-0 z-40 rounded-l-lg shadow-xl transition-transform duration-200 ease-in-out {open ? '' : 'pointer-events-none'}"
	style="top: 20%; width: {DRAWER_WIDTH}px; height: 60%; transform: translateX({open
		? 0
		: DRAWER_WIDTH}px);"
>
	<div
		class="h-full flex flex-col bg-background/90 backdrop-blur-md border border-border/40 rounded-l-lg overflow-hidden"
		style="width: {DRAWER_WIDTH}px;"
	>
		<!-- Header -->
		<div class="flex items-center justify-between px-3 py-2.5 border-b border-border/40">
			<span class="text-xs font-semibold text-foreground">Filters</span>
			<button
				class="p-0.5 rounded text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
				onclick={() => (open = false)}
				aria-label="Close filters"
			>
				<X size={14} strokeWidth={2} />
			</button>
		</div>

		<!-- Follow status -->
		<div class="px-3 py-2 border-b border-border/40">
			{#if followEnabled}
				<div
					class="px-2 py-1 rounded-md text-[10px] font-medium text-center
						bg-green-500/10 text-green-600 dark:text-green-400 border border-green-500/20"
				>
					FOLLOWING
				</div>
			{:else}
				<button
					class="w-full px-2 py-1 rounded-md text-[10px] font-medium text-center
						bg-muted/80 text-muted-foreground hover:text-foreground
						border border-border/50 transition-colors cursor-pointer"
					onclick={onResumeFollow}
					aria-label="Resume following current time"
				>
					FOLLOW
				</button>
			{/if}
		</div>

		<!-- Subject toggles -->
		<div class="flex-1 overflow-y-auto px-3 py-2">
			<div class="flex items-center justify-between mb-2 text-[10px] text-muted-foreground">
				<span class="uppercase tracking-wider font-medium">Subjects</span>
				<div class="flex gap-1.5">
					<button
						class="hover:text-foreground transition-colors cursor-pointer"
						onclick={onEnableAll}>All</button
					>
					<span class="opacity-40">|</span>
					<button
						class="hover:text-foreground transition-colors cursor-pointer"
						onclick={onDisableAll}>None</button
					>
				</div>
			</div>
			<div class="space-y-0.5">
				{#each subjects as subject}
					{@const enabled = enabledSubjects.has(subject)}
					{@const color = getSubjectColor(subject)}
					<button
						class="flex items-center gap-2 w-full px-1.5 py-1 rounded text-xs
							hover:bg-muted/50 transition-colors cursor-pointer text-left"
						onclick={() => onToggleSubject(subject)}
					>
						<span
							class="inline-block w-3 h-3 rounded-sm shrink-0 transition-opacity"
							style="background: {color}; opacity: {enabled ? 1 : 0.2};"
						></span>
						<span
							class="transition-opacity {enabled
								? 'text-foreground'
								: 'text-muted-foreground/50'}"
						>
							{subject}
						</span>
					</button>
				{/each}
			</div>
		</div>
	</div>
</div>
@@ -0,0 +1,52 @@
<script lang="ts">
import { timeFormat } from "d3-time-format";
import { getSubjectColor } from "$lib/timeline/data";
import type { TimeSlot } from "$lib/timeline/types";
import { enabledTotalClasses } from "$lib/timeline/viewport";

interface Props {
	visible: boolean;
	x: number;
	y: number;
	slot: TimeSlot | null;
	activeSubjects: readonly string[];
}

let { visible, x, y, slot, activeSubjects }: Props = $props();

const fmtTime = timeFormat("%-I:%M %p");
</script>

{#if visible && slot}
	{@const total = enabledTotalClasses(slot, activeSubjects)}
	<div
		class="pointer-events-none fixed z-50 rounded-lg border border-border/60 bg-background/95
			backdrop-blur-sm shadow-lg px-3 py-2 text-xs min-w-[140px]"
		style="left: {x + 12}px; top: {y - 10}px; transform: translateY(-100%);"
	>
		<div class="font-semibold text-foreground mb-1.5">
			{fmtTime(slot.time)}
		</div>
		<div class="space-y-0.5">
			{#each activeSubjects as subject}
				{@const count = slot.subjects[subject] || 0}
				{#if count > 0}
					<div class="flex items-center justify-between gap-3">
						<div class="flex items-center gap-1.5">
							<span
								class="inline-block w-2 h-2 rounded-sm"
								style="background: {getSubjectColor(subject)}"
							></span>
							<span class="text-muted-foreground">{subject}</span>
						</div>
						<span class="font-medium tabular-nums">{count}</span>
					</div>
				{/if}
			{/each}
		</div>
		<div class="mt-1.5 pt-1.5 border-t border-border/40 flex justify-between font-medium">
			<span>Total</span>
			<span class="tabular-nums">{total}</span>
		</div>
	</div>
{/if}
@@ -39,7 +39,7 @@ export function createSvelteTable<TData extends RowData>(options: TableOptions<T
		// eslint-disable-next-line @typescript-eslint/no-explicit-any
		onStateChange: (updater: any) => {
			if (updater instanceof Function) state = updater(state);
			else state = mergeObjects(state, updater as Partial<TableState>);
			else state = { ...state, ...(updater as Partial<TableState>) };

			options.onStateChange?.(updater);
		},

@@ -0,0 +1,32 @@
/**
 * Reactive clipboard copy with automatic "copied" state reset.
 *
 * Returns a `copiedValue` that is non-null while the copied feedback
 * should be displayed, and a `copy()` function to trigger a copy.
 */
export function useClipboard(resetMs = 2000) {
	let copiedValue = $state<string | null>(null);
	let timeoutId: number | undefined;

	async function copy(text: string, event?: MouseEvent | KeyboardEvent) {
		event?.stopPropagation();
		try {
			await navigator.clipboard.writeText(text);
			clearTimeout(timeoutId);
			copiedValue = text;
			timeoutId = window.setTimeout(() => {
				copiedValue = null;
				timeoutId = undefined;
			}, resetMs);
		} catch (err) {
			console.error("Failed to copy to clipboard:", err);
		}
	}

	return {
		get copiedValue() {
			return copiedValue;
		},
		copy,
	};
}
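// Usage sketch (illustrative; `crn` is a hypothetical value being copied):
//
//   const clipboard = useClipboard(1500);
//   <button onclick={(e) => clipboard.copy(crn, e)}>
//     {clipboard.copiedValue === crn ? "Copied!" : "Copy CRN"}
//   </button>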
@@ -0,0 +1,37 @@
import { onMount } from "svelte";
import { OverlayScrollbars, type PartialOptions } from "overlayscrollbars";
import { themeStore } from "$lib/stores/theme.svelte";

/**
 * Set up OverlayScrollbars on an element with automatic theme reactivity.
 *
 * Must be called during component initialization (uses `onMount` internally).
 * The scrollbar theme automatically syncs with `themeStore.isDark`.
 */
export function useOverlayScrollbars(getElement: () => HTMLElement, options: PartialOptions = {}) {
  onMount(() => {
    const element = getElement();
    const osInstance = OverlayScrollbars(element, {
      ...options,
      scrollbars: {
        ...options.scrollbars,
        theme: themeStore.isDark ? "os-theme-dark" : "os-theme-light",
      },
    });

    const unwatch = $effect.root(() => {
      $effect(() => {
        osInstance.options({
          scrollbars: {
            theme: themeStore.isDark ? "os-theme-dark" : "os-theme-light",
          },
        });
      });
    });

    return () => {
      unwatch();
      osInstance.destroy();
    };
  });
}
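A usage sketch (the import path and options are illustrative; the cleanup returned inside `onMount` runs automatically on destroy):

<script lang="ts">
  import { useOverlayScrollbars } from "$lib/hooks/useOverlayScrollbars.svelte"; // path is a guess

  let container: HTMLElement;

  // The getter defers element access until onMount, after bind:this has run
  useOverlayScrollbars(() => container, { overflow: { x: "hidden" } });
</script>

<div bind:this={container} class="max-h-64">...</div>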
+203
-18
@@ -1,14 +1,19 @@
import { describe, it, expect } from "vitest";
import {
  formatTime,
  formatTimeRange,
  formatMeetingDays,
  formatMeetingDaysVerbose,
  formatMeetingTime,
  formatMeetingTimeTooltip,
  formatMeetingTimesTooltip,
  abbreviateInstructor,
  formatCreditHours,
  getPrimaryInstructor,
  isMeetingTimeTBA,
  isTimeTBA,
  formatDate,
  formatDateShort,
  formatMeetingDaysLong,
} from "$lib/course";
import type { DbMeetingTime, CourseResponse, InstructorResponse } from "$lib/api";
@@ -53,13 +58,13 @@ describe("formatMeetingDays", () => {
      formatMeetingDays(makeMeetingTime({ monday: true, wednesday: true, friday: true }))
    ).toBe("MWF");
  });
  it("returns TR for tue/thu", () => {
    expect(formatMeetingDays(makeMeetingTime({ tuesday: true, thursday: true }))).toBe("TR");
  it("returns TTh for tue/thu", () => {
    expect(formatMeetingDays(makeMeetingTime({ tuesday: true, thursday: true }))).toBe("TTh");
  });
  it("returns empty string when no days", () => {
    expect(formatMeetingDays(makeMeetingTime())).toBe("");
  it("returns MW for mon/wed", () => {
    expect(formatMeetingDays(makeMeetingTime({ monday: true, wednesday: true }))).toBe("MW");
  });
  it("returns all days", () => {
  it("returns MTWThF for all weekdays", () => {
    expect(
      formatMeetingDays(
        makeMeetingTime({
@@ -68,16 +73,56 @@ describe("formatMeetingDays", () => {
          wednesday: true,
          thursday: true,
          friday: true,
          saturday: true,
          sunday: true,
        })
      )
    ).toBe("MTWRFSU");
    ).toBe("MTWThF");
  });
  it("returns partial abbreviation for single day", () => {
    expect(formatMeetingDays(makeMeetingTime({ monday: true }))).toBe("Mon");
    expect(formatMeetingDays(makeMeetingTime({ thursday: true }))).toBe("Thu");
    expect(formatMeetingDays(makeMeetingTime({ saturday: true }))).toBe("Sat");
  });
  it("concatenates codes for other multi-day combos", () => {
    expect(formatMeetingDays(makeMeetingTime({ monday: true, friday: true }))).toBe("MF");
    expect(formatMeetingDays(makeMeetingTime({ tuesday: true, saturday: true }))).toBe("TSa");
    expect(
      formatMeetingDays(makeMeetingTime({ wednesday: true, friday: true, sunday: true }))
    ).toBe("WFSu");
    expect(
      formatMeetingDays(
        makeMeetingTime({ monday: true, tuesday: true, wednesday: true, thursday: true })
      )
    ).toBe("MTWTh");
  });
  it("returns empty string when no days", () => {
    expect(formatMeetingDays(makeMeetingTime())).toBe("");
  });
});

describe("formatTimeRange", () => {
  it("elides AM when both times are AM", () => {
    expect(formatTimeRange("0900", "0950")).toBe("9:00–9:50 AM");
  });
  it("elides PM when both times are PM", () => {
    expect(formatTimeRange("1315", "1430")).toBe("1:15–2:30 PM");
  });
  it("keeps both markers when crossing noon", () => {
    expect(formatTimeRange("1130", "1220")).toBe("11:30 AM–12:20 PM");
  });
  it("returns TBA for null begin", () => {
    expect(formatTimeRange(null, "0950")).toBe("TBA");
  });
  it("returns TBA for null end", () => {
    expect(formatTimeRange("0900", null)).toBe("TBA");
  });
  it("handles midnight and noon", () => {
    expect(formatTimeRange("0000", "0050")).toBe("12:00–12:50 AM");
    expect(formatTimeRange("1200", "1250")).toBe("12:00–12:50 PM");
  });
});

describe("formatMeetingTime", () => {
  it("formats a standard meeting time", () => {
  it("formats a standard meeting time with elided AM/PM", () => {
    expect(
      formatMeetingTime(
        makeMeetingTime({
@@ -88,7 +133,19 @@ describe("formatMeetingTime", () => {
          end_time: "0950",
        })
      )
    ).toBe("MWF 9:00 AM–9:50 AM");
    ).toBe("MWF 9:00–9:50 AM");
  });
  it("keeps both markers when crossing noon", () => {
    expect(
      formatMeetingTime(
        makeMeetingTime({
          tuesday: true,
          thursday: true,
          begin_time: "1130",
          end_time: "1220",
        })
      )
    ).toBe("TTh 11:30 AM–12:20 PM");
  });
  it("returns TBA when no days", () => {
    expect(formatMeetingTime(makeMeetingTime({ begin_time: "0900", end_time: "0950" }))).toBe(
@@ -96,29 +153,74 @@ describe("formatMeetingTime", () => {
    );
  });
  it("returns days + TBA when no times", () => {
    expect(formatMeetingTime(makeMeetingTime({ monday: true }))).toBe("M TBA");
    expect(formatMeetingTime(makeMeetingTime({ monday: true }))).toBe("Mon TBA");
  });
});

describe("abbreviateInstructor", () => {
  it("abbreviates standard name", () =>
    expect(abbreviateInstructor("Heaps, John")).toBe("Heaps, J."));
  it("returns short names unabbreviated", () =>
    expect(abbreviateInstructor("Li, Bo")).toBe("Li, Bo"));
  it("returns names within budget unabbreviated", () =>
    expect(abbreviateInstructor("Heaps, John")).toBe("Heaps, John"));
  it("handles no comma", () => expect(abbreviateInstructor("Staff")).toBe("Staff"));
  it("handles multiple first names", () =>
    expect(abbreviateInstructor("Smith, Mary Jane")).toBe("Smith, M."));

  // Progressive abbreviation with multiple given names
  it("abbreviates trailing given names first", () =>
    expect(abbreviateInstructor("Ramirez, Maria Elena")).toBe("Ramirez, Maria E."));
  it("abbreviates all given names when needed", () =>
    expect(abbreviateInstructor("Ramirez, Maria Elena", 16)).toBe("Ramirez, M. E."));
  it("falls back to first initial only", () =>
    expect(abbreviateInstructor("Ramirez, Maria Elena", 12)).toBe("Ramirez, M."));

  // Single given name that exceeds budget
  it("abbreviates single given name when over budget", () =>
    expect(abbreviateInstructor("Bartholomew, Christopher", 18)).toBe("Bartholomew, C."));

  // Respects custom maxLen
  it("keeps full name when within custom budget", () =>
    expect(abbreviateInstructor("Ramirez, Maria Elena", 30)).toBe("Ramirez, Maria Elena"));
  it("always abbreviates when budget is tiny", () =>
    expect(abbreviateInstructor("Heaps, John", 5)).toBe("Heaps, J."));
});

describe("getPrimaryInstructor", () => {
  it("returns primary instructor", () => {
    const instructors: InstructorResponse[] = [
      { bannerId: "1", displayName: "A", email: null, isPrimary: false },
      { bannerId: "2", displayName: "B", email: null, isPrimary: true },
      {
        instructorId: 1,
        bannerId: "1",
        displayName: "A",
        email: "a@utsa.edu",
        isPrimary: false,
        rmpRating: null,
        rmpNumRatings: null,
        rmpLegacyId: null,
      },
      {
        instructorId: 2,
        bannerId: "2",
        displayName: "B",
        email: "b@utsa.edu",
        isPrimary: true,
        rmpRating: null,
        rmpNumRatings: null,
        rmpLegacyId: null,
      },
    ];
    expect(getPrimaryInstructor(instructors)?.displayName).toBe("B");
  });
  it("returns first instructor when no primary", () => {
    const instructors: InstructorResponse[] = [
      { bannerId: "1", displayName: "A", email: null, isPrimary: false },
      {
        instructorId: 3,
        bannerId: "1",
        displayName: "A",
        email: "a@utsa.edu",
        isPrimary: false,
        rmpRating: null,
        rmpNumRatings: null,
        rmpLegacyId: null,
      },
    ];
    expect(getPrimaryInstructor(instructors)?.displayName).toBe("A");
  });
@@ -226,3 +328,86 @@ describe("formatMeetingDaysLong", () => {
    expect(formatMeetingDaysLong(makeMeetingTime())).toBe("");
  });
});

describe("formatDateShort", () => {
  it("formats YYYY-MM-DD to short", () => {
    expect(formatDateShort("2024-08-26")).toBe("Aug 26, 2024");
  });
  it("formats MM/DD/YYYY to short", () => {
    expect(formatDateShort("12/12/2024")).toBe("Dec 12, 2024");
  });
  it("returns original for invalid", () => {
    expect(formatDateShort("bad")).toBe("bad");
  });
});

describe("formatMeetingDaysVerbose", () => {
  it("returns plural for single day", () => {
    expect(formatMeetingDaysVerbose(makeMeetingTime({ thursday: true }))).toBe("Thursdays");
  });
  it("joins two days with ampersand", () => {
    expect(formatMeetingDaysVerbose(makeMeetingTime({ tuesday: true, thursday: true }))).toBe(
      "Tuesdays & Thursdays"
    );
  });
  it("uses Oxford-style ampersand for 3+ days", () => {
    expect(
      formatMeetingDaysVerbose(makeMeetingTime({ monday: true, wednesday: true, friday: true }))
    ).toBe("Mondays, Wednesdays & Fridays");
  });
  it("returns empty string when no days", () => {
    expect(formatMeetingDaysVerbose(makeMeetingTime())).toBe("");
  });
});

describe("formatMeetingTimeTooltip", () => {
  it("formats full tooltip with location and dates", () => {
    const mt = makeMeetingTime({
      tuesday: true,
      thursday: true,
      begin_time: "1615",
      end_time: "1730",
      building_description: "Main Hall",
      room: "2.206",
    });
    expect(formatMeetingTimeTooltip(mt)).toBe(
      "Tuesdays & Thursdays, 4:15–5:30 PM\nMain Hall 2.206, Aug 26, 2024 – Dec 12, 2024"
    );
  });
  it("handles TBA days and times", () => {
    expect(formatMeetingTimeTooltip(makeMeetingTime())).toBe("TBA\nAug 26, 2024 – Dec 12, 2024");
  });
  it("handles days with TBA times", () => {
    expect(formatMeetingTimeTooltip(makeMeetingTime({ monday: true }))).toBe(
      "Mondays, TBA\nAug 26, 2024 – Dec 12, 2024"
    );
  });
});

describe("formatMeetingTimesTooltip", () => {
  it("returns TBA for empty array", () => {
    expect(formatMeetingTimesTooltip([])).toBe("TBA");
  });
  it("joins multiple meetings with blank line", () => {
    const mts = [
      makeMeetingTime({
        monday: true,
        wednesday: true,
        friday: true,
        begin_time: "0900",
        end_time: "0950",
      }),
      makeMeetingTime({
        thursday: true,
        begin_time: "1300",
        end_time: "1400",
        building_description: "Lab",
        room: "101",
      }),
    ];
    const result = formatMeetingTimesTooltip(mts);
    expect(result).toContain("Mondays, Wednesdays & Fridays, 9:00–9:50 AM");
    expect(result).toContain("Thursdays, 1:00–2:00 PM\nLab 101");
    expect(result).toContain("\n\n");
  });
});
+331
-22
@@ -10,21 +10,29 @@ export function formatTime(time: string | null): string {
  return `${display}:${minutes} ${period}`;
}

/** Get day abbreviation string like "MWF" from a meeting time */
/**
 * Compact day abbreviation for table cells.
 *
 * Single day → 3-letter: "Mon", "Thu"
 * Multi-day → concatenated codes: "MWF", "TTh", "MTWTh", "TSa"
 *
 * Codes use single letters where unambiguous (M/T/W/F) and
 * two letters where needed (Th/Sa/Su).
 */
export function formatMeetingDays(mt: DbMeetingTime): string {
  const days: [boolean, string][] = [
    [mt.monday, "M"],
    [mt.tuesday, "T"],
    [mt.wednesday, "W"],
    [mt.thursday, "R"],
    [mt.friday, "F"],
    [mt.saturday, "S"],
    [mt.sunday, "U"],
  const dayDefs: [boolean, string, string][] = [
    [mt.monday, "M", "Mon"],
    [mt.tuesday, "T", "Tue"],
    [mt.wednesday, "W", "Wed"],
    [mt.thursday, "Th", "Thu"],
    [mt.friday, "F", "Fri"],
    [mt.saturday, "Sa", "Sat"],
    [mt.sunday, "Su", "Sun"],
  ];
  return days
    .filter(([active]) => active)
    .map(([, abbr]) => abbr)
    .join("");
  const active = dayDefs.filter(([a]) => a);
  if (active.length === 0) return "";
  if (active.length === 1) return active[0][2];
  return active.map(([, code]) => code).join("");
}

/** Longer day names for detail view: single day → "Thursdays", multiple → "Mon, Wed, Fri" */
@@ -44,23 +52,79 @@ export function formatMeetingDaysLong(mt: DbMeetingTime): string {
  return active.map(([, short]) => short).join(", ");
}

/** Condensed meeting time: "MWF 9:00 AM–9:50 AM" */
/**
 * Format a time range with smart AM/PM elision.
 *
 * Same period: "9:00–9:50 AM"
 * Cross-period: "11:30 AM–12:20 PM"
 * Missing: "TBA"
 */
export function formatTimeRange(begin: string | null, end: string | null): string {
  if (!begin || begin.length !== 4 || !end || end.length !== 4) return "TBA";

  const bHours = parseInt(begin.slice(0, 2), 10);
  const eHours = parseInt(end.slice(0, 2), 10);
  const bPeriod = bHours >= 12 ? "PM" : "AM";
  const ePeriod = eHours >= 12 ? "PM" : "AM";

  const bDisplay = bHours > 12 ? bHours - 12 : bHours === 0 ? 12 : bHours;
  const eDisplay = eHours > 12 ? eHours - 12 : eHours === 0 ? 12 : eHours;

  const endStr = `${eDisplay}:${end.slice(2)} ${ePeriod}`;
  if (bPeriod === ePeriod) {
    return `${bDisplay}:${begin.slice(2)}–${endStr}`;
  }
  return `${bDisplay}:${begin.slice(2)} ${bPeriod}–${endStr}`;
}

/** Condensed meeting time: "MWF 9:00–9:50 AM" */
export function formatMeetingTime(mt: DbMeetingTime): string {
  const days = formatMeetingDays(mt);
  if (!days) return "TBA";
  const begin = formatTime(mt.begin_time);
  const end = formatTime(mt.end_time);
  if (begin === "TBA") return `${days} TBA`;
  return `${days} ${begin}–${end}`;
  const range = formatTimeRange(mt.begin_time, mt.end_time);
  if (range === "TBA") return `${days} TBA`;
  return `${days} ${range}`;
}

/** Abbreviate instructor name: "Heaps, John" → "Heaps, J." */
export function abbreviateInstructor(name: string): string {
/**
 * Progressively abbreviate an instructor name to fit within a character budget.
 *
 * Tries each level until the result fits `maxLen`:
 * 1. Full name: "Ramirez, Maria Elena"
 * 2. Abbreviate trailing given names: "Ramirez, Maria E."
 * 3. Abbreviate all given names: "Ramirez, M. E."
 * 4. First initial only: "Ramirez, M."
 *
 * Names without a comma (e.g. "Staff") are returned as-is.
 */
export function abbreviateInstructor(name: string, maxLen: number = 18): string {
  if (name.length <= maxLen) return name;

  const commaIdx = name.indexOf(", ");
  if (commaIdx === -1) return name;

  const last = name.slice(0, commaIdx);
  const first = name.slice(commaIdx + 2);
  return `${last}, ${first.charAt(0)}.`;
  const parts = name.slice(commaIdx + 2).split(" ");

  // Level 2: abbreviate trailing given names, keep first given name intact
  // "Maria Elena" → "Maria E."
  if (parts.length > 1) {
    const abbreviated = [parts[0], ...parts.slice(1).map((p) => `${p[0]}.`)].join(" ");
    const result = `${last}, ${abbreviated}`;
    if (result.length <= maxLen) return result;
  }

  // Level 3: abbreviate all given names
  // "Maria Elena" → "M. E."
  if (parts.length > 1) {
    const allInitials = parts.map((p) => `${p[0]}.`).join(" ");
    const result = `${last}, ${allInitials}`;
    if (result.length <= maxLen) return result;
  }

  // Level 4: first initial only
  // "Maria Elena" → "M." or "John" → "J."
  return `${last}, ${parts[0][0]}.`;
}
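A quick trace of the budget levels, using values from the tests above ("Ramirez, Maria Elena" is 20 characters):

abbreviateInstructor("Ramirez, Maria Elena");     // 20 > 18 → level 2: "Ramirez, Maria E." (17)
abbreviateInstructor("Ramirez, Maria Elena", 16); // level 2 gives 17 > 16 → level 3: "Ramirez, M. E." (14)
abbreviateInstructor("Ramirez, Maria Elena", 12); // level 3 gives 14 > 12 → level 4: "Ramirez, M."
abbreviateInstructor("Staff");                    // no comma → returned as-is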
/** Get primary instructor from a course, or first instructor */
@@ -119,6 +183,230 @@ export function formatLocationLong(mt: DbMeetingTime): string | null {
  return mt.room ? `${name} ${mt.room}` : name;
}

/** Format a date as "Aug 26, 2024". Accepts YYYY-MM-DD or MM/DD/YYYY. */
export function formatDateShort(dateStr: string): string {
  let year: number, month: number, day: number;
  if (dateStr.includes("-")) {
    [year, month, day] = dateStr.split("-").map(Number);
  } else if (dateStr.includes("/")) {
    [month, day, year] = dateStr.split("/").map(Number);
  } else {
    return dateStr;
  }
  if (!year || !month || !day) return dateStr;
  const date = new Date(year, month - 1, day);
  return date.toLocaleDateString("en-US", { year: "numeric", month: "short", day: "numeric" });
}

/**
 * Verbose day names for tooltips: "Tuesdays & Thursdays", "Mondays, Wednesdays & Fridays".
 * Single day → plural: "Thursdays".
 */
export function formatMeetingDaysVerbose(mt: DbMeetingTime): string {
  const dayDefs: [boolean, string][] = [
    [mt.monday, "Mondays"],
    [mt.tuesday, "Tuesdays"],
    [mt.wednesday, "Wednesdays"],
    [mt.thursday, "Thursdays"],
    [mt.friday, "Fridays"],
    [mt.saturday, "Saturdays"],
    [mt.sunday, "Sundays"],
  ];
  const active = dayDefs.filter(([a]) => a).map(([, name]) => name);
  if (active.length === 0) return "";
  if (active.length === 1) return active[0];
  return active.slice(0, -1).join(", ") + " & " + active[active.length - 1];
}

/**
 * Full verbose tooltip for a single meeting time:
 * "Tuesdays & Thursdays, 4:15–5:30 PM\nMain Hall 2.206, Aug 26, 2024 – Dec 12, 2024"
 */
export function formatMeetingTimeTooltip(mt: DbMeetingTime): string {
  const days = formatMeetingDaysVerbose(mt);
  const range = formatTimeRange(mt.begin_time, mt.end_time);
  let line1: string;
  if (!days && range === "TBA") {
    line1 = "TBA";
  } else if (!days) {
    line1 = range;
  } else if (range === "TBA") {
    line1 = `${days}, TBA`;
  } else {
    line1 = `${days}, ${range}`;
  }

  const parts = [line1];

  const loc = formatLocationLong(mt);
  const dateRange =
    mt.start_date && mt.end_date
      ? `${formatDateShort(mt.start_date)} – ${formatDateShort(mt.end_date)}`
      : null;

  if (loc && dateRange) {
    parts.push(`${loc}, ${dateRange}`);
  } else if (loc) {
    parts.push(loc);
  } else if (dateRange) {
    parts.push(dateRange);
  }

  return parts.join("\n");
}

/** Full verbose tooltip for all meeting times on a course, blank-line-separated. */
export function formatMeetingTimesTooltip(meetingTimes: DbMeetingTime[]): string {
  if (meetingTimes.length === 0) return "TBA";
  return meetingTimes.map(formatMeetingTimeTooltip).join("\n\n");
}

/**
 * Delivery concern category for visual accent on location cells.
 * - "online": fully online with no physical location (OA, OS, OH without INT building)
 * - "internet": internet campus with INT building code
 * - "hybrid": mix of online and in-person (HB, H1, H2)
 * - "off-campus": in-person but not on Main Campus
 * - null: normal in-person on main campus (no accent)
 */
export type DeliveryConcern = "online" | "internet" | "hybrid" | "off-campus" | null;

const ONLINE_METHODS = new Set(["OA", "OS", "OH"]);
const HYBRID_METHODS = new Set(["HB", "H1", "H2"]);
const MAIN_CAMPUS = "11";
const ONLINE_CAMPUSES = new Set(["9", "ONL"]);

export function getDeliveryConcern(course: CourseResponse): DeliveryConcern {
  const method = course.instructionalMethod;
  if (method && ONLINE_METHODS.has(method)) {
    const hasIntBuilding = course.meetingTimes.some((mt: DbMeetingTime) => mt.building === "INT");
    return hasIntBuilding ? "internet" : "online";
  }
  if (method && HYBRID_METHODS.has(method)) return "hybrid";
  if (course.campus && course.campus !== MAIN_CAMPUS && !ONLINE_CAMPUSES.has(course.campus)) {
    return "off-campus";
  }
  return null;
}

/** Border accent color for each delivery concern type. */
export function concernAccentColor(concern: DeliveryConcern): string | null {
  switch (concern) {
    case "online":
      return "#3b82f6"; // blue-500
    case "internet":
      return "#06b6d4"; // cyan-500
    case "hybrid":
      return "#a855f7"; // purple-500
    case "off-campus":
      return "#f59e0b"; // amber-500
    default:
      return null;
  }
}

/**
 * Location display text for the table cell.
 * Falls back to "Online" for online courses instead of showing a dash.
 */
export function formatLocationDisplay(course: CourseResponse): string | null {
  const loc = formatLocation(course);
  if (loc) return loc;
  const concern = getDeliveryConcern(course);
  if (concern === "online") return "Online";
  return null;
}

/** Tooltip text for the location column: long-form location + delivery note */
export function formatLocationTooltip(course: CourseResponse): string | null {
  const parts: string[] = [];

  for (const mt of course.meetingTimes) {
    const loc = formatLocationLong(mt);
    if (loc && !parts.includes(loc)) parts.push(loc);
  }

  const locationLine = parts.length > 0 ? parts.join(", ") : null;

  const concern = getDeliveryConcern(course);
  let deliveryNote: string | null = null;
  if (concern === "online") deliveryNote = "Online";
  else if (concern === "internet") deliveryNote = "Internet";
  else if (concern === "hybrid") deliveryNote = "Hybrid";
  else if (concern === "off-campus") deliveryNote = "Off-campus";

  if (locationLine && deliveryNote) return `${locationLine}\n${deliveryNote}`;
  if (locationLine) return locationLine;
  if (deliveryNote) return deliveryNote;
  return null;
}

/** Number of open seats in a course section */
export function openSeats(course: CourseResponse): number {
  return Math.max(0, course.maxEnrollment - course.enrollment);
}

/** Text color class for seat availability: red (full), yellow (low), green (open) */
export function seatsColor(course: CourseResponse): string {
  const open = openSeats(course);
  if (open === 0) return "text-status-red";
  if (open <= 5) return "text-yellow-500";
  return "text-status-green";
}

/** Background dot color class for seat availability */
export function seatsDotColor(course: CourseResponse): string {
  const open = openSeats(course);
  if (open === 0) return "bg-red-500";
  if (open <= 5) return "bg-yellow-500";
  return "bg-green-500";
}

/** Minimum number of ratings needed to consider RMP data reliable */
export const RMP_CONFIDENCE_THRESHOLD = 7;

/** RMP professor page URL from legacy ID */
export function rmpUrl(legacyId: number): string {
  return `https://www.ratemyprofessors.com/professor/${legacyId}`;
}

/**
 * Smooth OKLCH color + text-shadow for a RateMyProfessors rating.
 *
 * Three-stop gradient interpolated in OKLCH:
 * 1.0 → red, 3.0 → amber, 5.0 → green
 * with separate light/dark mode tuning.
 */
export function ratingStyle(rating: number, isDark: boolean): string {
  const clamped = Math.max(1, Math.min(5, rating));

  // OKLCH stops: [lightness, chroma, hue]
  const stops: { light: [number, number, number]; dark: [number, number, number] }[] = [
    { light: [0.63, 0.2, 25], dark: [0.7, 0.19, 25] }, // 1.0 – red
    { light: [0.7, 0.16, 85], dark: [0.78, 0.15, 85] }, // 3.0 – amber
    { light: [0.65, 0.2, 145], dark: [0.72, 0.19, 145] }, // 5.0 – green
  ];

  let t: number;
  let fromIdx: number;
  if (clamped <= 3) {
    t = (clamped - 1) / 2;
    fromIdx = 0;
  } else {
    t = (clamped - 3) / 2;
    fromIdx = 1;
  }

  const from = isDark ? stops[fromIdx].dark : stops[fromIdx].light;
  const to = isDark ? stops[fromIdx + 1].dark : stops[fromIdx + 1].light;

  const l = from[0] + (to[0] - from[0]) * t;
  const c = from[1] + (to[1] - from[1]) * t;
  const h = from[2] + (to[2] - from[2]) * t;

  return `color: oklch(${l.toFixed(3)} ${c.toFixed(3)} ${h.toFixed(1)}); text-shadow: 0 0 4px oklch(${l.toFixed(3)} ${c.toFixed(3)} ${h.toFixed(1)} / 0.3);`;
}
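A worked interpolation for a 4.0 rating in light mode (halfway between the amber and green stops):

// clamped = 4 → fromIdx = 1, t = (4 - 3) / 2 = 0.5
// l = 0.70 + (0.65 - 0.70) * 0.5 = 0.675
// c = 0.16 + (0.20 - 0.16) * 0.5 = 0.180
// h = 85  + (145 - 85) * 0.5    = 115.0
ratingStyle(4.0, false);
// → "color: oklch(0.675 0.180 115.0); text-shadow: 0 0 4px oklch(0.675 0.180 115.0 / 0.3);"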
/** Format credit hours display */
export function formatCreditHours(course: CourseResponse): string {
  if (course.creditHours != null) return String(course.creditHours);
@@ -127,3 +415,24 @@ export function formatCreditHours(course: CourseResponse): string {
  }
  return "—";
}

/**
 * Convert Banner "Last, First Middle" → "First Middle Last".
 * Handles: no comma (returned as-is), trailing/leading spaces,
 * middle names/initials preserved.
 */
export function formatInstructorName(displayName: string): string {
  const commaIdx = displayName.indexOf(",");
  if (commaIdx === -1) return displayName.trim();

  const last = displayName.slice(0, commaIdx).trim();
  const rest = displayName.slice(commaIdx + 1).trim();
  if (!rest) return last;

  return `${rest} ${last}`;
}

/** Check if a rating value represents real data (not the 0.0 placeholder for unrated professors). */
export function isRatingValid(avgRating: number | null, numRatings: number): boolean {
  return avgRating !== null && !(avgRating === 0 && numRatings === 0);
}
@@ -0,0 +1,13 @@
import { format, formatDistanceToNow } from "date-fns";

/** Returns a relative time string like "3 minutes ago" or "in 2 hours". */
export function formatRelativeDate(date: string | Date): string {
  const d = typeof date === "string" ? new Date(date) : date;
  return formatDistanceToNow(d, { addSuffix: true });
}

/** Returns a full absolute datetime string for tooltip display, e.g. "Jan 29, 2026, 3:45:12 PM". */
export function formatAbsoluteDate(date: string | Date): string {
  const d = typeof date === "string" ? new Date(date) : date;
  return format(d, "MMM d, yyyy, h:mm:ss a");
}
@@ -0,0 +1,131 @@
import { describe, expect, it } from "vitest";
import { formatDiffPath, jsonDiff, tryParseJson } from "./diff";

describe("jsonDiff", () => {
  describe("scalars", () => {
    it("returns empty array for identical primitives", () => {
      expect(jsonDiff(42, 42)).toEqual([]);
      expect(jsonDiff("hello", "hello")).toEqual([]);
      expect(jsonDiff(true, true)).toEqual([]);
      expect(jsonDiff(null, null)).toEqual([]);
    });

    it("returns single entry for different primitives", () => {
      expect(jsonDiff("Open", "Closed")).toEqual([{ path: "", oldVal: "Open", newVal: "Closed" }]);
      expect(jsonDiff(25, 30)).toEqual([{ path: "", oldVal: 25, newVal: 30 }]);
      expect(jsonDiff(true, false)).toEqual([{ path: "", oldVal: true, newVal: false }]);
    });

    it("returns entry when types differ", () => {
      expect(jsonDiff(1, "1")).toEqual([{ path: "", oldVal: 1, newVal: "1" }]);
      expect(jsonDiff(null, 0)).toEqual([{ path: "", oldVal: null, newVal: 0 }]);
    });
  });

  describe("objects", () => {
    it("detects changed key", () => {
      expect(jsonDiff({ a: 1 }, { a: 2 })).toEqual([{ path: ".a", oldVal: 1, newVal: 2 }]);
    });

    it("detects added key", () => {
      expect(jsonDiff({}, { a: 1 })).toEqual([{ path: ".a", oldVal: undefined, newVal: 1 }]);
    });

    it("detects removed key", () => {
      expect(jsonDiff({ a: 1 }, {})).toEqual([{ path: ".a", oldVal: 1, newVal: undefined }]);
    });

    it("handles deeply nested changes", () => {
      const oldVal = { a: { b: { c: 1 } } };
      const newVal = { a: { b: { c: 2 } } };
      expect(jsonDiff(oldVal, newVal)).toEqual([{ path: ".a.b.c", oldVal: 1, newVal: 2 }]);
    });

    it("returns empty for identical objects", () => {
      expect(jsonDiff({ a: 1, b: "x" }, { a: 1, b: "x" })).toEqual([]);
    });
  });

  describe("arrays", () => {
    it("detects changed element", () => {
      expect(jsonDiff([1, 2, 3], [1, 99, 3])).toEqual([{ path: "[1]", oldVal: 2, newVal: 99 }]);
    });

    it("detects added element (new array longer)", () => {
      expect(jsonDiff([1], [1, 2])).toEqual([{ path: "[1]", oldVal: undefined, newVal: 2 }]);
    });

    it("detects removed element (new array shorter)", () => {
      expect(jsonDiff([1, 2], [1])).toEqual([{ path: "[1]", oldVal: 2, newVal: undefined }]);
    });

    it("returns empty for identical arrays", () => {
      expect(jsonDiff([1, 2, 3], [1, 2, 3])).toEqual([]);
    });
  });

  describe("mixed nesting", () => {
    it("handles array of objects", () => {
      const oldVal = [{ name: "Alice" }, { name: "Bob" }];
      const newVal = [{ name: "Alice" }, { name: "Charlie" }];
      expect(jsonDiff(oldVal, newVal)).toEqual([
        { path: "[1].name", oldVal: "Bob", newVal: "Charlie" },
      ]);
    });

    it("handles object with nested arrays", () => {
      const oldVal = { meetingTimes: [{ beginTime: "0900" }] };
      const newVal = { meetingTimes: [{ beginTime: "1000" }] };
      expect(jsonDiff(oldVal, newVal)).toEqual([
        { path: ".meetingTimes[0].beginTime", oldVal: "0900", newVal: "1000" },
      ]);
    });

    it("handles type change from object to array", () => {
      expect(jsonDiff({ a: 1 }, [1])).toEqual([{ path: "", oldVal: { a: 1 }, newVal: [1] }]);
    });
  });
});

describe("tryParseJson", () => {
  it("parses valid JSON object", () => {
    expect(tryParseJson('{"a":1}')).toEqual({ a: 1 });
  });

  it("parses valid JSON array", () => {
    expect(tryParseJson("[1,2,3]")).toEqual([1, 2, 3]);
  });

  it("parses plain string numbers", () => {
    expect(tryParseJson("42")).toBe(42);
    expect(tryParseJson("3.14")).toBe(3.14);
  });

  it("returns null for invalid JSON", () => {
    expect(tryParseJson("not json")).toBeNull();
    expect(tryParseJson("{broken")).toBeNull();
  });

  it("parses boolean and null literals", () => {
    expect(tryParseJson("true")).toBe(true);
    expect(tryParseJson("null")).toBeNull();
  });
});

describe("formatDiffPath", () => {
  it("strips leading dot", () => {
    expect(formatDiffPath(".a.b.c")).toBe("a.b.c");
  });

  it("returns (root) for empty path", () => {
    expect(formatDiffPath("")).toBe("(root)");
  });

  it("preserves bracket notation", () => {
    expect(formatDiffPath("[0].name")).toBe("[0].name");
  });

  it("handles mixed paths", () => {
    expect(formatDiffPath(".meetingTimes[0].beginTime")).toBe("meetingTimes[0].beginTime");
  });
});
@@ -0,0 +1,86 @@
export interface DiffEntry {
  path: string;
  oldVal: unknown;
  newVal: unknown;
}

function isObject(val: unknown): val is Record<string, unknown> {
  return val !== null && typeof val === "object" && !Array.isArray(val);
}

/**
 * Recursively compares two JSON-compatible values and returns a list of
 * structural differences with dot-notation paths.
 */
export function jsonDiff(oldVal: unknown, newVal: unknown): DiffEntry[] {
  return diffRecurse("", oldVal, newVal);
}

function diffRecurse(path: string, oldVal: unknown, newVal: unknown): DiffEntry[] {
  // Both arrays: compare by index up to max length
  if (Array.isArray(oldVal) && Array.isArray(newVal)) {
    const entries: DiffEntry[] = [];
    const maxLen = Math.max(oldVal.length, newVal.length);
    for (let i = 0; i < maxLen; i++) {
      const childPath = `${path}[${i}]`;
      const inOld = i < oldVal.length;
      const inNew = i < newVal.length;
      if (inOld && inNew) {
        entries.push(...diffRecurse(childPath, oldVal[i], newVal[i]));
      } else if (inNew) {
        entries.push({ path: childPath, oldVal: undefined, newVal: newVal[i] });
      } else {
        entries.push({ path: childPath, oldVal: oldVal[i], newVal: undefined });
      }
    }
    return entries;
  }

  // Both objects: iterate union of keys
  if (isObject(oldVal) && isObject(newVal)) {
    const entries: DiffEntry[] = [];
    const allKeys = new Set([...Object.keys(oldVal), ...Object.keys(newVal)]);
    for (const key of allKeys) {
      const childPath = `${path}.${key}`;
      const inOld = key in oldVal;
      const inNew = key in newVal;
      if (inOld && inNew) {
        entries.push(...diffRecurse(childPath, oldVal[key], newVal[key]));
      } else if (inNew) {
        entries.push({ path: childPath, oldVal: undefined, newVal: newVal[key] });
      } else {
        entries.push({ path: childPath, oldVal: oldVal[key], newVal: undefined });
      }
    }
    return entries;
  }

  // Leaf comparison (primitives, or type mismatch between object/array/primitive)
  if (oldVal !== newVal) {
    return [{ path, oldVal, newVal }];
  }

  return [];
}

/**
 * Cleans up a diff path for display. Strips the leading dot produced by
 * object-key paths, and returns "(root)" for the empty root path.
 */
export function formatDiffPath(path: string): string {
  if (path === "") return "(root)";
  if (path.startsWith(".")) return path.slice(1);
  return path;
}

/**
 * Attempts to parse a string as JSON. Returns the parsed value on success,
 * or null if parsing fails. Used by the audit log to detect JSON values.
 */
export function tryParseJson(value: string): unknown | null {
  try {
    return JSON.parse(value);
  } catch {
    return null;
  }
}
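A sketch of how an audit-log row might combine the three exports (`entry` and its fields are illustrative, not from the diff):

import { formatDiffPath, jsonDiff, tryParseJson } from "./diff";

declare const entry: { oldValue: string; newValue: string };

const oldJson = tryParseJson(entry.oldValue);
const newJson = tryParseJson(entry.newValue);

if (oldJson !== null && newJson !== null) {
  for (const { path, oldVal, newVal } of jsonDiff(oldJson, newJson)) {
    console.log(`${formatDiffPath(path)}: ${JSON.stringify(oldVal)} → ${JSON.stringify(newVal)}`);
  }
}

Note that `tryParseJson` returns null both on parse failure and for the literal string "null", so a plain null check conflates the two; for audit values that is usually acceptable.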
@@ -0,0 +1,52 @@
import { beforeNavigate } from "$app/navigation";

export type NavDirection = "left" | "right" | "fade";

/** Sidebar nav order — indexes determine slide direction for same-depth siblings */
const SIDEBAR_NAV_ORDER = [
  "/profile",
  "/settings",
  "/admin",
  "/admin/jobs",
  "/admin/audit",
  "/admin/users",
];

function getDepth(path: string): number {
  return path.replace(/\/$/, "").split("/").filter(Boolean).length;
}

function getSidebarIndex(path: string): number {
  return SIDEBAR_NAV_ORDER.indexOf(path);
}

function computeDirection(from: string, to: string): NavDirection {
  const fromDepth = getDepth(from);
  const toDepth = getDepth(to);

  if (toDepth > fromDepth) return "right";
  if (toDepth < fromDepth) return "left";

  // Same depth — use sidebar ordering if both are sidebar routes
  const fromIdx = getSidebarIndex(from);
  const toIdx = getSidebarIndex(to);
  if (fromIdx >= 0 && toIdx >= 0) {
    return toIdx > fromIdx ? "right" : "left";
  }

  return "fade";
}

class NavigationStore {
  direction: NavDirection = $state("fade");
}

export const navigationStore = new NavigationStore();

/** Call once from root layout to start tracking navigation direction */
export function initNavigation() {
  beforeNavigate(({ from, to }) => {
    if (!from?.url || !to?.url) return;
    navigationStore.direction = computeDirection(from.url.pathname, to.url.pathname);
  });
}
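A root-layout sketch (the import path is a guess):

<script lang="ts">
  import { initNavigation } from "$lib/stores/navigation.svelte"; // path is illustrative

  // Registers the beforeNavigate hook once; must run during component init
  initNavigation();
</script>

Transition components can then read `navigationStore.direction` to pick a slide-left, slide-right, or fade animation for the incoming page.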
+70
-44
@@ -1,69 +1,95 @@
/**
 * Relative time formatting with adaptive refresh intervals.
 *
 * The key insight: a timestamp showing "3 seconds ago" needs to update every second,
 * but "2 hours ago" only needs to update every minute. This module provides both
 * The key insight: a timestamp showing "3s" needs to update every second,
 * but "2h 15m" only needs to update every minute. This module provides both
 * the formatted string and the optimal interval until the next meaningful change.
 */

interface RelativeTimeResult {
  /** The human-readable relative time string (e.g. "3 seconds ago") */
  /** Compact relative time string (e.g. "9m 35s", "1h 23m", "3d") */
  text: string;
  /** Milliseconds until the displayed text would change */
  nextUpdateMs: number;
}

/**
 * Compute a relative time string and the interval until it next changes.
 * Format a duration in milliseconds as a compact human-readable string.
 *
 * Granularity tiers:
 * - < 60s: per-second ("1 second ago", "45 seconds ago")
 * - < 60m: per-minute ("1 minute ago", "12 minutes ago")
 * - < 24h: per-hour ("1 hour ago", "5 hours ago")
 * - >= 24h: per-day ("1 day ago", "3 days ago")
 * Format tiers:
 * - < 60s: seconds only ("45s")
 * - < 1h: minutes + seconds ("9m 35s")
 * - < 24h: hours + minutes ("1h 23m")
 * - >= 24h: days only ("3d")
 */
export function formatDuration(ms: number): string {
  const totalSeconds = Math.floor(Math.abs(ms) / 1000);

  if (totalSeconds < 60) return `${totalSeconds}s`;

  const totalMinutes = Math.floor(totalSeconds / 60);
  if (totalMinutes < 60) {
    const secs = totalSeconds % 60;
    return `${totalMinutes}m ${secs}s`;
  }

  const totalHours = Math.floor(totalMinutes / 60);
  if (totalHours < 24) {
    const mins = totalMinutes % 60;
    return `${totalHours}h ${mins}m`;
  }

  const days = Math.floor(totalHours / 24);
  return `${days}d`;
}

/**
 * Compute a compact relative time string and the interval until it next changes.
 *
 * Uses {@link formatDuration} for the text, plus computes the optimal refresh
 * interval so callers can schedule the next update efficiently.
 */
/**
 * Format a millisecond duration with a dynamic unit, optimised for
 * scrape-style timings that are typically under 60 seconds.
 *
 * - < 1 000 ms → "423ms"
 * - < 10 000 ms → "4.52s" (two decimals)
 * - < 60 000 ms → "16.9s" (one decimal)
 * - ≥ 60 000 ms → delegates to {@link formatDuration} ("1m 5s")
 */
export function formatDurationMs(ms: number): string {
  const abs = Math.abs(ms);
  if (abs < 1_000) return `${Math.round(abs)}ms`;
  if (abs < 10_000) return `${(abs / 1_000).toFixed(2)}s`;
  if (abs < 60_000) return `${(abs / 1_000).toFixed(1)}s`;
  return formatDuration(ms);
}

export function relativeTime(date: Date, ref: Date): RelativeTimeResult {
  const diffMs = ref.getTime() - date.getTime();
  const seconds = Math.round(diffMs / 1000);
  const totalSeconds = Math.floor(diffMs / 1000);

  if (seconds < 1) {
    return { text: "just now", nextUpdateMs: 1000 - (diffMs % 1000) || 1000 };
  if (totalSeconds < 1) {
    return { text: "now", nextUpdateMs: 1000 - (diffMs % 1000) || 1000 };
  }

  if (seconds < 60) {
    const remainder = 1000 - (diffMs % 1000);
    return {
      text: seconds === 1 ? "1 second ago" : `${seconds} seconds ago`,
      nextUpdateMs: remainder || 1000,
    };
  }
  const text = formatDuration(diffMs);

  const minutes = Math.floor(seconds / 60);
  if (minutes < 60) {
    // Update when the next minute boundary is crossed
  // Compute optimal next-update interval based on the current tier
  const totalMinutes = Math.floor(totalSeconds / 60);
  const totalHours = Math.floor(totalMinutes / 60);

  let nextUpdateMs: number;
  if (totalHours >= 24) {
    const msIntoCurrentDay = diffMs % 86_400_000;
    nextUpdateMs = 86_400_000 - msIntoCurrentDay || 86_400_000;
  } else if (totalMinutes >= 60) {
    const msIntoCurrentMinute = diffMs % 60_000;
    const msUntilNextMinute = 60_000 - msIntoCurrentMinute;
    return {
      text: minutes === 1 ? "1 minute ago" : `${minutes} minutes ago`,
      nextUpdateMs: msUntilNextMinute || 60_000,
    };
    nextUpdateMs = 60_000 - msIntoCurrentMinute || 60_000;
  } else {
    nextUpdateMs = 1000 - (diffMs % 1000) || 1000;
  }

  const hours = Math.floor(minutes / 60);
  if (hours < 24) {
    const msIntoCurrentHour = diffMs % 3_600_000;
    const msUntilNextHour = 3_600_000 - msIntoCurrentHour;
    return {
      text: hours === 1 ? "1 hour ago" : `${hours} hours ago`,
      nextUpdateMs: msUntilNextHour || 3_600_000,
    };
  }

  const days = Math.floor(hours / 24);
  const msIntoCurrentDay = diffMs % 86_400_000;
  const msUntilNextDay = 86_400_000 - msIntoCurrentDay;
  return {
    text: days === 1 ? "1 day ago" : `${days} days ago`,
    nextUpdateMs: msUntilNextDay || 86_400_000,
  };
  return { text, nextUpdateMs };
}
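A sketch of the adaptive ticker this API enables — sleep exactly until the text would change instead of polling on a fixed interval (`startTicker` is illustrative; `relativeTime` is the export above):

function startTicker(date: Date, render: (text: string) => void): () => void {
  let handle: number | undefined;
  const tick = () => {
    const { text, nextUpdateMs } = relativeTime(date, new Date());
    render(text);
    // "9m 35s" schedules ~1s out; "1h 23m" schedules at the next minute boundary
    handle = window.setTimeout(tick, nextUpdateMs);
  };
  tick();
  return () => clearTimeout(handle);
}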
@@ -0,0 +1,111 @@
/**
 * Animation-map management for the timeline's stacked-area transitions.
 *
 * Each visible slot has per-subject animated values that lerp toward
 * targets. This module owns the AnimMap lifecycle: syncing targets,
 * stepping current values, and pruning offscreen entries.
 */
import { VALUE_EASE, MAXY_EASE, SETTLE_THRESHOLD, MIN_MAXY } from "./constants";
import type { AnimEntry, TimeSlot } from "./types";

export type AnimMap = Map<number, Map<string, AnimEntry>>;

/** Create a fresh, empty animation map. */
export function createAnimMap(): AnimMap {
  return new Map();
}

/**
 * Sync animMap targets from data + filter state.
 * New slots start at current=0 so they animate in from the baseline.
 * Disabled subjects get target=0 so they animate out.
 *
 * @param subjects - the full list of known subject codes
 * @param enabledSubjects - subjects currently toggled on
 */
export function syncAnimTargets(
  animMap: AnimMap,
  slots: TimeSlot[],
  subjects: readonly string[],
  enabledSubjects: Set<string>
): void {
  for (const slot of slots) {
    const timeMs = slot.time.getTime();
    let subjectMap = animMap.get(timeMs);
    if (!subjectMap) {
      subjectMap = new Map();
      animMap.set(timeMs, subjectMap);
    }

    for (const subject of subjects) {
      const realValue = enabledSubjects.has(subject) ? slot.subjects[subject] || 0 : 0;
      const entry = subjectMap.get(subject);
      if (entry) {
        entry.target = realValue;
      } else {
        subjectMap.set(subject, { current: 0, target: realValue });
      }
    }
  }
}

/**
 * Advance all animated values toward their targets.
 * Returns the new animatedMaxY value and whether any entry is still moving.
 */
export function stepAnimations(
  animMap: AnimMap,
  dt: number,
  prevMaxY: number
): { animating: boolean; maxY: number } {
  const ease = 1 - Math.pow(1 - VALUE_EASE, dt / 16);
  let animating = false;
  let computedMaxY = MIN_MAXY;

  for (const subjectMap of animMap.values()) {
    let slotSum = 0;
    for (const entry of subjectMap.values()) {
      const diff = entry.target - entry.current;
      if (Math.abs(diff) < SETTLE_THRESHOLD) {
        if (entry.current !== entry.target) {
          entry.current = entry.target;
        }
      } else {
        entry.current += diff * ease;
        animating = true;
      }
      slotSum += entry.current;
    }
    if (slotSum > computedMaxY) computedMaxY = slotSum;
  }

  const maxYEase = 1 - Math.pow(1 - MAXY_EASE, dt / 16);
  const maxDiff = computedMaxY - prevMaxY;
  let maxY: number;
  if (Math.abs(maxDiff) < SETTLE_THRESHOLD) {
    maxY = computedMaxY;
  } else {
    maxY = prevMaxY + maxDiff * maxYEase;
    animating = true;
  }

  return { animating, maxY };
}

/**
 * Remove animMap entries whose timestamps fall outside the viewport
 * (with margin), preventing unbounded memory growth during long sessions.
 */
export function pruneAnimMap(
  animMap: AnimMap,
  viewStart: number,
  viewEnd: number,
  viewSpan: number
): void {
  const margin = viewSpan;
  for (const timeMs of animMap.keys()) {
    if (timeMs < viewStart - margin || timeMs > viewEnd + margin) {
      animMap.delete(timeMs);
    }
  }
}
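A sketch of the requestAnimationFrame loop that would drive these helpers (the module path, slot/filter/viewport inputs, and `draw` are illustrative):

import { createAnimMap, syncAnimTargets, stepAnimations, pruneAnimMap, type AnimMap } from "./animations";
import { MIN_MAXY } from "./constants";
import type { TimeSlot } from "./types";

// Illustrative inputs — in the real component these come from data + UI state
declare const visibleSlots: TimeSlot[];
declare const allSubjects: readonly string[];
declare const enabledSubjects: Set<string>;
declare let viewStart: number, viewEnd: number;
declare function draw(animMap: AnimMap, maxY: number): void;

const animMap = createAnimMap();
let maxY = MIN_MAXY;
let last = performance.now();

function frame(now: number) {
  const dt = now - last;
  last = now;

  syncAnimTargets(animMap, visibleSlots, allSubjects, enabledSubjects);
  const step = stepAnimations(animMap, dt, maxY);
  maxY = step.maxY;
  pruneAnimMap(animMap, viewStart, viewEnd, viewEnd - viewStart);

  draw(animMap, maxY); // repaint the stacked areas from entry.current values
  if (step.animating) requestAnimationFrame(frame); // stop when everything has settled
}
requestAnimationFrame(frame);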
Some files were not shown because too many files have changed in this diff