refactor: standardize error responses with ApiError and ts-rs bindings
+5 -2
@@ -4,10 +4,12 @@ use crate::data::models::{Course, CourseInstructorDetail};
use crate::error::Result;
use sqlx::PgPool;
use std::collections::HashMap;
use ts_rs::TS;

/// Column to sort search results by.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[derive(Debug, Clone, Copy, serde::Deserialize, serde::Serialize, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export)]
pub enum SortColumn {
CourseCode,
Title,
@@ -17,8 +19,9 @@ pub enum SortColumn {
}

/// Sort direction.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[derive(Debug, Clone, Copy, serde::Deserialize, serde::Serialize, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export)]
pub enum SortDirection {
Asc,
Desc,

+19 -10
@@ -14,25 +14,34 @@ use crate::web::extractors::AdminUser;
// Query / body types
// ---------------------------------------------------------------------------

#[derive(Deserialize)]
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ListInstructorsParams {
status: Option<String>,
search: Option<String>,
page: Option<i32>,
per_page: Option<i32>,
sort: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub status: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub search: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub page: Option<i32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub per_page: Option<i32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub sort: Option<String>,
}

#[derive(Deserialize)]
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct MatchBody {
rmp_legacy_id: i32,
pub rmp_legacy_id: i32,
}

#[derive(Deserialize)]
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct RejectCandidateBody {
rmp_legacy_id: i32,
pub rmp_legacy_id: i32,
}

// ---------------------------------------------------------------------------

@@ -77,10 +77,12 @@ fn default_bucket_for_period(period: &str) -> &'static str {
// Endpoint 1: GET /api/admin/scraper/stats
// ---------------------------------------------------------------------------

#[derive(Deserialize)]
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct StatsParams {
#[serde(default = "default_period")]
period: String,
pub period: String,
}

fn default_period() -> String {
@@ -195,11 +197,14 @@ pub async fn scraper_stats(
// Endpoint 2: GET /api/admin/scraper/timeseries
// ---------------------------------------------------------------------------

#[derive(Deserialize)]
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TimeseriesParams {
#[serde(default = "default_period")]
period: String,
bucket: Option<String>,
pub period: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub bucket: Option<String>,
}

#[derive(Serialize, TS)]
@@ -215,6 +220,8 @@ pub struct TimeseriesResponse {
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct TimeseriesPoint {
/// ISO-8601 UTC timestamp for this data point (e.g., "2024-01-15T10:00:00Z")
#[ts(type = "string")]
timestamp: DateTime<Utc>,
#[ts(type = "number")]
scrape_count: i64,
@@ -328,7 +335,11 @@ pub struct SubjectSummary {
#[ts(type = "number")]
current_interval_secs: u64,
time_multiplier: u32,
/// ISO-8601 UTC timestamp of last scrape (e.g., "2024-01-15T10:30:00Z")
#[ts(type = "string")]
last_scraped: DateTime<Utc>,
/// ISO-8601 UTC timestamp when next scrape is eligible (e.g., "2024-01-15T11:00:00Z")
#[ts(type = "string | null")]
next_eligible_at: Option<DateTime<Utc>>,
#[ts(type = "number | null")]
cooldown_remaining_secs: Option<u64>,
@@ -439,10 +450,12 @@ pub async fn scraper_subjects(
// Endpoint 4: GET /api/admin/scraper/subjects/{subject}
// ---------------------------------------------------------------------------

#[derive(Deserialize)]
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SubjectDetailParams {
#[serde(default = "default_detail_limit")]
limit: i32,
pub limit: i32,
}

fn default_detail_limit() -> i32 {
@@ -463,6 +476,8 @@ pub struct SubjectDetailResponse {
pub struct SubjectResultEntry {
#[ts(type = "number")]
id: i64,
/// ISO-8601 UTC timestamp when the scrape job completed (e.g., "2024-01-15T10:30:00Z")
#[ts(type = "string")]
completed_at: DateTime<Utc>,
duration_ms: i32,
success: bool,

@@ -0,0 +1,90 @@
//! Standardized API error responses.

use axum::Json;
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use serde::Serialize;
use ts_rs::TS;

/// Standardized error response for all API endpoints.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ApiError {
/// Machine-readable error code (e.g., "NOT_FOUND", "INVALID_TERM")
pub code: String,
/// Human-readable error message
pub message: String,
/// Optional additional details (validation errors, field info, etc.)
#[serde(skip_serializing_if = "Option::is_none")]
pub details: Option<serde_json::Value>,
}

impl ApiError {
pub fn new(code: impl Into<String>, message: impl Into<String>) -> Self {
Self {
code: code.into(),
message: message.into(),
details: None,
}
}

#[allow(dead_code)]
pub fn with_details(mut self, details: serde_json::Value) -> Self {
self.details = Some(details);
self
}

pub fn not_found(message: impl Into<String>) -> Self {
Self::new("NOT_FOUND", message)
}

pub fn bad_request(message: impl Into<String>) -> Self {
Self::new("BAD_REQUEST", message)
}

pub fn internal_error(message: impl Into<String>) -> Self {
Self::new("INTERNAL_ERROR", message)
}

pub fn invalid_term(term: impl std::fmt::Display) -> Self {
Self::new("INVALID_TERM", format!("Invalid term: {}", term))
}

fn status_code(&self) -> StatusCode {
match self.code.as_str() {
"NOT_FOUND" => StatusCode::NOT_FOUND,
"BAD_REQUEST" | "INVALID_TERM" | "INVALID_RANGE" => StatusCode::BAD_REQUEST,
"UNAUTHORIZED" => StatusCode::UNAUTHORIZED,
"FORBIDDEN" => StatusCode::FORBIDDEN,
_ => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}

impl IntoResponse for ApiError {
fn into_response(self) -> Response {
let status = self.status_code();
(status, Json(self)).into_response()
}
}

/// Convert `(StatusCode, String)` tuple errors to ApiError
impl From<(StatusCode, String)> for ApiError {
fn from((status, message): (StatusCode, String)) -> Self {
let code = match status {
StatusCode::NOT_FOUND => "NOT_FOUND",
StatusCode::BAD_REQUEST => "BAD_REQUEST",
StatusCode::UNAUTHORIZED => "UNAUTHORIZED",
StatusCode::FORBIDDEN => "FORBIDDEN",
_ => "INTERNAL_ERROR",
};
Self::new(code, message)
}
}

/// Helper for converting database errors to ApiError
pub fn db_error(context: &str, error: anyhow::Error) -> ApiError {
tracing::error!(error = %error, context = context, "Database error");
ApiError::internal_error(format!("{} failed", context))
}

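For context: because of the camelCase rename and the skip_serializing_if attribute above, a handler that returns, say, ApiError::invalid_term("FOO") produces a 400 response whose JSON body carries only code and message. A minimal TypeScript sketch of that wire shape and a structural check over it; the ApiErrorShape and isApiErrorShape names are illustrative helpers, not part of this commit:

// Mirrors the Rust ApiError struct above: camelCase fields, `details` omitted when None.
// Hypothetical helper names for illustration only.
interface ApiErrorShape {
  code: string;
  message: string;
  details?: unknown;
}

function isApiErrorShape(value: unknown): value is ApiErrorShape {
  if (typeof value !== "object" || value === null) return false;
  const v = value as Record<string, unknown>;
  return typeof v.code === "string" && typeof v.message === "string";
}

// Example body for ApiError::invalid_term("FOO"), served with HTTP 400:
const sample: unknown = { code: "INVALID_TERM", message: "Invalid term: FOO" };
console.log(isApiErrorShape(sample)); // true
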
@@ -9,6 +9,7 @@ pub mod auth;
pub mod calendar;
#[cfg(feature = "embed-assets")]
pub mod encoding;
pub mod error;
pub mod extractors;
pub mod routes;
pub mod schedule_cache;

+90 -100
@@ -4,7 +4,6 @@ use axum::{
Extension, Router,
body::Body,
extract::{Path, Query, Request, State},
http::StatusCode as AxumStatusCode,
response::{Json, Response},
routing::{get, post, put},
};
@@ -12,6 +11,7 @@ use axum::{
use crate::web::admin_scraper;
use crate::web::auth::{self, AuthConfig};
use crate::web::calendar;
use crate::web::error::{ApiError, db_error};
use crate::web::timeline;
use crate::web::ws;
use crate::{data, web::admin};
@@ -291,7 +291,7 @@ async fn status(State(state): State<AppState>) -> Json<StatusResponse> {
async fn metrics(
State(state): State<AppState>,
Query(params): Query<MetricsParams>,
) -> Result<Json<MetricsResponse>, (AxumStatusCode, String)> {
) -> Result<Json<MetricsResponse>, ApiError> {
let limit = params.limit.clamp(1, 5000);

// Parse range shorthand, defaulting to 24h
@@ -303,8 +303,8 @@ async fn metrics(
"7d" => chrono::Duration::days(7),
"30d" => chrono::Duration::days(30),
_ => {
return Err((
AxumStatusCode::BAD_REQUEST,
return Err(ApiError::new(
"INVALID_RANGE",
format!("Invalid range '{range_str}'. Valid: 1h, 6h, 24h, 7d, 30d"),
));
}
@@ -321,13 +321,7 @@ async fn metrics(
.bind(crn)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Course lookup for metrics failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Course lookup failed".to_string(),
)
})?;
.map_err(|e| db_error("Course lookup for metrics", e.into()))?;
row.map(|(id,)| id)
} else {
None
@@ -361,13 +355,7 @@ async fn metrics(
.fetch_all(&state.db_pool)
.await
}
.map_err(|e| {
tracing::error!(error = %e, "Metrics query failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Metrics query failed".to_string(),
)
})?;
.map_err(|e| db_error("Metrics query", e.into()))?;

let count = metrics.len();
let metrics_entries: Vec<MetricEntry> = metrics
@@ -416,44 +404,60 @@ pub struct MetricsResponse {
pub timestamp: String,
}

#[derive(Deserialize)]
struct MetricsParams {
course_id: Option<i32>,
term: Option<String>,
crn: Option<String>,
/// Shorthand durations: "1h", "6h", "24h", "7d", "30d"
range: Option<String>,
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct MetricsParams {
#[serde(skip_serializing_if = "Option::is_none")]
pub course_id: Option<i32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub term: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub crn: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub range: Option<String>,
#[serde(default = "default_metrics_limit")]
limit: i32,
pub limit: i32,
}

fn default_metrics_limit() -> i32 {
500
}

#[derive(Deserialize)]
struct SubjectsParams {
term: String,
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SubjectsParams {
pub term: String,
}

#[derive(Deserialize)]
struct SearchParams {
term: String,
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SearchParams {
pub term: String,
#[serde(default)]
subject: Vec<String>,
q: Option<String>,
course_number_low: Option<i32>,
course_number_high: Option<i32>,
pub subject: Vec<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub q: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub course_number_low: Option<i32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub course_number_high: Option<i32>,
#[serde(default)]
open_only: bool,
instructional_method: Option<String>,
campus: Option<String>,
pub open_only: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub instructional_method: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub campus: Option<String>,
#[serde(default = "default_limit")]
limit: i32,
pub limit: i32,
#[serde(default)]
offset: i32,
sort_by: Option<SortColumn>,
sort_dir: Option<SortDirection>,
pub offset: i32,
#[serde(skip_serializing_if = "Option::is_none")]
pub sort_by: Option<SortColumn>,
#[serde(skip_serializing_if = "Option::is_none")]
pub sort_dir: Option<SortDirection>,
}

use crate::data::courses::{SortColumn, SortDirection};
@@ -550,6 +554,32 @@ fn build_course_response(
})
.collect();

let meeting_times = serde_json::from_value(course.meeting_times.clone())
.map_err(|e| {
tracing::error!(
course_id = course.id,
crn = %course.crn,
term = %course.term_code,
error = %e,
"Failed to deserialize meeting_times JSONB"
);
e
})
.unwrap_or_default();

let attributes = serde_json::from_value(course.attributes.clone())
.map_err(|e| {
tracing::error!(
course_id = course.id,
crn = %course.crn,
term = %course.term_code,
error = %e,
"Failed to deserialize attributes JSONB"
);
e
})
.unwrap_or_default();

CourseResponse {
crn: course.crn.clone(),
subject: course.subject.clone(),
@@ -572,8 +602,8 @@ fn build_course_response(
link_identifier: course.link_identifier.clone(),
is_section_linked: course.is_section_linked,
part_of_term: course.part_of_term.clone(),
meeting_times: serde_json::from_value(course.meeting_times.clone()).unwrap_or_default(),
attributes: serde_json::from_value(course.attributes.clone()).unwrap_or_default(),
meeting_times,
attributes,
instructors,
}
}
@@ -582,15 +612,11 @@ fn build_course_response(
async fn search_courses(
State(state): State<AppState>,
axum_extra::extract::Query(params): axum_extra::extract::Query<SearchParams>,
) -> Result<Json<SearchResponse>, (AxumStatusCode, String)> {
) -> Result<Json<SearchResponse>, ApiError> {
use crate::banner::models::terms::Term;

let term_code = Term::resolve_to_code(&params.term).ok_or_else(|| {
(
AxumStatusCode::BAD_REQUEST,
format!("Invalid term: {}", params.term),
)
})?;
let term_code =
Term::resolve_to_code(&params.term).ok_or_else(|| ApiError::invalid_term(&params.term))?;
let limit = params.limit.clamp(1, 100);
let offset = params.offset.max(0);

@@ -614,13 +640,7 @@ async fn search_courses(
params.sort_dir,
)
.await
.map_err(|e| {
tracing::error!(error = %e, "Course search failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Search failed".to_string(),
)
})?;
.map_err(|e| db_error("Course search", e))?;

// Batch-fetch all instructors in a single query instead of N+1
let course_ids: Vec<i32> = courses.iter().map(|c| c.id).collect();
@@ -647,17 +667,11 @@ async fn search_courses(
async fn get_course(
State(state): State<AppState>,
Path((term, crn)): Path<(String, String)>,
) -> Result<Json<CourseResponse>, (AxumStatusCode, String)> {
) -> Result<Json<CourseResponse>, ApiError> {
let course = data::courses::get_course_by_crn(&state.db_pool, &crn, &term)
.await
.map_err(|e| {
tracing::error!(error = %e, "Course lookup failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Lookup failed".to_string(),
)
})?
.ok_or_else(|| (AxumStatusCode::NOT_FOUND, "Course not found".to_string()))?;
.map_err(|e| db_error("Course lookup", e))?
.ok_or_else(|| ApiError::not_found("Course not found"))?;

let instructors = data::courses::get_course_instructors(&state.db_pool, course.id)
.await
@@ -666,20 +680,12 @@ async fn get_course(
}

/// `GET /api/terms`
async fn get_terms(
State(state): State<AppState>,
) -> Result<Json<Vec<TermResponse>>, (AxumStatusCode, String)> {
async fn get_terms(State(state): State<AppState>) -> Result<Json<Vec<TermResponse>>, ApiError> {
use crate::banner::models::terms::Term;

let term_codes = data::courses::get_available_terms(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to get terms");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Failed to get terms".to_string(),
)
})?;
.map_err(|e| db_error("Get terms", e))?;

let terms: Vec<TermResponse> = term_codes
.into_iter()
@@ -700,24 +706,14 @@ async fn get_terms(
async fn get_subjects(
State(state): State<AppState>,
Query(params): Query<SubjectsParams>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
) -> Result<Json<Vec<CodeDescription>>, ApiError> {
use crate::banner::models::terms::Term;

let term_code = Term::resolve_to_code(&params.term).ok_or_else(|| {
(
AxumStatusCode::BAD_REQUEST,
format!("Invalid term: {}", params.term),
)
})?;
let term_code =
Term::resolve_to_code(&params.term).ok_or_else(|| ApiError::invalid_term(&params.term))?;
let rows = data::courses::get_subjects_by_enrollment(&state.db_pool, &term_code)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to get subjects");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Failed to get subjects".to_string(),
)
})?;
.map_err(|e| db_error("Get subjects", e))?;

let subjects: Vec<CodeDescription> = rows
.into_iter()
@@ -731,7 +727,7 @@ async fn get_subjects(
async fn get_reference(
State(state): State<AppState>,
Path(category): Path<String>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
) -> Result<Json<Vec<CodeDescription>>, ApiError> {
let cache = state.reference_cache.read().await;
let entries = cache.entries_for_category(&category);

@@ -740,13 +736,7 @@ async fn get_reference(
drop(cache);
let rows = data::reference::get_by_category(&category, &state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, category = %category, "Reference lookup failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Lookup failed".to_string(),
)
})?;
.map_err(|e| db_error(&format!("Reference lookup for {}", category), e))?;

return Ok(Json(
rows.into_iter()

+14 -38
@@ -9,11 +9,7 @@
//! [`ScheduleCache`]) that refreshes hourly in the background with
//! stale-while-revalidate semantics.

use axum::{
extract::State,
http::StatusCode,
response::{IntoResponse, Json, Response},
};
use axum::{extract::State, response::Json};
use chrono::{DateTime, Datelike, Duration, NaiveTime, Timelike, Utc};
use chrono_tz::US::Central;
use serde::{Deserialize, Serialize};
@@ -21,6 +17,7 @@ use std::collections::{BTreeMap, BTreeSet};
use ts_rs::TS;

use crate::state::AppState;
use crate::web::error::ApiError;
use crate::web::schedule_cache::weekday_bit;

/// 15 minutes in seconds, matching the frontend `SLOT_INTERVAL_MS`.
@@ -49,7 +46,11 @@ pub struct TimelineRequest {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TimeRange {
/// ISO-8601 UTC timestamp (e.g., "2024-01-15T10:30:00Z")
#[ts(type = "string")]
start: DateTime<Utc>,
/// ISO-8601 UTC timestamp (e.g., "2024-01-15T12:30:00Z")
#[ts(type = "string")]
end: DateTime<Utc>,
}

@@ -67,39 +68,14 @@ pub struct TimelineResponse {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TimelineSlot {
/// ISO-8601 timestamp at the start of this 15-minute bucket.
/// ISO-8601 UTC timestamp at the start of this 15-minute bucket (e.g., "2024-01-15T10:30:00Z")
#[ts(type = "string")]
time: DateTime<Utc>,
/// Subject code → total enrollment in this slot.
#[ts(type = "Record<string, number>")]
subjects: BTreeMap<String, i64>,
}

// ── Error type ──────────────────────────────────────────────────────

pub(crate) struct TimelineError {
status: StatusCode,
message: String,
}

impl TimelineError {
fn bad_request(msg: impl Into<String>) -> Self {
Self {
status: StatusCode::BAD_REQUEST,
message: msg.into(),
}
}
}

impl IntoResponse for TimelineError {
fn into_response(self) -> Response {
(
self.status,
Json(serde_json::json!({ "error": self.message })),
)
.into_response()
}
}

// ── Alignment helpers ───────────────────────────────────────────────

/// Floor a timestamp to the nearest 15-minute boundary.
@@ -166,13 +142,13 @@ fn generate_slots(merged: &[AlignedRange]) -> BTreeSet<DateTime<Utc>> {
pub(crate) async fn timeline(
State(state): State<AppState>,
Json(body): Json<TimelineRequest>,
) -> Result<Json<TimelineResponse>, TimelineError> {
) -> Result<Json<TimelineResponse>, ApiError> {
// ── Validate ────────────────────────────────────────────────────
if body.ranges.is_empty() {
return Err(TimelineError::bad_request("At least one range is required"));
return Err(ApiError::bad_request("At least one range is required"));
}
if body.ranges.len() > MAX_RANGES {
return Err(TimelineError::bad_request(format!(
return Err(ApiError::bad_request(format!(
"Too many ranges (max {MAX_RANGES})"
)));
}
@@ -180,14 +156,14 @@ pub(crate) async fn timeline(
let mut aligned: Vec<AlignedRange> = Vec::with_capacity(body.ranges.len());
for r in &body.ranges {
if r.end <= r.start {
return Err(TimelineError::bad_request(format!(
return Err(ApiError::bad_request(format!(
"Range end ({}) must be after start ({})",
r.end, r.start
)));
}
let span = r.end - r.start;
if span > MAX_RANGE_SPAN {
return Err(TimelineError::bad_request(format!(
return Err(ApiError::bad_request(format!(
"Range span ({} hours) exceeds maximum ({} hours)",
span.num_hours(),
MAX_RANGE_SPAN.num_hours()
@@ -204,7 +180,7 @@ pub(crate) async fn timeline(
// Validate total span
let total_span: Duration = merged.iter().map(|r| r.end - r.start).sum();
if total_span > MAX_TOTAL_SPAN {
return Err(TimelineError::bad_request(format!(
return Err(ApiError::bad_request(format!(
"Total time span ({} hours) exceeds maximum ({} hours)",
total_span.num_hours(),
MAX_TOTAL_SPAN.num_hours()

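The timeline handler above now reports validation failures through ApiError::bad_request instead of the removed TimelineError. A small TypeScript sketch of a client-side pre-check that mirrors the cheapest of those rules before POSTing a TimelineRequest; the local TimeRange alias mirrors the generated binding, and the MAX_RANGES / span limits are left to the server since their values are not shown in this diff:

// Mirrors the generated TimeRange binding ({ start, end } as ISO-8601 strings).
type TimeRange = { start: string; end: string };

// Returns an error message matching the server's wording, or null if the ranges pass
// the client-checkable rules (non-empty, end strictly after start).
function precheckRanges(ranges: TimeRange[]): string | null {
  if (ranges.length === 0) return "At least one range is required";
  for (const r of ranges) {
    if (new Date(r.end).getTime() <= new Date(r.start).getTime()) {
      return `Range end (${r.end}) must be after start (${r.start})`;
    }
  }
  return null;
}

console.log(precheckRanges([{ start: "2024-01-15T10:30:00Z", end: "2024-01-15T12:30:00Z" }])); // null
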
@@ -58,9 +58,9 @@ describe("BannerApiClient", () => {

const result = await apiClient.searchCourses({
term: "202420",
subjects: ["CS"],
subject: ["CS"],
q: "data",
open_only: true,
openOnly: true,
limit: 25,
offset: 50,
});

+80 -34
@@ -1,6 +1,7 @@
import { authStore } from "$lib/auth.svelte";
import type {
AdminStatusResponse,
ApiError,
AuditLogEntry,
AuditLogResponse,
CandidateResponse,
@@ -15,15 +16,19 @@ import type {
LinkedRmpProfile,
ListInstructorsResponse,
MetricEntry,
MetricsParams as MetricsParamsGenerated,
MetricsResponse,
RescoreResponse,
ScrapeJobDto,
ScrapeJobEvent,
ScrapeJobsResponse,
ScraperStatsResponse,
SearchParams as SearchParamsGenerated,
SearchResponse as SearchResponseGenerated,
ServiceInfo,
ServiceStatus,
SortColumn,
SortDirection,
StatusResponse,
SubjectDetailResponse,
SubjectResultEntry,
@@ -45,6 +50,7 @@ const API_BASE_URL = "/api";
// Re-export generated types under their canonical names
export type {
AdminStatusResponse,
ApiError,
AuditLogEntry,
AuditLogResponse,
CandidateResponse,
@@ -67,6 +73,8 @@ export type {
ScraperStatsResponse,
ServiceInfo,
ServiceStatus,
SortColumn,
SortDirection,
StatusResponse,
SubjectDetailResponse,
SubjectResultEntry,
@@ -87,34 +95,13 @@ export type Term = TermResponse;
export type Subject = CodeDescription;
export type ReferenceEntry = CodeDescription;

// SearchResponse re-exported (aliased to strip the "Generated" suffix)
// Re-export with simplified names
export type SearchResponse = SearchResponseGenerated;
export type SearchParams = SearchParamsGenerated;
export type MetricsParams = MetricsParamsGenerated;

export type ScraperPeriod = "1h" | "6h" | "24h" | "7d" | "30d";

// Client-side only — not generated from Rust
export type SortColumn = "course_code" | "title" | "instructor" | "time" | "seats";
export type SortDirection = "asc" | "desc";

export interface MetricsParams {
course_id?: number;
term?: string;
crn?: string;
range?: "1h" | "6h" | "24h" | "7d" | "30d";
limit?: number;
}

export interface SearchParams {
term: string;
subjects?: string[];
q?: string;
open_only?: boolean;
limit?: number;
offset?: number;
sort_by?: SortColumn;
sort_dir?: SortDirection;
}

// Admin instructor query params (client-only, not generated)
export interface AdminInstructorListParams {
status?: string;
@@ -124,6 +111,35 @@ export interface AdminInstructorListParams {
sort?: string;
}

/**
* API error class that wraps the structured ApiError response from the backend.
*/
export class ApiErrorClass extends Error {
public readonly code: string;
public readonly details: unknown | null;

constructor(apiError: ApiError) {
super(apiError.message);
this.name = "ApiError";
this.code = apiError.code;
this.details = apiError.details;
}

isNotFound(): boolean {
return this.code === "NOT_FOUND";
}

isBadRequest(): boolean {
return (
this.code === "BAD_REQUEST" || this.code === "INVALID_TERM" || this.code === "INVALID_RANGE"
);
}

isInternalError(): boolean {
return this.code === "INTERNAL_ERROR";
}
}

export class BannerApiClient {
private baseUrl: string;
private fetchFn: typeof fetch;
@@ -163,7 +179,17 @@ export class BannerApiClient {
}

if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
let apiError: ApiError;
try {
apiError = (await response.json()) as ApiError;
} catch {
apiError = {
code: "UNKNOWN_ERROR",
message: `API request failed: ${response.status} ${response.statusText}`,
details: null,
};
}
throw new ApiErrorClass(apiError);
}

return (await response.json()) as T;
@@ -184,7 +210,17 @@ export class BannerApiClient {
}

if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
let apiError: ApiError;
try {
apiError = (await response.json()) as ApiError;
} catch {
apiError = {
code: "UNKNOWN_ERROR",
message: `API request failed: ${response.status} ${response.statusText}`,
details: null,
};
}
throw new ApiErrorClass(apiError);
}
}

@@ -192,20 +228,28 @@ export class BannerApiClient {
return this.request<StatusResponse>("/status");
}

async searchCourses(params: SearchParams): Promise<SearchResponse> {
async searchCourses(params: Partial<SearchParams> & { term: string }): Promise<SearchResponse> {
const query = new URLSearchParams();
query.set("term", params.term);
if (params.subjects) {
for (const s of params.subjects) {
if (params.subject && params.subject.length > 0) {
for (const s of params.subject) {
query.append("subject", s);
}
}
if (params.q) query.set("q", params.q);
if (params.open_only) query.set("open_only", "true");
if (params.openOnly) query.set("open_only", "true");
if (params.courseNumberLow !== undefined && params.courseNumberLow !== null) {
query.set("course_number_low", String(params.courseNumberLow));
}
if (params.courseNumberHigh !== undefined && params.courseNumberHigh !== null) {
query.set("course_number_high", String(params.courseNumberHigh));
}
if (params.instructionalMethod) query.set("instructional_method", params.instructionalMethod);
if (params.campus) query.set("campus", params.campus);
if (params.limit !== undefined) query.set("limit", String(params.limit));
if (params.offset !== undefined) query.set("offset", String(params.offset));
if (params.sort_by) query.set("sort_by", params.sort_by);
if (params.sort_dir) query.set("sort_dir", params.sort_dir);
if (params.sortBy) query.set("sort_by", params.sortBy);
if (params.sortDir) query.set("sort_dir", params.sortDir);
return this.request<SearchResponse>(`/courses/search?${query.toString()}`);
}

@@ -281,9 +325,11 @@ export class BannerApiClient {
});
}

async getMetrics(params?: MetricsParams): Promise<MetricsResponse> {
async getMetrics(params?: Partial<MetricsParams>): Promise<MetricsResponse> {
const query = new URLSearchParams();
if (params?.course_id !== undefined) query.set("course_id", String(params.course_id));
if (params?.courseId !== undefined && params.courseId !== null) {
query.set("course_id", String(params.courseId));
}
if (params?.term) query.set("term", params.term);
if (params?.crn) query.set("crn", params.crn);
if (params?.range) query.set("range", params.range);

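A short usage sketch of the updated client surface: searchCourses now takes the generated camelCase SearchParams fields, and failures surface as ApiErrorClass rather than a bare Error. The import path and zero-argument constructor are assumptions for illustration; only the method, parameter, and error-class names come from this diff:

import { ApiErrorClass, BannerApiClient } from "$lib/api"; // path assumed

async function demo() {
  const client = new BannerApiClient(); // constructor arguments, if any, are not shown in this diff

  try {
    const results = await client.searchCourses({
      term: "202420",
      subject: ["CS"],
      openOnly: true,
      limit: 25,
    });
    console.log(results);
  } catch (err) {
    if (err instanceof ApiErrorClass) {
      // Structured error parsed from the backend's ApiError payload.
      if (err.isNotFound()) console.warn("Not found");
      else if (err.isBadRequest()) console.warn(`Bad request: ${err.message}`);
      else console.error(`${err.code}: ${err.message}`);
    } else {
      throw err;
    }
  }
}
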
@@ -0,0 +1,19 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { JsonValue } from "./serde_json/JsonValue";

/**
* Standardized error response for all API endpoints.
*/
export type ApiError = {
/**
* Machine-readable error code (e.g., "NOT_FOUND", "INVALID_TERM")
*/
code: string,
/**
* Human-readable error message
*/
message: string,
/**
* Optional additional details (validation errors, field info, etc.)
*/
details: JsonValue | null, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type ListInstructorsParams = { status: string | null, search: string | null, page: number | null, perPage: number | null, sort: string | null, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type MatchBody = { rmpLegacyId: number, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type MetricsParams = { courseId: number | null, term: string | null, crn: string | null, range: string | null, limit: number, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type RejectCandidateBody = { rmpLegacyId: number, };
@@ -0,0 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SortColumn } from "./SortColumn";
import type { SortDirection } from "./SortDirection";

export type SearchParams = { term: string, subject: Array<string>, q: string | null, courseNumberLow: number | null, courseNumberHigh: number | null, openOnly: boolean, instructionalMethod: string | null, campus: string | null, limit: number, offset: number, sortBy: SortColumn | null, sortDir: SortDirection | null, };
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

/**
* Column to sort search results by.
*/
export type SortColumn = "course_code" | "title" | "instructor" | "time" | "seats";
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

/**
* Sort direction.
*/
export type SortDirection = "asc" | "desc";
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type StatsParams = { period: string, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SubjectDetailParams = { limit: number, };
@@ -1,3 +1,7 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SubjectResultEntry = { id: number, completedAt: string, durationMs: number, success: boolean, errorMessage: string | null, coursesFetched: number | null, coursesChanged: number | null, coursesUnchanged: number | null, auditsGenerated: number | null, metricsGenerated: number | null, };
export type SubjectResultEntry = { id: number,
/**
* ISO-8601 UTC timestamp when the scrape job completed (e.g., "2024-01-15T10:30:00Z")
*/
completedAt: string, durationMs: number, success: boolean, errorMessage: string | null, coursesFetched: number | null, coursesChanged: number | null, coursesUnchanged: number | null, auditsGenerated: number | null, metricsGenerated: number | null, };

@@ -1,3 +1,11 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SubjectSummary = { subject: string, subjectDescription: string | null, trackedCourseCount: number, scheduleState: string, currentIntervalSecs: number, timeMultiplier: number, lastScraped: string, nextEligibleAt: string | null, cooldownRemainingSecs: number | null, avgChangeRatio: number, consecutiveZeroChanges: number, recentRuns: number, recentFailures: number, };
export type SubjectSummary = { subject: string, subjectDescription: string | null, trackedCourseCount: number, scheduleState: string, currentIntervalSecs: number, timeMultiplier: number,
/**
* ISO-8601 UTC timestamp of last scrape (e.g., "2024-01-15T10:30:00Z")
*/
lastScraped: string,
/**
* ISO-8601 UTC timestamp when next scrape is eligible (e.g., "2024-01-15T11:00:00Z")
*/
nextEligibleAt: string | null, cooldownRemainingSecs: number | null, avgChangeRatio: number, consecutiveZeroChanges: number, recentRuns: number, recentFailures: number, };

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type SubjectsParams = { term: string, };
@@ -1,3 +1,11 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type TimeRange = { start: string, end: string, };
export type TimeRange = {
/**
* ISO-8601 UTC timestamp (e.g., "2024-01-15T10:30:00Z")
*/
start: string,
/**
* ISO-8601 UTC timestamp (e.g., "2024-01-15T12:30:00Z")
*/
end: string, };

@@ -2,7 +2,7 @@

export type TimelineSlot = {
/**
* ISO-8601 timestamp at the start of this 15-minute bucket.
* ISO-8601 UTC timestamp at the start of this 15-minute bucket (e.g., "2024-01-15T10:30:00Z")
*/
time: string,
/**

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type TimeseriesParams = { period: string, bucket: string | null, };
@@ -1,3 +1,7 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type TimeseriesPoint = { timestamp: string, scrapeCount: number, successCount: number, errorCount: number, coursesChanged: number, avgDurationMs: number, };
export type TimeseriesPoint = {
/**
* ISO-8601 UTC timestamp for this data point (e.g., "2024-01-15T10:00:00Z")
*/
timestamp: string, scrapeCount: number, successCount: number, errorCount: number, coursesChanged: number, avgDurationMs: number, };

@@ -1,5 +1,6 @@
export type { AdminServiceInfo } from "./AdminServiceInfo";
export type { AdminStatusResponse } from "./AdminStatusResponse";
export type { ApiError } from "./ApiError";
export type { AuditLogEntry } from "./AuditLogEntry";
export type { AuditLogResponse } from "./AuditLogResponse";
export type { CandidateResponse } from "./CandidateResponse";
@@ -12,29 +13,40 @@ export type { InstructorListItem } from "./InstructorListItem";
export type { InstructorResponse } from "./InstructorResponse";
export type { InstructorStats } from "./InstructorStats";
export type { LinkedRmpProfile } from "./LinkedRmpProfile";
export type { ListInstructorsParams } from "./ListInstructorsParams";
export type { ListInstructorsResponse } from "./ListInstructorsResponse";
export type { MatchBody } from "./MatchBody";
export type { MetricEntry } from "./MetricEntry";
export type { MetricsParams } from "./MetricsParams";
export type { MetricsResponse } from "./MetricsResponse";
export type { OkResponse } from "./OkResponse";
export type { RejectCandidateBody } from "./RejectCandidateBody";
export type { RescoreResponse } from "./RescoreResponse";
export type { ScrapeJobDto } from "./ScrapeJobDto";
export type { ScrapeJobEvent } from "./ScrapeJobEvent";
export type { ScrapeJobStatus } from "./ScrapeJobStatus";
export type { ScrapeJobsResponse } from "./ScrapeJobsResponse";
export type { ScraperStatsResponse } from "./ScraperStatsResponse";
export type { SearchParams } from "./SearchParams";
export type { SearchResponse } from "./SearchResponse";
export type { ServiceInfo } from "./ServiceInfo";
export type { ServiceStatus } from "./ServiceStatus";
export type { SortColumn } from "./SortColumn";
export type { SortDirection } from "./SortDirection";
export type { StatsParams } from "./StatsParams";
export type { StatusResponse } from "./StatusResponse";
export type { SubjectDetailParams } from "./SubjectDetailParams";
export type { SubjectDetailResponse } from "./SubjectDetailResponse";
export type { SubjectResultEntry } from "./SubjectResultEntry";
export type { SubjectSummary } from "./SubjectSummary";
export type { SubjectsParams } from "./SubjectsParams";
export type { SubjectsResponse } from "./SubjectsResponse";
export type { TermResponse } from "./TermResponse";
export type { TimeRange } from "./TimeRange";
export type { TimelineRequest } from "./TimelineRequest";
export type { TimelineResponse } from "./TimelineResponse";
export type { TimelineSlot } from "./TimelineSlot";
export type { TimeseriesParams } from "./TimeseriesParams";
export type { TimeseriesPoint } from "./TimeseriesPoint";
export type { TimeseriesResponse } from "./TimeseriesResponse";
export type { TopCandidateResponse } from "./TopCandidateResponse";

@@ -0,0 +1,61 @@
/**
* Utilities for ISO-8601 date string validation and conversion.
*
* All DateTime<Utc> fields from Rust are serialized as ISO-8601 strings.
*/

/**
* Validates if a string is a valid ISO-8601 date string.
*
* @param value - The string to validate
* @returns True if the string is a valid ISO-8601 date
*/
export function isValidISODate(value: string): boolean {
try {
const date = new Date(value);
return !isNaN(date.getTime()) && date.toISOString() === value;
} catch {
return false;
}
}

/**
* Parses an ISO-8601 date string to a Date object.
*
* @param value - The ISO-8601 string to parse
* @returns Date object, or null if invalid
*/
export function parseISODate(value: string): Date | null {
try {
const date = new Date(value);
if (isNaN(date.getTime())) {
return null;
}
return date;
} catch {
return null;
}
}

/**
* Asserts that a string is a valid ISO-8601 date, throwing if not.
*
* @param value - The string to validate
* @param fieldName - Name of the field for error messages
* @throws Error if the string is not a valid ISO-8601 date
*/
export function assertISODate(value: string, fieldName = "date"): void {
if (!isValidISODate(value)) {
throw new Error(`Invalid ISO-8601 date for ${fieldName}: ${value}`);
}
}

/**
* Converts a Date to an ISO-8601 UTC string.
*
* @param date - The Date object to convert
* @returns ISO-8601 string in UTC (e.g., "2024-01-15T10:30:00Z")
*/
export function toISOString(date: Date): string {
return date.toISOString();
}
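A brief usage sketch for the helpers above (the import path is an assumption). Note that isValidISODate does a strict round-trip through Date.prototype.toISOString(), so it only accepts the exact millisecond-precision UTC form:

import { assertISODate, isValidISODate, parseISODate } from "$lib/dateUtils"; // path assumed

const ts = "2024-01-15T10:30:00.000Z";

console.log(isValidISODate(ts)); // true (matches the toISOString() round-trip exactly)
console.log(isValidISODate("2024-01-15T10:30:00Z")); // false (no milliseconds, fails the strict round-trip)
console.log(parseISODate("not-a-date")); // null

// Throws with a field-specific message when a timestamp is malformed.
assertISODate(ts, "completedAt");
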
@@ -212,13 +212,13 @@ async function performSearch(
try {
const result = await client.searchCourses({
term,
subjects: subjects.length > 0 ? subjects : undefined,
subject: subjects.length > 0 ? subjects : [],
q: q || undefined,
open_only: open || undefined,
openOnly: open || false,
limit,
offset: off,
sort_by: sortBy,
sort_dir: sortDir,
sortBy,
sortDir,
});

const applyUpdate = () => {