diff --git a/src/data/models.rs b/src/data/models.rs
index 4184e78..f92f1f6 100644
--- a/src/data/models.rs
+++ b/src/data/models.rs
@@ -192,8 +192,9 @@ pub enum TargetType {
 }
 
 /// Computed status for a scrape job, derived from existing fields.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, TS)]
 #[serde(rename_all = "camelCase")]
+#[ts(export)]
 pub enum ScrapeJobStatus {
     Processing,
     StaleLock,
diff --git a/src/web/admin.rs b/src/web/admin.rs
index 20fd212..5e138d9 100644
--- a/src/web/admin.rs
+++ b/src/web/admin.rs
@@ -6,18 +6,51 @@ use axum::extract::{Path, State};
 use axum::http::{HeaderMap, StatusCode, header};
 use axum::response::{IntoResponse, Json, Response};
 use chrono::{DateTime, Utc};
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 use serde_json::{Value, json};
+use ts_rs::TS;
 
 use crate::data::models::User;
 use crate::state::AppState;
+use crate::status::ServiceStatus;
 use crate::web::extractors::AdminUser;
+use crate::web::ws::ScrapeJobDto;
+
+#[derive(Debug, Clone, Serialize, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export)]
+pub struct ScrapeJobsResponse {
+    pub jobs: Vec<ScrapeJobDto>,
+}
+
+#[derive(Debug, Clone, Serialize, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export)]
+pub struct AdminServiceInfo {
+    name: String,
+    status: ServiceStatus,
+}
+
+#[derive(Debug, Clone, Serialize, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export)]
+pub struct AdminStatusResponse {
+    #[ts(type = "number")]
+    user_count: i64,
+    #[ts(type = "number")]
+    session_count: i64,
+    #[ts(type = "number")]
+    course_count: i64,
+    #[ts(type = "number")]
+    scrape_job_count: i64,
+    services: Vec<AdminServiceInfo>,
+}
 
 /// `GET /api/admin/status` — Enhanced system status for admins.
 pub async fn admin_status(
     AdminUser(_user): AdminUser,
     State(state): State<AppState>,
-) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
+) -> Result<Json<AdminStatusResponse>, (StatusCode, Json<Value>)> {
     let (user_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM users")
         .fetch_one(&state.db_pool)
         .await
@@ -60,25 +93,20 @@ pub async fn admin_status(
             )
         })?;
 
-    let services: Vec<Value> = state
+    let services: Vec<AdminServiceInfo> = state
         .service_statuses
         .all()
         .into_iter()
-        .map(|(name, status)| {
-            json!({
-                "name": name,
-                "status": status,
-            })
-        })
+        .map(|(name, status)| AdminServiceInfo { name, status })
         .collect();
 
-    Ok(Json(json!({
-        "userCount": user_count,
-        "sessionCount": session_count,
-        "courseCount": course_count,
-        "scrapeJobCount": scrape_job_count,
-        "services": services,
-    })))
+    Ok(Json(AdminStatusResponse {
+        user_count,
+        session_count,
+        course_count,
+        scrape_job_count,
+        services,
+    }))
 }
 
 /// `GET /api/admin/users` — List all users.
@@ -136,7 +164,7 @@ pub async fn set_user_admin(
 pub async fn list_scrape_jobs(
     AdminUser(_user): AdminUser,
     State(state): State<AppState>,
-) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
+) -> Result<Json<ScrapeJobsResponse>, (StatusCode, Json<Value>)> {
     let rows = sqlx::query_as::<_, crate::data::models::ScrapeJob>(
         "SELECT * FROM scrape_jobs ORDER BY priority DESC, execute_at ASC LIMIT 100",
     )
@@ -150,26 +178,9 @@ pub async fn list_scrape_jobs(
             )
         })?;
 
-    let jobs: Vec<Value> = rows
-        .iter()
-        .map(|j| {
-            json!({
-                "id": j.id,
-                "targetType": format!("{:?}", j.target_type),
-                "targetPayload": j.target_payload,
-                "priority": format!("{:?}", j.priority),
-                "executeAt": j.execute_at.to_rfc3339(),
-                "createdAt": j.created_at.to_rfc3339(),
-                "lockedAt": j.locked_at.map(|t| t.to_rfc3339()),
-                "retryCount": j.retry_count,
-                "maxRetries": j.max_retries,
-                "queuedAt": j.queued_at.to_rfc3339(),
-                "status": j.status(),
-            })
-        })
-        .collect();
+    let jobs: Vec<ScrapeJobDto> = rows.iter().map(ScrapeJobDto::from).collect();
 
-    Ok(Json(json!({ "jobs": jobs })))
+    Ok(Json(ScrapeJobsResponse { jobs }))
 }
 
 /// Row returned by the audit-log query (audit + joined course fields).
@@ -188,6 +199,29 @@ struct AuditRow {
     title: Option<String>,
 }
 
+#[derive(Debug, Clone, Serialize, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export)]
+pub struct AuditLogEntry {
+    pub id: i32,
+    pub course_id: i32,
+    pub timestamp: String,
+    pub field_changed: String,
+    pub old_value: String,
+    pub new_value: String,
+    pub subject: Option<String>,
+    pub course_number: Option<String>,
+    pub crn: Option<String>,
+    pub course_title: Option<String>,
+}
+
+#[derive(Debug, Clone, Serialize, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export)]
+pub struct AuditLogResponse {
+    pub entries: Vec<AuditLogEntry>,
+}
+
 /// Format a `DateTime<Utc>` as an HTTP-date (RFC 2822) for Last-Modified headers.
 fn to_http_date(dt: &DateTime<Utc>) -> String {
     dt.format("%a, %d %b %Y %H:%M:%S GMT").to_string()
@@ -241,25 +275,23 @@ pub async fn list_audit_log(
         }
     }
 
-    let entries: Vec<Value> = rows
+    let entries: Vec<AuditLogEntry> = rows
         .iter()
-        .map(|a| {
-            json!({
-                "id": a.id,
-                "courseId": a.course_id,
-                "timestamp": a.timestamp.to_rfc3339(),
-                "fieldChanged": a.field_changed,
-                "oldValue": a.old_value,
-                "newValue": a.new_value,
-                "subject": a.subject,
-                "courseNumber": a.course_number,
-                "crn": a.crn,
-                "courseTitle": a.title,
-            })
+        .map(|a| AuditLogEntry {
+            id: a.id,
+            course_id: a.course_id,
+            timestamp: a.timestamp.to_rfc3339(),
+            field_changed: a.field_changed.clone(),
+            old_value: a.old_value.clone(),
+            new_value: a.new_value.clone(),
+            subject: a.subject.clone(),
+            course_number: a.course_number.clone(),
+            crn: a.crn.clone(),
+            course_title: a.title.clone(),
         })
         .collect();
 
-    let mut resp = Json(json!({ "entries": entries })).into_response();
+    let mut resp = Json(AuditLogResponse { entries }).into_response();
     if let Some(latest_ts) = latest
         && let Ok(val) = to_http_date(&latest_ts).parse()
     {
diff --git a/src/web/routes.rs b/src/web/routes.rs
index 95a60ff..b25478b 100644
--- a/src/web/routes.rs
+++ b/src/web/routes.rs
@@ -291,7 +291,7 @@ async fn status(State(state): State) -> Json {
 async fn metrics(
     State(state): State<AppState>,
     Query(params): Query<MetricsParams>,
-) -> Result<Json<Value>, (AxumStatusCode, String)> {
+) -> Result<Json<MetricsResponse>, (AxumStatusCode, String)> {
     let limit = params.limit.clamp(1, 5000);
 
     // Parse range shorthand, defaulting to 24h
@@ -370,33 +370,52 @@ async fn metrics(
     })?;
 
     let count = metrics.len();
-    let metrics_json: Vec<Value> = metrics
+    let metrics_entries: Vec<MetricEntry> = metrics
         .into_iter()
         .map(
-            |(id, course_id, timestamp, enrollment, wait_count, seats_available)| {
-                json!({
-                    "id": id,
-                    "courseId": course_id,
"timestamp": timestamp.to_rfc3339(), - "enrollment": enrollment, - "waitCount": wait_count, - "seatsAvailable": seats_available, - }) + |(id, course_id, timestamp, enrollment, wait_count, seats_available)| MetricEntry { + id, + course_id, + timestamp: timestamp.to_rfc3339(), + enrollment, + wait_count, + seats_available, }, ) .collect(); - Ok(Json(json!({ - "metrics": metrics_json, - "count": count, - "timestamp": chrono::Utc::now().to_rfc3339(), - }))) + Ok(Json(MetricsResponse { + metrics: metrics_entries, + count, + timestamp: chrono::Utc::now().to_rfc3339(), + })) } // ============================================================ // Course search & detail API // ============================================================ +#[derive(Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct MetricEntry { + pub id: i32, + pub course_id: i32, + pub timestamp: String, + pub enrollment: i32, + pub wait_count: i32, + pub seats_available: i32, +} + +#[derive(Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct MetricsResponse { + pub metrics: Vec, + pub count: usize, + pub timestamp: String, +} + #[derive(Deserialize)] struct MetricsParams { course_id: Option, diff --git a/src/web/timeline.rs b/src/web/timeline.rs index 46012b3..1cebb0a 100644 --- a/src/web/timeline.rs +++ b/src/web/timeline.rs @@ -38,13 +38,17 @@ const MAX_TOTAL_SPAN: Duration = Duration::hours(168); // 1 week // ── Request / Response types ──────────────────────────────────────── -#[derive(Debug, Deserialize)] -pub(crate) struct TimelineRequest { +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct TimelineRequest { ranges: Vec, } -#[derive(Debug, Deserialize)] -pub(crate) struct TimeRange { +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct TimeRange { start: DateTime, end: DateTime, } @@ -66,6 +70,7 @@ pub struct TimelineSlot { /// ISO-8601 timestamp at the start of this 15-minute bucket. time: DateTime, /// Subject code → total enrollment in this slot. + #[ts(type = "Record")] subjects: BTreeMap, } diff --git a/src/web/ws.rs b/src/web/ws.rs index 348a194..82f2b3b 100644 --- a/src/web/ws.rs +++ b/src/web/ws.rs @@ -12,14 +12,16 @@ use serde::Serialize; use sqlx::PgPool; use tokio::sync::broadcast; use tracing::debug; +use ts_rs::TS; use crate::data::models::{ScrapeJob, ScrapeJobStatus}; use crate::state::AppState; use crate::web::extractors::AdminUser; /// A serializable DTO for `ScrapeJob` with computed `status`. -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, TS)] #[serde(rename_all = "camelCase")] +#[ts(export)] pub struct ScrapeJobDto { pub id: i32, pub target_type: String, @@ -53,8 +55,9 @@ impl From<&ScrapeJob> for ScrapeJobDto { } /// Events broadcast when scrape job state changes. 
-#[derive(Debug, Clone, Serialize)]
+#[derive(Debug, Clone, Serialize, TS)]
 #[serde(tag = "type", rename_all = "camelCase")]
+#[ts(export)]
 pub enum ScrapeJobEvent {
     Init {
         jobs: Vec<ScrapeJobDto>,
@@ -64,6 +67,7 @@ pub enum ScrapeJobEvent {
     },
     JobLocked {
         id: i32,
+        #[serde(rename = "lockedAt")]
         locked_at: String,
         status: ScrapeJobStatus,
     },
@@ -72,7 +76,9 @@ pub enum ScrapeJobEvent {
     },
     JobRetried {
         id: i32,
+        #[serde(rename = "retryCount")]
         retry_count: i32,
+        #[serde(rename = "queuedAt")]
         queued_at: String,
         status: ScrapeJobStatus,
     },
diff --git a/web/src/lib/api.ts b/web/src/lib/api.ts
index 8d2f05b..2546305 100644
--- a/web/src/lib/api.ts
+++ b/web/src/lib/api.ts
@@ -1,5 +1,8 @@
 import { authStore } from "$lib/auth.svelte";
 import type {
+  AdminStatusResponse,
+  AuditLogEntry,
+  AuditLogResponse,
   CandidateResponse,
   CodeDescription,
   CourseResponse,
@@ -11,7 +14,12 @@ import type {
   InstructorStats,
   LinkedRmpProfile,
   ListInstructorsResponse,
+  MetricEntry,
+  MetricsResponse,
   RescoreResponse,
+  ScrapeJobDto,
+  ScrapeJobEvent,
+  ScrapeJobsResponse,
   ScraperStatsResponse,
   SearchResponse as SearchResponseGenerated,
   ServiceInfo,
@@ -22,6 +30,10 @@ import type {
   SubjectSummary,
   SubjectsResponse,
   TermResponse,
+  TimeRange,
+  TimelineRequest,
+  TimelineResponse,
+  TimelineSlot,
   TimeseriesPoint,
   TimeseriesResponse,
   TopCandidateResponse,
@@ -32,6 +44,9 @@ const API_BASE_URL = "/api";
 
 // Re-export generated types under their canonical names
 export type {
+  AdminStatusResponse,
+  AuditLogEntry,
+  AuditLogResponse,
   CandidateResponse,
   CodeDescription,
   CourseResponse,
@@ -43,7 +58,12 @@ export type {
   InstructorStats,
   LinkedRmpProfile,
   ListInstructorsResponse,
+  MetricEntry,
+  MetricsResponse,
   RescoreResponse,
+  ScrapeJobDto,
+  ScrapeJobEvent,
+  ScrapeJobsResponse,
   ScraperStatsResponse,
   ServiceInfo,
   ServiceStatus,
@@ -53,6 +73,10 @@ export type {
   SubjectSummary,
   SubjectsResponse,
   TermResponse,
+  TimelineRequest,
+  TimelineResponse,
+  TimelineSlot,
+  TimeRange,
   TimeseriesPoint,
   TimeseriesResponse,
   TopCandidateResponse,
@@ -72,64 +96,6 @@ export type ScraperPeriod = "1h" | "6h" | "24h" | "7d" | "30d";
 export type SortColumn = "course_code" | "title" | "instructor" | "time" | "seats";
 export type SortDirection = "asc" | "desc";
 
-export interface AdminStatus {
-  userCount: number;
-  sessionCount: number;
-  courseCount: number;
-  scrapeJobCount: number;
-  services: { name: string; status: string }[];
-}
-
-export interface ScrapeJob {
-  id: number;
-  targetType: string;
-  targetPayload: unknown;
-  priority: string;
-  executeAt: string;
-  createdAt: string;
-  lockedAt: string | null;
-  retryCount: number;
-  maxRetries: number;
-  queuedAt: string;
-  status: "processing" | "staleLock" | "exhausted" | "scheduled" | "pending";
-}
-
-export interface ScrapeJobsResponse {
-  jobs: ScrapeJob[];
-}
-
-export interface AuditLogEntry {
-  id: number;
-  courseId: number;
-  timestamp: string;
-  fieldChanged: string;
-  oldValue: string;
-  newValue: string;
-  subject: string | null;
-  courseNumber: string | null;
-  crn: string | null;
-  courseTitle: string | null;
-}
-
-export interface AuditLogResponse {
-  entries: AuditLogEntry[];
-}
-
-export interface MetricEntry {
-  id: number;
-  courseId: number;
-  timestamp: string;
-  enrollment: number;
-  waitCount: number;
-  seatsAvailable: number;
-}
-
-export interface MetricsResponse {
-  metrics: MetricEntry[];
-  count: number;
-  timestamp: string;
-}
-
 export interface MetricsParams {
   course_id?: number;
   term?: string;
@@ -138,29 +104,6 @@ export interface MetricsParams {
   limit?: number;
 }
 
-/** A time range for timeline queries (ISO-8601 strings). */
-export interface TimelineRange {
-  start: string;
-  end: string;
-}
-
-/** Request body for POST /api/timeline. */
-export interface TimelineRequest {
-  ranges: TimelineRange[];
-}
-
-/** A single 15-minute slot returned by the timeline API. */
-export interface TimelineSlot {
-  time: string;
-  subjects: Record<string, number>;
-}
-
-/** Response from POST /api/timeline. */
-export interface TimelineResponse {
-  slots: TimelineSlot[];
-  subjects: string[];
-}
-
 export interface SearchParams {
   term: string;
   subjects?: string[];
@@ -279,8 +222,8 @@ export class BannerApiClient {
   }
 
   // Admin endpoints
-  async getAdminStatus(): Promise<AdminStatus> {
-    return this.request<AdminStatus>("/admin/status");
+  async getAdminStatus(): Promise<AdminStatusResponse> {
+    return this.request<AdminStatusResponse>("/admin/status");
   }
 
   async getAdminUsers(): Promise {
@@ -331,7 +274,7 @@ export class BannerApiClient {
   /** Stored `Last-Modified` value for audit log conditional requests. */
   private _auditLastModified: string | null = null;
 
-  async getTimeline(ranges: TimelineRange[]): Promise<TimelineResponse> {
+  async getTimeline(ranges: TimeRange[]): Promise<TimelineResponse> {
     return this.request<TimelineResponse>("/timeline", {
       method: "POST",
       body: { ranges } satisfies TimelineRequest,
diff --git a/web/src/lib/bindings/AdminServiceInfo.ts b/web/src/lib/bindings/AdminServiceInfo.ts
new file mode 100644
index 0000000..6b3d576
--- /dev/null
+++ b/web/src/lib/bindings/AdminServiceInfo.ts
@@ -0,0 +1,4 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { ServiceStatus } from "./ServiceStatus";
+
+export type AdminServiceInfo = { name: string, status: ServiceStatus, };
diff --git a/web/src/lib/bindings/AdminStatusResponse.ts b/web/src/lib/bindings/AdminStatusResponse.ts
new file mode 100644
index 0000000..a8d577a
--- /dev/null
+++ b/web/src/lib/bindings/AdminStatusResponse.ts
@@ -0,0 +1,4 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { AdminServiceInfo } from "./AdminServiceInfo";
+
+export type AdminStatusResponse = { userCount: number, sessionCount: number, courseCount: number, scrapeJobCount: number, services: Array<AdminServiceInfo>, };
diff --git a/web/src/lib/bindings/AuditLogEntry.ts b/web/src/lib/bindings/AuditLogEntry.ts
new file mode 100644
index 0000000..d750054
--- /dev/null
+++ b/web/src/lib/bindings/AuditLogEntry.ts
@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type AuditLogEntry = { id: number, courseId: number, timestamp: string, fieldChanged: string, oldValue: string, newValue: string, subject: string | null, courseNumber: string | null, crn: string | null, courseTitle: string | null, };
diff --git a/web/src/lib/bindings/AuditLogResponse.ts b/web/src/lib/bindings/AuditLogResponse.ts
new file mode 100644
index 0000000..ee1e7dc
--- /dev/null
+++ b/web/src/lib/bindings/AuditLogResponse.ts
@@ -0,0 +1,4 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { AuditLogEntry } from "./AuditLogEntry";
+
+export type AuditLogResponse = { entries: Array<AuditLogEntry>, };
diff --git a/web/src/lib/bindings/MetricEntry.ts b/web/src/lib/bindings/MetricEntry.ts
new file mode 100644
index 0000000..9a2beb0
--- /dev/null
+++ b/web/src/lib/bindings/MetricEntry.ts
@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type MetricEntry = { id: number, courseId: number, timestamp: string, enrollment: number, waitCount: number, seatsAvailable: number, };
diff --git a/web/src/lib/bindings/MetricsResponse.ts b/web/src/lib/bindings/MetricsResponse.ts
new file mode 100644
index 0000000..bd53b30
--- /dev/null
+++ b/web/src/lib/bindings/MetricsResponse.ts
@@ -0,0 +1,4 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { MetricEntry } from "./MetricEntry";
+
+export type MetricsResponse = { metrics: Array<MetricEntry>, count: number, timestamp: string, };
diff --git a/web/src/lib/bindings/ScrapeJobDto.ts b/web/src/lib/bindings/ScrapeJobDto.ts
new file mode 100644
index 0000000..56b43a2
--- /dev/null
+++ b/web/src/lib/bindings/ScrapeJobDto.ts
@@ -0,0 +1,8 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { ScrapeJobStatus } from "./ScrapeJobStatus";
+import type { JsonValue } from "./serde_json/JsonValue";
+
+/**
+ * A serializable DTO for `ScrapeJob` with computed `status`.
+ */
+export type ScrapeJobDto = { id: number, targetType: string, targetPayload: JsonValue, priority: string, executeAt: string, createdAt: string, lockedAt: string | null, retryCount: number, maxRetries: number, queuedAt: string, status: ScrapeJobStatus, };
diff --git a/web/src/lib/bindings/ScrapeJobEvent.ts b/web/src/lib/bindings/ScrapeJobEvent.ts
new file mode 100644
index 0000000..8108f3a
--- /dev/null
+++ b/web/src/lib/bindings/ScrapeJobEvent.ts
@@ -0,0 +1,8 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { ScrapeJobDto } from "./ScrapeJobDto";
+import type { ScrapeJobStatus } from "./ScrapeJobStatus";
+
+/**
+ * Events broadcast when scrape job state changes.
+ */
+export type ScrapeJobEvent = { "type": "init", jobs: Array<ScrapeJobDto>, } | { "type": "jobCreated", job: ScrapeJobDto, } | { "type": "jobLocked", id: number, lockedAt: string, status: ScrapeJobStatus, } | { "type": "jobCompleted", id: number, } | { "type": "jobRetried", id: number, retryCount: number, queuedAt: string, status: ScrapeJobStatus, } | { "type": "jobExhausted", id: number, } | { "type": "jobDeleted", id: number, };
diff --git a/web/src/lib/bindings/ScrapeJobStatus.ts b/web/src/lib/bindings/ScrapeJobStatus.ts
new file mode 100644
index 0000000..a20e05a
--- /dev/null
+++ b/web/src/lib/bindings/ScrapeJobStatus.ts
@@ -0,0 +1,6 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Computed status for a scrape job, derived from existing fields.
+ */
+export type ScrapeJobStatus = "processing" | "staleLock" | "exhausted" | "scheduled" | "pending";
diff --git a/web/src/lib/bindings/ScrapeJobsResponse.ts b/web/src/lib/bindings/ScrapeJobsResponse.ts
new file mode 100644
index 0000000..4183684
--- /dev/null
+++ b/web/src/lib/bindings/ScrapeJobsResponse.ts
@@ -0,0 +1,4 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { ScrapeJobDto } from "./ScrapeJobDto";
+
+export type ScrapeJobsResponse = { jobs: Array<ScrapeJobDto>, };
diff --git a/web/src/lib/bindings/TimeRange.ts b/web/src/lib/bindings/TimeRange.ts
new file mode 100644
index 0000000..b877ca1
--- /dev/null
+++ b/web/src/lib/bindings/TimeRange.ts
@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type TimeRange = { start: string, end: string, };
diff --git a/web/src/lib/bindings/TimelineRequest.ts b/web/src/lib/bindings/TimelineRequest.ts
new file mode 100644
index 0000000..79461ad
--- /dev/null
+++ b/web/src/lib/bindings/TimelineRequest.ts
@@ -0,0 +1,4 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { TimeRange } from "./TimeRange";
+
+export type TimelineRequest = { ranges: Array<TimeRange>, };
diff --git a/web/src/lib/bindings/TimelineSlot.ts b/web/src/lib/bindings/TimelineSlot.ts
index eb4ffe3..b05b395 100644
--- a/web/src/lib/bindings/TimelineSlot.ts
+++ b/web/src/lib/bindings/TimelineSlot.ts
@@ -8,4 +8,4 @@ time: string,
 /**
  * Subject code → total enrollment in this slot.
  */
-subjects: { [key in string]?: bigint }, };
+subjects: Record<string, number>, };
diff --git a/web/src/lib/bindings/index.ts b/web/src/lib/bindings/index.ts
index 1de67e1..f2446a6 100644
--- a/web/src/lib/bindings/index.ts
+++ b/web/src/lib/bindings/index.ts
@@ -1,3 +1,7 @@
+export type { AdminServiceInfo } from "./AdminServiceInfo";
+export type { AdminStatusResponse } from "./AdminStatusResponse";
+export type { AuditLogEntry } from "./AuditLogEntry";
+export type { AuditLogResponse } from "./AuditLogResponse";
 export type { CandidateResponse } from "./CandidateResponse";
 export type { CodeDescription } from "./CodeDescription";
 export type { CourseResponse } from "./CourseResponse";
@@ -9,8 +13,14 @@ export type { InstructorResponse } from "./InstructorResponse";
 export type { InstructorStats } from "./InstructorStats";
 export type { LinkedRmpProfile } from "./LinkedRmpProfile";
 export type { ListInstructorsResponse } from "./ListInstructorsResponse";
+export type { MetricEntry } from "./MetricEntry";
+export type { MetricsResponse } from "./MetricsResponse";
 export type { OkResponse } from "./OkResponse";
 export type { RescoreResponse } from "./RescoreResponse";
+export type { ScrapeJobDto } from "./ScrapeJobDto";
+export type { ScrapeJobEvent } from "./ScrapeJobEvent";
+export type { ScrapeJobStatus } from "./ScrapeJobStatus";
+export type { ScrapeJobsResponse } from "./ScrapeJobsResponse";
 export type { ScraperStatsResponse } from "./ScraperStatsResponse";
 export type { SearchResponse } from "./SearchResponse";
 export type { ServiceInfo } from "./ServiceInfo";
@@ -21,6 +31,8 @@ export type { SubjectResultEntry } from "./SubjectResultEntry";
 export type { SubjectSummary } from "./SubjectSummary";
 export type { SubjectsResponse } from "./SubjectsResponse";
 export type { TermResponse } from "./TermResponse";
+export type { TimeRange } from "./TimeRange";
+export type { TimelineRequest } from "./TimelineRequest";
 export type { TimelineResponse } from "./TimelineResponse";
 export type { TimelineSlot } from "./TimelineSlot";
 export type { TimeseriesPoint } from "./TimeseriesPoint";
diff --git a/web/src/lib/bindings/serde_json/JsonValue.ts b/web/src/lib/bindings/serde_json/JsonValue.ts
new file mode 100644
index 0000000..3ad5da8
--- /dev/null
+++ b/web/src/lib/bindings/serde_json/JsonValue.ts
@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type JsonValue = number | string | boolean | Array<JsonValue> | { [key in string]?: JsonValue } | null;
diff --git a/web/src/lib/timeline/store.svelte.ts b/web/src/lib/timeline/store.svelte.ts
index 2345ca3..07233e6 100644
--- a/web/src/lib/timeline/store.svelte.ts
+++ b/web/src/lib/timeline/store.svelte.ts
@@ -5,7 +5,7 @@
  * the missing segments when the view expands into unloaded territory.
  * Fetches are throttled so rapid panning/zooming doesn't flood the API.
  */
-import { type TimelineRange, client } from "$lib/api";
+import { type TimeRange, client } from "$lib/api";
 import { SLOT_INTERVAL_MS } from "./constants";
 import type { TimeSlot } from "./types";
 
@@ -74,7 +74,7 @@ function mergeRange(ranges: Range[], added: Range): Range[] {
  * Converts gap ranges into the API request format.
  */
 async function fetchFromApi(gaps: Range[]): Promise<TimeSlot[]> {
-  const ranges: TimelineRange[] = gaps.map(([start, end]) => ({
+  const ranges: TimeRange[] = gaps.map(([start, end]) => ({
     start: new Date(start).toISOString(),
     end: new Date(end).toISOString(),
   }));
@@ -83,7 +83,9 @@ async function fetchFromApi(gaps: Range[]): Promise<TimeSlot[]> {
 
   return response.slots.map((slot) => ({
     time: new Date(slot.time),
-    subjects: slot.subjects,
+    subjects: Object.fromEntries(
+      Object.entries(slot.subjects).map(([k, v]) => [k, Number(v)])
+    ) as Record<string, number>,
   }));
 }
 
diff --git a/web/src/lib/ws.ts b/web/src/lib/ws.ts
index cdf5ffb..03ce57e 100644
--- a/web/src/lib/ws.ts
+++ b/web/src/lib/ws.ts
@@ -1,21 +1,4 @@
-import type { ScrapeJob } from "$lib/api";
-
-export type ScrapeJobStatus = "processing" | "staleLock" | "exhausted" | "scheduled" | "pending";
-
-export type ScrapeJobEvent =
-  | { type: "init"; jobs: ScrapeJob[] }
-  | { type: "jobCreated"; job: ScrapeJob }
-  | { type: "jobLocked"; id: number; lockedAt: string; status: ScrapeJobStatus }
-  | { type: "jobCompleted"; id: number }
-  | {
-      type: "jobRetried";
-      id: number;
-      retryCount: number;
-      queuedAt: string;
-      status: ScrapeJobStatus;
-    }
-  | { type: "jobExhausted"; id: number }
-  | { type: "jobDeleted"; id: number };
+import type { ScrapeJobDto, ScrapeJobEvent } from "$lib/bindings";
 
 export type ConnectionState = "connected" | "reconnecting" | "disconnected";
 
@@ -29,7 +12,7 @@ const PRIORITY_ORDER: Record<string, number> = {
 const MAX_RECONNECT_DELAY = 30_000;
 const MAX_RECONNECT_ATTEMPTS = 10;
 
-function sortJobs(jobs: Iterable<ScrapeJob>): ScrapeJob[] {
+function sortJobs(jobs: Iterable<ScrapeJobDto>): ScrapeJobDto[] {
   return Array.from(jobs).sort((a, b) => {
     const pa = PRIORITY_ORDER[a.priority.toLowerCase()] ?? 2;
     const pb = PRIORITY_ORDER[b.priority.toLowerCase()] ?? 2;
@@ -40,7 +23,7 @@ function sortJobs(jobs: Iterable<ScrapeJob>): ScrapeJob[] {
 
 export class ScrapeJobsStore {
   private ws: WebSocket | null = null;
-  private jobs = new Map<number, ScrapeJob>();
+  private jobs = new Map<number, ScrapeJobDto>();
   private _connectionState: ConnectionState = "disconnected";
   private _initialized = false;
   private onUpdate: () => void;
@@ -49,14 +32,14 @@ export class ScrapeJobsStore {
   private intentionalClose = false;
 
   /** Cached sorted array, invalidated on data mutations. */
-  private cachedJobs: ScrapeJob[] = [];
+  private cachedJobs: ScrapeJobDto[] = [];
   private cacheDirty = false;
 
   constructor(onUpdate: () => void) {
     this.onUpdate = onUpdate;
   }
 
-  getJobs(): ScrapeJob[] {
+  getJobs(): ScrapeJobDto[] {
     if (this.cacheDirty) {
       this.cachedJobs = sortJobs(this.jobs.values());
       this.cacheDirty = false;
diff --git a/web/src/routes/(app)/admin/+page.svelte b/web/src/routes/(app)/admin/+page.svelte
index 25d3491..d38802b 100644
--- a/web/src/routes/(app)/admin/+page.svelte
+++ b/web/src/routes/(app)/admin/+page.svelte
@@ -1,9 +1,9 @@