feat: modernize build tooling and add CI/CD workflow

Switch to Bun for 2-5x faster frontend builds, implement cargo-chef for
reliable Rust dependency caching, and add Biome for fast code
formatting.

Build system improvements:
- Replace pnpm with Bun for frontend package management
- Add cargo-chef to Dockerfile for better Rust build layer caching
- Update all commands to use bun instead of pnpm

Developer experience:
- Add comprehensive Justfile commands (format, format-check, db)
- Implement automated PostgreSQL Docker setup with random port
allocation
- Add stricter checks (--deny warnings on clippy, --all-features flag)

Code quality:
- Add Biome formatter for 10-100x faster TypeScript/JavaScript
formatting
- Add GitHub Actions CI/CD workflow for automated checks
- Update .dockerignore with comprehensive exclusions
- Format all code with cargo fmt (Rust) and Biome (TypeScript)

All changes maintain backward compatibility and can be tested
incrementally.
Author: Ryan Walters
Date:   2025-11-18 11:55:06 -06:00
Commit: 966732a6d2 (parent 3292d35521)
27 changed files with 1658 additions and 6939 deletions

.dockerignore

@@ -13,6 +13,16 @@ go/
 # Development configuration
 bacon.toml
 .env
+.env.*
+!.env.example
+
+# CI/CD
+.github/
+.git/
+
+# Development tools
+Justfile
+rust-toolchain.toml
 
 # Frontend build artifacts and cache
 web/node_modules/
@@ -20,4 +30,22 @@ web/dist/
 web/.vite/
 web/.tanstack/
 web/.vscode/
+
+# IDE and editor files
 .vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# OS files
+.DS_Store
+Thumbs.db
+
+# Test coverage
+coverage/
+*.profdata
+*.profraw
+
+# SQLx offline mode (include this in builds)
+!.sqlx/
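The `!.sqlx/` negation keeps SQLx's offline query metadata in the Docker build context, so the image compiles without a live database. A minimal sketch of refreshing that metadata, assuming sqlx-cli is installed and `DATABASE_URL` points at a running database:

```bash
# Regenerate .sqlx/ query metadata for offline builds (sketch; assumes sqlx-cli)
cargo sqlx prepare
```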

.github/workflows/ci.yml (new file, 65 lines)

@@ -0,0 +1,65 @@
name: CI

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

env:
  CARGO_TERM_COLOR: always
  RUST_BACKTRACE: 1

jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          components: rustfmt, clippy

      - name: Setup Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: latest

      - name: Cache Rust dependencies
        uses: Swatinem/rust-cache@v2
        with:
          cache-on-failure: true

      - name: Install frontend dependencies
        working-directory: web
        run: bun install --frozen-lockfile

      - name: Check Rust formatting
        run: cargo fmt --all -- --check

      - name: Check TypeScript formatting
        working-directory: web
        run: bun run format:check

      - name: TypeScript type check
        working-directory: web
        run: bun run typecheck

      - name: ESLint
        working-directory: web
        run: bun run lint

      - name: Clippy
        run: cargo clippy --all-features -- --deny warnings

      - name: Run tests
        run: cargo test --all-features

      - name: Build frontend
        working-directory: web
        run: bun run build

      - name: Build backend
        run: cargo build --release --bin banner
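These steps roughly mirror the local `just check` recipe (see the Justfile diff below), so a failing pipeline should be reproducible on a workstation. A sketch of the equivalent local run, assuming cargo-nextest and Bun are installed:

```bash
# Reproduce the CI gate locally (sketch; CI uses `cargo test`, just check uses nextest)
cargo fmt --all -- --check
cargo clippy --all-features -- --deny warnings
cargo nextest run
bun run --cwd web typecheck
bun run --cwd web lint
```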

Dockerfile

@@ -2,11 +2,8 @@
 ARG RUST_VERSION=1.89.0
 ARG RAILWAY_GIT_COMMIT_SHA
 
-# Frontend Build Stage
-FROM node:22-bookworm-slim AS frontend-builder
+# --- Frontend Build Stage ---
+FROM oven/bun:1 AS frontend-builder
 
-# Install pnpm
-RUN npm install -g pnpm
 
 WORKDIR /app
@@ -14,64 +11,62 @@ WORKDIR /app
 COPY ./Cargo.toml ./
 
 # Copy frontend package files
-COPY ./web/package.json ./web/pnpm-lock.yaml ./
+COPY ./web/package.json ./web/bun.lock* ./
 
 # Install dependencies
-RUN pnpm install --frozen-lockfile
+RUN bun install --frozen-lockfile
 
 # Copy frontend source code
 COPY ./web ./
 
 # Build frontend
-RUN pnpm run build
+RUN bun run build
 
-# Rust Build Stage
-FROM rust:${RUST_VERSION}-bookworm AS builder
+# --- Chef Base Stage ---
+FROM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION} AS chef
+WORKDIR /app
+
+# --- Planner Stage ---
+FROM chef AS planner
+COPY Cargo.toml Cargo.lock ./
+COPY build.rs ./
+COPY src ./src
+# Migrations & .sqlx specifically left out to avoid invalidating cache
+RUN cargo chef prepare --recipe-path recipe.json --bin banner
+
+# --- Rust Build Stage ---
+FROM chef AS builder
 
 # Set build-time environment variable for Railway Git commit SHA
+ARG RAILWAY_GIT_COMMIT_SHA
 ENV RAILWAY_GIT_COMMIT_SHA=${RAILWAY_GIT_COMMIT_SHA}
 
-# Install build dependencies
+# Copy recipe from planner and build dependencies only
+COPY --from=planner /app/recipe.json recipe.json
+RUN cargo chef cook --release --recipe-path recipe.json --bin banner
+
+# Install build dependencies for final compilation
 RUN apt-get update && apt-get install -y \
     pkg-config \
     libssl-dev \
    git \
     && rm -rf /var/lib/apt/lists/*
 
-WORKDIR /usr/src
-RUN USER=root cargo new --bin banner
-WORKDIR /usr/src/banner
-
-# Copy dependency files for better layer caching
-COPY ./Cargo.toml ./Cargo.lock* ./
-
-# Copy .git directory for build.rs to access Git information (if available)
-# This will copy .git (and .gitignore) if it exists, but won't fail if it doesn't
-# While normally a COPY requires at least one file, .gitignore should still be available, so this wildcard should always work
-COPY ./.git* ./
-
-# Copy build.rs early so it can run during the first build
-COPY ./build.rs ./
-
-# Build empty app with downloaded dependencies to produce a stable image layer for next build
-RUN cargo build --release
-
-# Copy source code
-RUN rm src/*.rs
-COPY ./src ./src/
-COPY ./migrations ./migrations/
-
-# Copy built frontend assets
+# Copy source code and built frontend assets
+COPY Cargo.toml Cargo.lock ./
+COPY build.rs ./
+COPY .git* ./
+COPY src ./src
+COPY migrations ./migrations
 COPY --from=frontend-builder /app/dist ./web/dist
 
 # Build web app with embedded assets
-RUN rm ./target/release/deps/banner*
-RUN cargo build --release
+RUN cargo build --release --bin banner
 
 # Strip the binary to reduce size
 RUN strip target/release/banner
 
-# Runtime Stage - Debian slim for glibc compatibility
+# --- Runtime Stage ---
 FROM debian:12-slim
 
 ARG APP=/usr/src/app
@@ -95,7 +90,7 @@ RUN addgroup --gid $GID $APP_USER \
     && mkdir -p ${APP}
 
 # Copy application binary
-COPY --from=builder --chown=$APP_USER:$APP_USER /usr/src/banner/target/release/banner ${APP}/banner
+COPY --from=builder --chown=$APP_USER:$APP_USER /app/target/release/banner ${APP}/banner
 
 # Set proper permissions
 RUN chmod +x ${APP}/banner
@@ -118,4 +113,4 @@ ENV HOSTS=0.0.0.0,[::]
 # Implicitly uses PORT environment variable
 # temporary: running without 'scraper' service
 CMD ["sh", "-c", "exec ./banner --services web,bot"]

Justfile

@@ -3,19 +3,63 @@ default_services := "bot,web,scraper"
 default:
     just --list
 
+# Run all checks (format, clippy, tests, lint)
 check:
-    cargo check
-    cargo clippy
+    cargo fmt --all -- --check
+    cargo clippy --all-features -- --deny warnings
     cargo nextest run
-    pnpm run -C web lint
+    bun run --cwd web typecheck
+    bun run --cwd web lint
+
+# Format all Rust and TypeScript code
+format:
+    cargo fmt --all
+    bun run --cwd web format
+
+# Check formatting without modifying (CI-friendly)
+format-check:
+    cargo fmt --all -- --check
+    bun run --cwd web format:check
+
+# Start PostgreSQL in Docker and update .env with connection string
+db:
+    #!/usr/bin/env bash
+    set -euo pipefail
+
+    # Find available port
+    PORT=$(shuf -i 49152-65535 -n 1)
+    while ss -tlnp 2>/dev/null | grep -q ":$PORT "; do
+        PORT=$(shuf -i 49152-65535 -n 1)
+    done
+
+    # Start PostgreSQL container
+    docker run -d \
+        --name banner-postgres \
+        -e POSTGRES_PASSWORD=banner \
+        -e POSTGRES_USER=banner \
+        -e POSTGRES_DB=banner \
+        -p "$PORT:5432" \
+        postgres:17-alpine
+
+    # Update .env file
+    DB_URL="postgresql://banner:banner@localhost:$PORT/banner"
+    if [ -f .env ]; then
+        sed -i.bak "s|^DATABASE_URL=.*|DATABASE_URL=$DB_URL|" .env
+    else
+        echo "DATABASE_URL=$DB_URL" > .env
+    fi
+
+    echo "PostgreSQL started on port $PORT"
+    echo "DATABASE_URL=$DB_URL"
+    echo "Run: sqlx migrate run"
+
 # Auto-reloading frontend server
 frontend:
-    pnpm run -C web dev
+    bun run --cwd web dev
 
 # Production build of frontend
 build-frontend:
-    pnpm run -C web build
+    bun run --cwd web build
 
 # Auto-reloading backend server
 backend *ARGS:
@@ -23,15 +67,13 @@ backend *ARGS:
 # Production build
 build:
-    pnpm run -C web build
+    bun run --cwd web build
     cargo build --release --bin banner
 
-# Run auto-reloading development build with release characteristics (frontend is embedded, non-auto-reloading)
-# This is useful for testing backend release-mode details.
+# Run auto-reloading development build with release characteristics
 dev-build *ARGS='--services web --tracing pretty': build-frontend
     bacon --headless run -- --profile dev-release -- {{ARGS}}
 
 # Auto-reloading development build for both frontend and backend
-# Will not notice if either the frontend/backend crashes, but will generally be resistant to stopping on their own.
 [parallel]
 dev *ARGS='--services web,bot': frontend (backend ARGS)
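Taken together, the recipes give a disposable local loop. A typical session might look like this (assumes Docker and sqlx-cli are installed):

```bash
just db            # start Postgres on a random free port, write DATABASE_URL to .env
sqlx migrate run   # apply migrations, as the recipe's final hint suggests
just dev           # auto-reloading frontend + backend
```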

README.md

@@ -26,7 +26,7 @@ The application consists of three modular services that can be run independently
 ## Quick Start
 
 ```bash
-pnpm install -C web    # Install frontend dependencies
+bun install --cwd web  # Install frontend dependencies
 cargo build            # Build the backend
 just dev               # Runs auto-reloading dev build


@@ -124,7 +124,7 @@ impl App {
     /// Setup bot service if enabled
     pub async fn setup_bot_service(&mut self) -> Result<(), anyhow::Error> {
         use std::sync::Arc;
-        use tokio::sync::{broadcast, Mutex};
+        use tokio::sync::{Mutex, broadcast};
 
         // Create shutdown channel for status update task
         let (status_shutdown_tx, status_shutdown_rx) = broadcast::channel(1);


@@ -32,7 +32,10 @@ pub fn parse_json_with_context<T: serde::de::DeserializeOwned>(body: &str) -> Re
     if !path.is_empty() && path != "." {
         err_msg.push_str(&format!("for path '{}'\n", path));
     }
-    err_msg.push_str(&format!("({}) at line {} column {}\n\n", type_info, line, column));
+    err_msg.push_str(&format!(
+        "({}) at line {} column {}\n\n",
+        type_info, line, column
+    ));
     err_msg.push_str(&context);
 
     err_msg
@@ -86,10 +89,10 @@ fn parse_type_mismatch(error_msg: &str) -> String {
     }
 
     // Try to parse "expected X at line Y" format
-    if error_msg.starts_with("expected ") {
-        if let Some(expected_part) = error_msg.split(" at line ").next() {
-            return expected_part.to_string();
-        }
-    }
+    if error_msg.starts_with("expected ")
+        && let Some(expected_part) = error_msg.split(" at line ").next()
+    {
+        return expected_part.to_string();
+    }
 
     // Fallback: return original message without location info


@@ -316,9 +316,11 @@ impl SessionPool {
return Err(anyhow::anyhow!("Failed to get cookies")); return Err(anyhow::anyhow!("Failed to get cookies"));
} }
let jsessionid = cookies.get("JSESSIONID") let jsessionid = cookies
.get("JSESSIONID")
.ok_or_else(|| anyhow::anyhow!("JSESSIONID cookie missing after validation"))?; .ok_or_else(|| anyhow::anyhow!("JSESSIONID cookie missing after validation"))?;
let ssb_cookie = cookies.get("SSB_COOKIE") let ssb_cookie = cookies
.get("SSB_COOKIE")
.ok_or_else(|| anyhow::anyhow!("SSB_COOKIE cookie missing after validation"))?; .ok_or_else(|| anyhow::anyhow!("SSB_COOKIE cookie missing after validation"))?;
let cookie_header = format!("JSESSIONID={}; SSB_COOKIE={}", jsessionid, ssb_cookie); let cookie_header = format!("JSESSIONID={}; SSB_COOKIE={}", jsessionid, ssb_cookie);
@@ -437,15 +439,23 @@ impl SessionPool {
         let redirect: RedirectResponse = response.json().await?;
 
-        let base_url_path = self.base_url.parse::<Url>()
+        let base_url_path = self
+            .base_url
+            .parse::<Url>()
             .context("Failed to parse base URL")?
             .path()
             .to_string();
 
-        let non_overlap_redirect = redirect.fwd_url.strip_prefix(&base_url_path)
-            .ok_or_else(|| anyhow::anyhow!(
-                "Redirect URL '{}' does not start with expected prefix '{}'",
-                redirect.fwd_url, base_url_path
-            ))?;
+        let non_overlap_redirect =
+            redirect
+                .fwd_url
+                .strip_prefix(&base_url_path)
+                .ok_or_else(|| {
+                    anyhow::anyhow!(
+                        "Redirect URL '{}' does not start with expected prefix '{}'",
+                        redirect.fwd_url,
+                        base_url_path
+                    )
+                })?;
 
         // Follow the redirect
         let redirect_url = format!("{}{}", self.base_url, non_overlap_redirect);


@@ -54,10 +54,7 @@ pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Resul
     let subjects: Vec<&str> = courses.iter().map(|c| c.subject.as_str()).collect();
-    let course_numbers: Vec<&str> = courses
-        .iter()
-        .map(|c| c.course_number.as_str())
-        .collect();
+    let course_numbers: Vec<&str> = courses.iter().map(|c| c.course_number.as_str()).collect();
     let titles: Vec<&str> = courses.iter().map(|c| c.course_title.as_str()).collect();


@@ -67,7 +67,6 @@ impl ScraperService {
"Spawned worker tasks" "Spawned worker tasks"
); );
} }
} }
#[async_trait::async_trait] #[async_trait::async_trait]
@@ -104,7 +103,10 @@ impl Service for ScraperService {
         let results = futures::future::join_all(all_handles).await;
         let failed = results.iter().filter(|r| r.is_err()).count();
         if failed > 0 {
-            warn!(failed_count = failed, "Some scraper tasks panicked during shutdown");
+            warn!(
+                failed_count = failed,
+                "Some scraper tasks panicked during shutdown"
+            );
             return Err(anyhow::anyhow!("{} task(s) panicked", failed));
         }


@@ -7,7 +7,7 @@ use std::sync::Arc;
 use std::time::Duration;
 use tokio::sync::broadcast;
 use tokio::time;
-use tracing::{debug, error, info, trace, warn, Instrument};
+use tracing::{Instrument, debug, error, info, trace, warn};
 
 /// A single worker instance.
 ///
@@ -73,7 +73,8 @@ impl Worker {
             let duration = start.elapsed();
 
             // Handle the job processing result
-            self.handle_job_result(job_id, retry_count, max_retries, process_result, duration).await;
+            self.handle_job_result(job_id, retry_count, max_retries, process_result, duration)
+                .await;
         }
     }
@@ -158,7 +159,7 @@ impl Worker {
"UPDATE scrape_jobs "UPDATE scrape_jobs
SET locked_at = NULL, retry_count = retry_count + 1 SET locked_at = NULL, retry_count = retry_count + 1
WHERE id = $1 WHERE id = $1
RETURNING CASE WHEN retry_count + 1 < $2 THEN retry_count + 1 ELSE NULL END" RETURNING CASE WHEN retry_count + 1 < $2 THEN retry_count + 1 ELSE NULL END",
) )
.bind(job_id) .bind(job_id)
.bind(max_retries) .bind(max_retries)
@@ -170,7 +171,10 @@ impl Worker {
     /// Handle shutdown signal received during job processing
     async fn handle_shutdown_during_processing(&self, job_id: i32) {
-        info!(worker_id = self.id, job_id, "Shutdown received during job processing");
+        info!(
+            worker_id = self.id,
+            job_id, "Shutdown received during job processing"
+        );
 
         if let Err(e) = self.unlock_job(job_id).await {
             warn!(
@@ -187,7 +191,14 @@ impl Worker {
     }
 
     /// Handle the result of job processing
-    async fn handle_job_result(&self, job_id: i32, retry_count: i32, max_retries: i32, result: Result<(), JobError>, duration: std::time::Duration) {
+    async fn handle_job_result(
+        &self,
+        job_id: i32,
+        retry_count: i32,
+        max_retries: i32,
+        result: Result<(), JobError>,
+        duration: std::time::Duration,
+    ) {
         match result {
             Ok(()) => {
                 debug!(
@@ -201,7 +212,8 @@ impl Worker {
                 }
             }
             Err(JobError::Recoverable(e)) => {
-                self.handle_recoverable_error(job_id, retry_count, max_retries, e, duration).await;
+                self.handle_recoverable_error(job_id, retry_count, max_retries, e, duration)
+                    .await;
             }
             Err(JobError::Unrecoverable(e)) => {
                 error!(
@@ -219,7 +231,14 @@ impl Worker {
     }
 
     /// Handle recoverable errors by logging appropriately and unlocking the job
-    async fn handle_recoverable_error(&self, job_id: i32, retry_count: i32, max_retries: i32, e: anyhow::Error, duration: std::time::Duration) {
+    async fn handle_recoverable_error(
+        &self,
+        job_id: i32,
+        retry_count: i32,
+        max_retries: i32,
+        e: anyhow::Error,
+        duration: std::time::Duration,
+    ) {
         let next_attempt = retry_count.saturating_add(1);
         let remaining_retries = max_retries.saturating_sub(next_attempt);


@@ -7,7 +7,7 @@ use serenity::Client;
 use serenity::all::{ActivityData, ClientBuilder, GatewayIntents};
 use std::sync::Arc;
 use std::time::Duration;
-use tokio::sync::{broadcast, Mutex};
+use tokio::sync::{Mutex, broadcast};
 use tokio::task::JoinHandle;
 use tracing::{debug, error, info, warn};
@@ -91,7 +91,11 @@ impl BotService {
                 poise::builtins::register_globally(ctx, &framework.options().commands).await?;
 
                 // Start status update task with shutdown support
-                let handle = Self::start_status_update_task(ctx.clone(), app_state.clone(), status_shutdown_rx);
+                let handle = Self::start_status_update_task(
+                    ctx.clone(),
+                    app_state.clone(),
+                    status_shutdown_rx,
+                );
                 *status_task_handle.lock().await = Some(handle);
 
                 Ok(Data { app_state })


@@ -62,7 +62,8 @@ impl ServiceManager {
         });
 
         // Store abort handle for shutdown control
-        self.service_handles.insert(name.clone(), handle.abort_handle());
+        self.service_handles
+            .insert(name.clone(), handle.abort_handle());
 
         debug!(service = name, id = ?handle.id(), "service spawned");
     }


@@ -12,7 +12,10 @@ async fn test_join_tasks_success() {
     // All tasks should complete successfully
     let result = join_tasks(handles).await;
-    assert!(result.is_ok(), "Expected all tasks to complete successfully");
+    assert!(
+        result.is_ok(),
+        "Expected all tasks to complete successfully"
+    );
 }
 
 #[tokio::test]
@@ -29,5 +32,8 @@ async fn test_join_tasks_with_panic() {
     assert!(result.is_err(), "Expected an error when a task panics");
 
     let error_msg = result.unwrap_err().to_string();
-    assert!(error_msg.contains("1 task(s) panicked"), "Error message should mention panicked tasks");
+    assert!(
+        error_msg.contains("1 task(s) panicked"),
+        "Error message should mention panicked tasks"
+    );
 }

web/biome.json (new file, 30 lines)

@@ -0,0 +1,30 @@
{
  "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
  "vcs": {
    "enabled": true,
    "clientKind": "git",
    "useIgnoreFile": true
  },
  "files": {
    "ignoreUnknown": false,
    "ignore": ["dist/", "node_modules/", ".tanstack/"]
  },
  "formatter": {
    "enabled": true,
    "indentStyle": "space",
    "indentWidth": 2,
    "lineWidth": 100,
    "lineEnding": "lf"
  },
  "javascript": {
    "formatter": {
      "quoteStyle": "double",
      "trailingCommas": "es5",
      "semicolons": "always",
      "arrowParentheses": "always"
    }
  },
  "linter": {
    "enabled": false
  }
}
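With the linter disabled here, Biome handles formatting only while ESLint keeps linting. The config is exercised through the new package scripts (see the package.json diff below):

```bash
bun run --cwd web format         # biome format --write .
bun run --cwd web format:check   # read-only check, used by CI and `just format-check`
```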

web/bun.lock (new file, 1297 lines)

(File diff suppressed because it is too large.)

web/eslint.config.js

@@ -1,24 +1,24 @@
-import js from '@eslint/js';
-import tseslint from 'typescript-eslint';
-import react from 'eslint-plugin-react';
-import reactHooks from 'eslint-plugin-react-hooks';
-import reactRefresh from 'eslint-plugin-react-refresh';
+import js from "@eslint/js";
+import tseslint from "typescript-eslint";
+import react from "eslint-plugin-react";
+import reactHooks from "eslint-plugin-react-hooks";
+import reactRefresh from "eslint-plugin-react-refresh";
 
 export default tseslint.config(
   // Ignore generated files and build outputs
   {
-    ignores: ['dist', 'node_modules', 'src/routeTree.gen.ts', '*.config.js'],
+    ignores: ["dist", "node_modules", "src/routeTree.gen.ts", "*.config.js"],
   },
 
   // Base configs
   js.configs.recommended,
   ...tseslint.configs.recommendedTypeChecked,
 
   // React plugin configuration
   {
-    files: ['**/*.{ts,tsx}'],
+    files: ["**/*.{ts,tsx}"],
     plugins: {
       react,
-      'react-hooks': reactHooks,
-      'react-refresh': reactRefresh,
+      "react-hooks": reactHooks,
+      "react-refresh": reactRefresh,
     },
     languageOptions: {
       parserOptions: {
@@ -31,33 +31,30 @@ export default tseslint.config(
     },
     settings: {
       react: {
-        version: '19.0',
+        version: "19.0",
       },
     },
     rules: {
       // React rules
       ...react.configs.recommended.rules,
-      ...react.configs['jsx-runtime'].rules,
+      ...react.configs["jsx-runtime"].rules,
       ...reactHooks.configs.recommended.rules,
 
       // React Refresh
-      'react-refresh/only-export-components': [
-        'warn',
-        { allowConstantExport: true },
-      ],
+      "react-refresh/only-export-components": ["warn", { allowConstantExport: true }],
 
       // TypeScript overrides
-      '@typescript-eslint/no-unused-vars': [
-        'error',
+      "@typescript-eslint/no-unused-vars": [
+        "error",
         {
-          argsIgnorePattern: '^_',
-          varsIgnorePattern: '^_',
+          argsIgnorePattern: "^_",
+          varsIgnorePattern: "^_",
         },
       ],
-      '@typescript-eslint/no-explicit-any': 'warn',
+      "@typescript-eslint/no-explicit-any": "warn",
 
       // Disable prop-types since we're using TypeScript
-      'react/prop-types': 'off',
+      "react/prop-types": "off",
     },
   }
 );

web/package.json

@@ -8,7 +8,10 @@
"build": "vite build && tsc", "build": "vite build && tsc",
"serve": "vite preview", "serve": "vite preview",
"test": "vitest run", "test": "vitest run",
"lint": "tsc && eslint . --ext .ts,.tsx" "lint": "tsc && eslint . --ext .ts,.tsx",
"typecheck": "tsc --noEmit",
"format": "biome format --write .",
"format:check": "biome format ."
}, },
"dependencies": { "dependencies": {
"@radix-ui/themes": "^3.2.1", "@radix-ui/themes": "^3.2.1",
@@ -24,6 +27,7 @@
"recharts": "^3.2.0" "recharts": "^3.2.0"
}, },
"devDependencies": { "devDependencies": {
"@biomejs/biome": "^1.9.4",
"@eslint/js": "^9.39.0", "@eslint/js": "^9.39.0",
"@testing-library/dom": "^10.4.0", "@testing-library/dom": "^10.4.0",
"@testing-library/react": "^16.2.0", "@testing-library/react": "^16.2.0",

web/pnpm-lock.yaml (generated file removed, 6749 lines)

(File diff suppressed because it is too large.)

web/src/App.css

@@ -1,7 +1,6 @@
 .App {
   min-height: 100vh;
-  font-family:
-    -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu",
+  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu",
     "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif;
   background-color: var(--color-background);
   color: var(--color-text);


@@ -38,9 +38,7 @@ export class BannerApiClient {
     const response = await fetch(`${this.baseUrl}${endpoint}`);
 
     if (!response.ok) {
-      throw new Error(
-        `API request failed: ${response.status} ${response.statusText}`
-      );
+      throw new Error(`API request failed: ${response.status} ${response.statusText}`);
     }
 
     return (await response.json()) as T;

web/src/reportWebVitals.ts

@@ -1,13 +1,13 @@
 const reportWebVitals = (onPerfEntry?: () => void) => {
   if (onPerfEntry && onPerfEntry instanceof Function) {
-    void import('web-vitals').then(({ onCLS, onINP, onFCP, onLCP, onTTFB }) => {
-      onCLS(onPerfEntry)
-      onINP(onPerfEntry)
-      onFCP(onPerfEntry)
-      onLCP(onPerfEntry)
-      onTTFB(onPerfEntry)
-    })
+    void import("web-vitals").then(({ onCLS, onINP, onFCP, onLCP, onTTFB }) => {
+      onCLS(onPerfEntry);
+      onINP(onPerfEntry);
+      onFCP(onPerfEntry);
+      onLCP(onPerfEntry);
+      onTTFB(onPerfEntry);
+    });
   }
-}
+};
 
-export default reportWebVitals
+export default reportWebVitals;

web/src/routeTree.gen.ts

@@ -8,52 +8,52 @@
 // You should NOT make any changes in this file as it will be overwritten.
 // Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified.
 
-import { Route as rootRouteImport } from './routes/__root'
-import { Route as IndexRouteImport } from './routes/index'
+import { Route as rootRouteImport } from "./routes/__root";
+import { Route as IndexRouteImport } from "./routes/index";
 
 const IndexRoute = IndexRouteImport.update({
-  id: '/',
-  path: '/',
+  id: "/",
+  path: "/",
   getParentRoute: () => rootRouteImport,
-} as any)
+} as any);
 
 export interface FileRoutesByFullPath {
-  '/': typeof IndexRoute
+  "/": typeof IndexRoute;
 }
 export interface FileRoutesByTo {
-  '/': typeof IndexRoute
+  "/": typeof IndexRoute;
 }
 export interface FileRoutesById {
-  __root__: typeof rootRouteImport
-  '/': typeof IndexRoute
+  __root__: typeof rootRouteImport;
+  "/": typeof IndexRoute;
 }
 export interface FileRouteTypes {
-  fileRoutesByFullPath: FileRoutesByFullPath
-  fullPaths: '/'
-  fileRoutesByTo: FileRoutesByTo
-  to: '/'
-  id: '__root__' | '/'
-  fileRoutesById: FileRoutesById
+  fileRoutesByFullPath: FileRoutesByFullPath;
+  fullPaths: "/";
+  fileRoutesByTo: FileRoutesByTo;
+  to: "/";
+  id: "__root__" | "/";
+  fileRoutesById: FileRoutesById;
 }
 export interface RootRouteChildren {
-  IndexRoute: typeof IndexRoute
+  IndexRoute: typeof IndexRoute;
 }
 
-declare module '@tanstack/react-router' {
+declare module "@tanstack/react-router" {
   interface FileRoutesByPath {
-    '/': {
-      id: '/'
-      path: '/'
-      fullPath: '/'
-      preLoaderRoute: typeof IndexRouteImport
-      parentRoute: typeof rootRouteImport
-    }
+    "/": {
+      id: "/";
+      path: "/";
+      fullPath: "/";
+      preLoaderRoute: typeof IndexRouteImport;
+      parentRoute: typeof rootRouteImport;
+    };
   }
 }
 
 const rootRouteChildren: RootRouteChildren = {
   IndexRoute: IndexRoute,
-}
+};
 
 export const routeTree = rootRouteImport
   ._addFileChildren(rootRouteChildren)
-  ._addFileTypes<FileRouteTypes>()
+  ._addFileTypes<FileRouteTypes>();

web/src/App.tsx

@@ -101,13 +101,11 @@ const getOverallHealth = (state: StatusState): Status | "Unreachable" => {
 const getServices = (state: StatusState): Service[] => {
   if (state.mode !== "response") return [];
 
-  return Object.entries(state.status.services).map(
-    ([serviceId, serviceInfo]) => ({
-      name: serviceInfo.name,
-      status: serviceInfo.status,
-      icon: SERVICE_ICONS[serviceId] || SERVICE_ICONS.default,
-    })
-  );
+  return Object.entries(state.status.services).map(([serviceId, serviceInfo]) => ({
+    name: serviceInfo.name,
+    status: serviceInfo.status,
+    icon: SERVICE_ICONS[serviceId] || SERVICE_ICONS.default,
+  }));
 };
 
 const StatusDisplay = ({ status }: { status: Status | "Unreachable" }) => {
@@ -197,17 +195,11 @@ function App() {
       // Create a timeout promise
       const timeoutPromise = new Promise<never>((_, reject) => {
-        setTimeout(
-          () => reject(new Error("Request timeout")),
-          REQUEST_TIMEOUT
-        );
+        setTimeout(() => reject(new Error("Request timeout")), REQUEST_TIMEOUT);
       });
 
       // Race between the API call and timeout
-      const statusData = await Promise.race([
-        client.getStatus(),
-        timeoutPromise,
-      ]);
+      const statusData = await Promise.race([client.getStatus(), timeoutPromise]);
 
       const endTime = Date.now();
       const responseTime = endTime - startTime;
@@ -219,8 +211,7 @@ function App() {
         lastFetch: new Date(),
       });
     } catch (err) {
-      const errorMessage =
-        err instanceof Error ? err.message : "Failed to fetch data";
+      const errorMessage = err instanceof Error ? err.message : "Failed to fetch data";
 
       // Check if it's a timeout error
       if (errorMessage === "Request timeout") {
@@ -302,12 +293,8 @@ function App() {
       <Flex direction="column" gap="3" style={{ marginTop: "16px" }}>
         {shouldShowSkeleton
           ? // Show skeleton for 3 services during initial loading only
-            Array.from({ length: 3 }).map((_, index) => (
-              <SkeletonService key={index} />
-            ))
-          : services.map((service) => (
-              <ServiceStatus key={service.name} service={service} />
-            ))}
+            Array.from({ length: 3 }).map((_, index) => <SkeletonService key={index} />)
+          : services.map((service) => <ServiceStatus key={service.name} service={service} />)}
       </Flex>
 
       <Flex direction="column" gap="2" style={BORDER_STYLES}>
@@ -326,17 +313,11 @@ function App() {
       {shouldShowLastFetch ? (
         <TimingRow icon={Clock} name="Last Updated">
           {isLoading ? (
-            <Text
-              size="2"
-              style={{ paddingBottom: "2px" }}
-              color="gray"
-            >
+            <Text size="2" style={{ paddingBottom: "2px" }} color="gray">
               Loading...
             </Text>
           ) : (
-            <Tooltip
-              content={`as of ${state.lastFetch.toLocaleTimeString()}`}
-            >
+            <Tooltip content={`as of ${state.lastFetch.toLocaleTimeString()}`}>
               <abbr
                 style={{
                   cursor: "pointer",
@@ -363,12 +344,7 @@ function App() {
         </Flex>
       </Flex>
     </Card>
 
-    <Flex
-      justify="center"
-      style={{ marginTop: "12px" }}
-      gap="2"
-      align="center"
-    >
+    <Flex justify="center" style={{ marginTop: "12px" }} gap="2" align="center">
       {__APP_VERSION__ && (
         <Text
           size="1"

web/src/index.css

@@ -2,14 +2,12 @@
 body {
   margin: 0;
-  font-family:
-    -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu",
+  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu",
     "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif;
   -webkit-font-smoothing: antialiased;
   -moz-osx-font-smoothing: grayscale;
 }
 
 code {
-  font-family:
-    source-code-pro, Menlo, Monaco, Consolas, "Courier New", monospace;
+  font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", monospace;
 }

web/tsconfig.json

@@ -23,7 +23,7 @@
"noUncheckedSideEffectImports": true, "noUncheckedSideEffectImports": true,
"baseUrl": ".", "baseUrl": ".",
"paths": { "paths": {
"@/*": ["./src/*"], "@/*": ["./src/*"]
} }
} }
} }

web/vite.config.ts

@@ -7,10 +7,7 @@ import { readFileSync, existsSync } from "node:fs";
 // Extract version from Cargo.toml
 function getVersion() {
   const filename = "Cargo.toml";
-  const paths = [
-    resolve(__dirname, filename),
-    resolve(__dirname, "..", filename),
-  ];
+  const paths = [resolve(__dirname, filename), resolve(__dirname, "..", filename)];
 
   for (const path of paths) {
     try {