1 Commit

Author: dependabot[bot]
SHA1: 2edcec9ef9
Date: 2025-07-21 14:14:17 +00:00

chore(deps): bump tauri-build from 2.3.0 to 2.3.1 in /src-tauri
---
updated-dependencies:
- dependency-name: tauri-build
  dependency-version: 2.3.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

40 changed files with 2349 additions and 4118 deletions

.github/dependabot.yml (vendored, new file, 43 lines)

@@ -0,0 +1,43 @@
version: 2
updates:
  # Enable version updates for npm
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    reviewers:
      - "dependabot[bot]"
    assignees:
      - "dependabot[bot]"
    commit-message:
      prefix: "chore"
      include: "scope"
  # Enable version updates for Cargo
  - package-ecosystem: "cargo"
    directory: "/src-tauri"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    reviewers:
      - "dependabot[bot]"
    assignees:
      - "dependabot[bot]"
    commit-message:
      prefix: "chore"
      include: "scope"
  # Enable version updates for GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 5
    reviewers:
      - "dependabot[bot]"
    assignees:
      - "dependabot[bot]"
    commit-message:
      prefix: "chore"
      include: "scope"


@@ -9,111 +9,117 @@ env:
   CARGO_TERM_COLOR: always
 jobs:
-  build:
-    name: Build (${{ matrix.os }}${{ matrix.target && format(' / {0}', matrix.target) || '' }})
-    runs-on: ${{ matrix.os }}
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - os: ubuntu-22.04
-            target: x86_64-unknown-linux-gnu
-            artifact_name: byte-me-linux-x86_64
-          - os: windows-latest
-            target: x86_64-pc-windows-msvc
-            artifact_name: byte-me-windows-x86_64
-            artifact_extension: .exe
-          - os: macos-latest
-            target: aarch64-apple-darwin
-            artifact_name: byte-me-macos-aarch64
-          - os: macos-latest
-            target: x86_64-apple-darwin
-            artifact_name: byte-me-macos-x86_64
+  # Frontend checks
+  frontend-check:
+    name: Frontend Check
+    runs-on: ubuntu-latest
     steps:
-      - name: Checkout
-        uses: actions/checkout@v5
-      - name: Setup Rust
+      - uses: actions/checkout@v4
+      - name: Install pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          version: 10
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: "20"
+          cache: "pnpm"
+      - name: Install dependencies
+        run: pnpm install
+      - name: Check TypeScript
+        run: pnpm run build
+      - name: Format check
+        run: pnpm exec prettier --check .
+        continue-on-error: true
+  # Rust backend checks
+  rust-check:
+    name: Rust Check
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install Rust toolchain
         uses: dtolnay/rust-toolchain@stable
         with:
           components: rustfmt, clippy
-          targets: ${{ matrix.target }}
-      - name: Use sccache
-        if: runner.os != 'Linux' && github.event_name != 'release' && github.event_name != 'workflow_dispatch'
-        uses: mozilla-actions/sccache-action@v0.0.9
-      - name: Configure sccache
-        if: runner.os != 'Linux' && github.event_name != 'release' && github.event_name != 'workflow_dispatch'
-        run: |
-          # Enable GitHub Actions for cache storage
-          echo "SCCACHE_GHA_ENABLED=true" >> $GITHUB_ENV
-          # Use sccache for rustc
-          echo "RUSTC_WRAPPER=sccache" >> $GITHUB_ENV
-      - name: Rust cache
+      - name: Rust Cache
         uses: Swatinem/rust-cache@v2
         with:
           workspaces: src-tauri
-      - name: Setup pnpm
+      - name: Install Linux dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
+      - name: Format check
+        run: cargo fmt --manifest-path src-tauri/Cargo.toml --all -- --check
+      - name: Clippy
+        run: cargo clippy --manifest-path src-tauri/Cargo.toml --all-targets --all-features -- -D warnings
+      - name: Run tests
+        run: cargo test --manifest-path src-tauri/Cargo.toml --all-features
+  # Security audit
+  security-audit:
+    name: Security Audit
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@stable
+      - name: Install cargo-audit
+        uses: taiki-e/cache-cargo-install-action@v2
+        with:
+          tool: cargo-audit
+      - name: Run security audit
+        run: cargo audit --file src-tauri/Cargo.lock
+  # Check if Tauri app builds successfully
+  build-check:
+    name: Build Check
+    runs-on: ubuntu-latest
+    needs: [frontend-check, rust-check]
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
-          version: 10.25.0
-      - name: Setup Node
-        uses: actions/setup-node@v6
+          version: 10
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
         with:
-          node-version: 22.21.1
+          node-version: "20"
           cache: "pnpm"
-      - name: Install frontend dependencies
-        run: pnpm install --frozen-lockfile --prefer-offline
-      - name: Install backend dependencies
-        run: cargo fetch --manifest-path src-tauri/Cargo.toml
-      - name: Install Tauri CLI
-        uses: Xevion/cache-cargo-install-action@main
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@stable
+      - name: Rust Cache
+        uses: Swatinem/rust-cache@v2
         with:
-          tool: tauri-cli@2
-          locked: true
-      - name: Cache apt packages
-        if: runner.os == 'Linux'
-        uses: actions/cache@v4
-        with:
-          path: /var/cache/apt/archives
-          key: ${{ runner.os }}-apt-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-apt-
+          workspaces: src-tauri
       - name: Install Linux dependencies
-        if: runner.os == 'Linux'
         run: |
-          # Update package list and install dependencies in one command to reduce time
-          sudo apt-get update -qq && sudo apt-get install -y --no-install-recommends \
-            build-essential \
-            libxdo-dev \
-            libglib2.0-dev \
-            libwebkit2gtk-4.1-dev \
-            libayatana-appindicator3-dev \
-            librsvg2-dev \
-            patchelf \
-            musl-tools
-      - name: Generate frontend bindings
-        run: pnpm run generate-types
-      - name: Build app (tauri)
-        run: cargo tauri build --target ${{ matrix.target }}
-      - name: Upload binary artifact
-        uses: actions/upload-artifact@v5
-        with:
-          name: ${{ matrix.artifact_name }}
-          path: |
-            src-tauri/target/${{ matrix.target }}/release/byte-me${{ matrix.artifact_extension }}
-            src-tauri/target/${{ matrix.target }}/release/bundle/**/*
-          if-no-files-found: error
+          sudo apt-get update
+          sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
+      - name: Install frontend dependencies
+        run: pnpm install
+      - name: Build Tauri app
+        run: pnpm tauri build --no-bundle


@@ -1,94 +0,0 @@
name: Code Quality
permissions: read-all
on:
workflow_dispatch: # Allow manual triggering
pull_request:
branches: [master]
push:
branches: [master]
jobs:
rust-quality:
name: Rust Code Quality
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@nightly
with:
components: rustfmt, clippy
- name: Rust Cache
uses: Swatinem/rust-cache@v2
with:
workspaces: src-tauri
- name: Install Linux dependencies
run: |
sudo apt-get update
sudo apt-get install -y \
pkg-config \
build-essential \
libxdo-dev \
libssl-dev \
libglib2.0-dev \
libwebkit2gtk-4.1-dev \
libayatana-appindicator3-dev \
librsvg2-dev \
patchelf
- name: Install cargo-udeps
uses: taiki-e/install-action@cargo-udeps
- name: Check for unused dependencies
run: cargo +nightly udeps --manifest-path src-tauri/Cargo.toml --all-targets
- name: Install cargo-machete
uses: taiki-e/install-action@cargo-machete
- name: Check for unused Cargo.toml dependencies
run: cargo machete src-tauri/
- name: Install cargo-outdated
uses: taiki-e/cache-cargo-install-action@v2
with:
tool: cargo-outdated
- name: Check for outdated dependencies
run: cargo outdated --manifest-path src-tauri/Cargo.toml --exit-code 1
continue-on-error: true
frontend-quality:
name: Frontend Code Quality
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10.25.0
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: 22.21.1
cache: pnpm
- name: Install dependencies
run: pnpm install
- name: Check for unused dependencies
run: pnpm exec depcheck --ignore-bin-package=false --skip-missing=true
continue-on-error: true
- name: Check for outdated dependencies
run: pnpm outdated
continue-on-error: true
- name: Bundle size analysis
run: pnpm run build && du -sh dist/
continue-on-error: true


@@ -1,68 +0,0 @@
name: Release
on:
release:
types: [published]
jobs:
build-tauri:
permissions:
contents: write
strategy:
fail-fast: false
matrix:
include:
- platform: macos-latest
args: --target aarch64-apple-darwin
- platform: macos-latest
args: --target x86_64-apple-darwin
- platform: ubuntu-22.04
args: ""
- platform: windows-latest
args: ""
runs-on: ${{ matrix.platform }}
steps:
- uses: actions/checkout@v5
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10.25.0
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: 22.21.1
cache: pnpm
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.platform == 'macos-latest' && 'aarch64-apple-darwin,x86_64-apple-darwin' || '' }}
- name: Rust Cache
uses: Swatinem/rust-cache@v2
with:
workspaces: src-tauri
- name: Install dependencies (ubuntu only)
if: matrix.platform == 'ubuntu-22.04'
run: |
sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
- name: Install frontend dependencies
run: pnpm install
- name: Build Tauri app
uses: tauri-apps/tauri-action@v0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tagName: app-v__VERSION__
releaseName: "App v__VERSION__"
releaseBody: "See the assets to download this version and install."
releaseDraft: true
prerelease: false
args: ${{ matrix.args }}


@@ -3,35 +3,53 @@ name: Security Audit
 on:
   workflow_dispatch: # Allow manual triggering
   push:
+    paths:
+      - "**/Cargo.toml"
+      - "**/Cargo.lock"
+      - "**/package.json"
+      - "**/pnpm-lock.yaml"
 jobs:
   rust-audit:
     name: Rust Security Audit
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v4
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@stable
+      - name: Install cargo-audit
+        uses: taiki-e/cache-cargo-install-action@v2
+        with:
+          tool: cargo-audit
+      - name: Run cargo audit
+        run: cargo audit --file src-tauri/Cargo.lock
       - name: Install cargo-deny
-        uses: taiki-e/install-action@cargo-deny
+        uses: taiki-e/cache-cargo-install-action@v2
+        with:
+          tool: cargo-deny
       - name: Run cargo deny
-        run: cargo deny --manifest-path src-tauri/Cargo.toml check sources advisories bans --show-stats
+        run: cargo deny --manifest-path src-tauri/Cargo.toml check
   npm-audit:
     name: NPM Security Audit
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v4
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
-          version: 10.25.0
+          version: 10
       - name: Setup Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v4
         with:
-          node-version: "22.21.1"
+          node-version: "20"
           cache: "pnpm"
       - name: Install dependencies

.gitignore (vendored, 3 lines changed)

@@ -1,6 +1,3 @@
-src/bindings/*.ts
-src-tauri/bindings/*.ts
 # Seed data
 .data/*
 !.data/seed.ps1


@@ -1,3 +0,0 @@
src/bindings.ts
src-tauri/target/**
src-tauri/gen/**


@@ -1,4 +1,5 @@
 {
+  "ignore": ["src/bindings.ts"],
   "useTabs": true,
   "tabWidth": 2
 }


@@ -1,3 +1,3 @@
{
  "recommendations": ["tauri-apps.tauri-vscode", "rust-lang.rust-analyzer"]
}


@@ -1,37 +0,0 @@
# Default recipe - show available commands
default:
    @just --list

dev:
    cargo tauri dev

dev-build:
    cargo tauri build --debug

generate-types:
    cargo test --manifest-path src-tauri/Cargo.toml -- --test export_bindings

check-frontend:
    pnpm exec tsc --noEmit

check-backend:
    cargo clippy --manifest-path src-tauri/Cargo.toml

check: check-frontend check-backend

build-frontend: generate-types
    pnpm exec tsc
    pnpm exec vite build

build-backend:
    cargo build --manifest-path src-tauri/Cargo.toml

build: build-frontend build-backend

test-frontend:
    pnpm exec vitest run

test-backend:
    cargo nextest run --manifest-path src-tauri/Cargo.toml

test: test-frontend test-backend


@@ -1,14 +1,14 @@
-<!doctype html>
+<!DOCTYPE html>
 <html lang="en">
   <head>
     <meta charset="UTF-8" />
     <link rel="icon" type="image/svg+xml" href="/vite.svg" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>byte-me</title>
   </head>
   <body>
     <div id="root"></div>
     <script type="module" src="/src/main.tsx"></script>
   </body>
 </html>


@@ -1,38 +1,33 @@
 {
   "name": "byte-me",
   "private": true,
   "version": "0.1.0",
   "type": "module",
   "scripts": {
     "dev": "vite",
-    "build": "pnpm generate-types && tsc && vite build",
+    "build": "tsc && vite build",
     "preview": "vite preview",
-    "test": "vitest run",
-    "tauri": "tauri",
-    "generate-types": "cargo test --manifest-path src-tauri/Cargo.toml -- --test export_bindings"
+    "tauri": "tauri"
   },
   "dependencies": {
     "@nivo/core": "^0.99.0",
     "@nivo/line": "^0.99.0",
-    "@tailwindcss/vite": "^4.1.17",
-    "@tauri-apps/api": "^2.9.0",
-    "@tauri-apps/plugin-opener": "^2.5.2",
-    "lucide-react": "^0.548.0",
-    "react": "^19.2.1",
-    "react-dom": "^19.2.1",
-    "tailwindcss": "^4.1.17",
-    "ts-pattern": "^5.9.0"
+    "@tailwindcss/vite": "^4.1.11",
+    "@tauri-apps/api": "^2",
+    "@tauri-apps/plugin-opener": "^2",
+    "lucide-react": "^0.525.0",
+    "react": "^18.3.1",
+    "react-dom": "^18.3.1",
+    "tailwindcss": "^4.1.11",
+    "ts-pattern": "^5.7.1"
   },
   "devDependencies": {
-    "@tauri-apps/cli": "^2.9.3",
-    "@tsconfig/vite-react": "^7.0.2",
-    "@types/react": "^19.2.7",
-    "@types/react-dom": "^19.2.3",
-    "@vitejs/plugin-react": "^5.1.2",
-    "prettier": "^3.7.4",
-    "tsx": "^4.20.6",
-    "typescript": "~5.9.3",
-    "vite": "^7.2.7",
+    "@tauri-apps/cli": "^2",
+    "@types/react": "^18.3.1",
+    "@types/react-dom": "^18.3.1",
+    "@vitejs/plugin-react": "^4.3.4",
+    "typescript": "~5.6.2",
+    "vite": "^6.0.3",
     "vitest": "^3.2.4"
   }
 }

pnpm-lock.yaml (generated, 1697 lines changed)

File diff suppressed because it is too large.


@@ -1,3 +1,3 @@
 onlyBuiltDependencies:
-  - "@tailwindcss/oxide"
+  - '@tailwindcss/oxide'
   - esbuild


@@ -1,153 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended",
":enableVulnerabilityAlertsWithLabel(security)"
],
"schedule": ["before 6am on Monday"],
"timezone": "UTC",
"prConcurrentLimit": 10,
"prHourlyLimit": 0,
"semanticCommits": "enabled",
"dependencyDashboard": true,
"labels": ["dependencies"],
"rangeStrategy": "bump",
"postUpdateOptions": ["pnpmDedupe"],
"packageRules": [
{
"description": "Automerge non-major dev dependencies",
"matchDepTypes": ["devDependencies"],
"matchUpdateTypes": ["minor", "patch"],
"automerge": true,
"automergeType": "pr",
"ignoreTests": false
},
{
"description": "Automerge patch updates for production dependencies",
"matchDepTypes": ["dependencies"],
"matchUpdateTypes": ["patch"],
"automerge": true,
"automergeType": "pr",
"ignoreTests": false
},
{
"description": "Group all Tauri packages together",
"groupName": "Tauri",
"matchManagers": ["npm"],
"automerge": false,
"labels": ["dependencies", "tauri"],
"matchPackageNames": ["/^@tauri-apps//", "/^tauri-/"]
},
{
"description": "Group Tauri Rust dependencies",
"groupName": "Tauri (Rust)",
"matchPackageNames": ["tauri", "tauri-build"],
"matchManagers": ["cargo"],
"automerge": false,
"labels": ["dependencies", "tauri", "rust"]
},
{
"description": "Group React ecosystem updates",
"groupName": "React",
"matchPackageNames": ["react", "react-dom", "/^@types/react/"],
"labels": ["dependencies", "react"]
},
{
"description": "Group TypeScript and build tooling",
"groupName": "Build tooling",
"matchPackageNames": [
"typescript",
"vite",
"@vitejs/plugin-react",
"vite-tsconfig-paths"
],
"labels": ["dependencies", "tooling"]
},
{
"description": "Group ESLint and related plugins",
"groupName": "ESLint",
"labels": ["dependencies", "linting"],
"matchPackageNames": [
"/^eslint/",
"/^@eslint//",
"/^@typescript-eslint//"
]
},
{
"description": "Group testing frameworks",
"groupName": "Testing",
"labels": ["dependencies", "testing"],
"matchPackageNames": ["/^vitest/", "/^@vitest//"]
},
{
"description": "Group TailwindCSS and plugins",
"groupName": "TailwindCSS",
"labels": ["dependencies", "styling"],
"matchPackageNames": ["/^tailwindcss/", "/^@tailwindcss//"]
},
{
"description": "Group Nivo chart libraries",
"groupName": "Nivo",
"labels": ["dependencies", "charts"],
"matchPackageNames": ["/^@nivo//"]
},
{
"description": "Separate major updates for manual review",
"matchUpdateTypes": ["major"],
"automerge": false,
"labels": ["dependencies", "major-update"],
"reviewers": []
},
{
"description": "High priority security updates",
"matchUpdateTypes": ["security"],
"labels": ["dependencies", "security"],
"automerge": false,
"schedule": ["at any time"]
},
{
"description": "Rust patch updates - automerge",
"matchManagers": ["cargo"],
"matchUpdateTypes": ["patch"],
"automerge": true,
"automergeType": "pr"
},
{
"description": "Rust minor updates - review required",
"matchManagers": ["cargo"],
"matchUpdateTypes": ["minor"],
"automerge": false,
"labels": ["dependencies", "rust", "minor-update"]
},
{
"description": "Rust major updates - careful review",
"matchManagers": ["cargo"],
"matchUpdateTypes": ["major"],
"automerge": false,
"labels": ["dependencies", "rust", "major-update"]
},
{
"description": "Pin ts-rs (type generation critical)",
"matchPackageNames": ["ts-rs"],
"matchManagers": ["cargo"],
"automerge": false,
"labels": ["dependencies", "rust", "type-generation"]
}
],
"cargo": {
"enabled": true,
"rangeStrategy": "bump"
},
"npm": {
"enabled": true,
"rangeStrategy": "bump"
},
"lockFileMaintenance": {
"enabled": true,
"automerge": true,
"schedule": ["before 6am on Monday"]
},
"platformAutomerge": true,
"ignoreTests": false,
"commitMessagePrefix": "chore(deps):"
}

src-tauri/Cargo.lock (generated, 2136 lines changed)

File diff suppressed because it is too large.


@@ -15,14 +15,15 @@ name = "byte_me_lib"
 crate-type = ["staticlib", "cdylib", "rlib"]
 [build-dependencies]
-tauri-build = { version = "2.5.3", features = [] }
+tauri-build = { version = "2", features = [] }
 [dependencies]
-tauri = { version = "2.9.4", features = [] }
-tauri-plugin-opener = "2.5.2"
-serde = { version = "1.0.228", features = ["derive"] }
+tauri = { version = "2.0", features = [] }
+tauri-plugin-opener = "2"
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
 ffprobe = "0.4.0"
-ts-rs = { version = "11.1.0", features = ["format"] }
-infer = "0.19.0"
-tracing = "0.1.43"
-tracing-subscriber = { version = "0.3.22", features = ["env-filter"] }
+specta = "=2.0.0-rc.22"
+specta-typescript = "0.0.9"
+tauri-specta = { version = "=2.0.0-rc.21", features = ["derive", "typescript"] }


@@ -1,7 +1,10 @@
 {
   "$schema": "../gen/schemas/desktop-schema.json",
   "identifier": "default",
   "description": "Capability for the main window",
   "windows": ["main"],
-  "permissions": ["core:default", "opener:default"]
+  "permissions": [
+    "core:default",
+    "opener:default"
+  ]
 }


@@ -1,39 +0,0 @@
[graph]
targets = [
"x86_64-unknown-linux-gnu",
"x86_64-pc-windows-msvc",
"aarch64-apple-darwin",
"x86_64-apple-darwin",
]
all-features = true
no-default-features = false
[output]
feature-depth = 1
[advisories]
ignore = [
"RUSTSEC-2024-0429",
]
unmaintained = "workspace"
[licenses]
allow = []
confidence-threshold = 0.8
exceptions = []
[licenses.private]
ignore = false
registries = []
[bans]
multiple-versions = "allow"
wildcards = "allow"
highlight = "all"
workspace-default-features = "allow"
external-default-features = "allow"
allow = []
[sources]
unknown-registry = "warn"
unknown-git = "warn"


@@ -1,109 +0,0 @@
use crate::models::StreamDetail;
use tracing::{debug, info, instrument};
#[instrument(skip(info), fields(stream_count = info.streams.len()))]
pub fn extract_streams(info: &ffprobe::FfProbe) -> Vec<StreamDetail> {
let mut streams = Vec::new();
let mut video_count = 0;
let mut audio_count = 0;
let mut subtitle_count = 0;
info!(total_streams = info.streams.len(), "Extracting streams from media file");
for (index, stream) in info.streams.iter().enumerate() {
match stream.codec_type.as_deref() {
Some("video") => {
video_count += 1;
let codec = stream
.codec_name
.clone()
.unwrap_or_else(|| "unknown".to_string());
let width = stream.width.map(|w| w as u32);
let height = stream.height.map(|h| h as u32);
let bit_rate = stream.bit_rate.as_ref().map(|b| b.to_string());
let frame_rate = Some(stream.r_frame_rate.clone());
debug!(
stream_index = index,
codec = %codec,
width = ?width,
height = ?height,
bit_rate = ?bit_rate,
frame_rate = ?frame_rate,
"Extracted video stream"
);
streams.push(StreamDetail::Video {
codec,
width,
height,
bit_rate,
frame_rate,
});
}
Some("audio") => {
audio_count += 1;
let codec = stream
.codec_name
.clone()
.unwrap_or_else(|| "unknown".to_string());
let sample_rate = stream.sample_rate.clone();
let channels = stream.channels.map(|c| c as u32);
let bit_rate = stream.bit_rate.as_ref().map(|b| b.to_string());
debug!(
stream_index = index,
codec = %codec,
sample_rate = ?sample_rate,
channels = ?channels,
bit_rate = ?bit_rate,
"Extracted audio stream"
);
streams.push(StreamDetail::Audio {
codec,
sample_rate,
channels,
bit_rate,
});
}
Some("subtitle") => {
subtitle_count += 1;
let codec = stream
.codec_name
.clone()
.unwrap_or_else(|| "unknown".to_string());
let language = stream.tags.as_ref().and_then(|tags| tags.language.clone());
debug!(
stream_index = index,
codec = %codec,
language = ?language,
"Extracted subtitle stream"
);
streams.push(StreamDetail::Subtitle {
codec,
language,
});
}
other => {
debug!(
stream_index = index,
codec_type = ?other,
"Skipping unknown stream type"
);
}
}
}
info!(
video_streams = video_count,
audio_streams = audio_count,
subtitle_streams = subtitle_count,
total_extracted = streams.len(),
"Stream extraction completed"
);
streams
}


@@ -1,306 +1,91 @@
pub mod ff; use serde::{Deserialize, Serialize};
pub mod media; use specta::Type;
pub mod models; use specta_typescript::Typescript;
pub mod strings;
use ff::extract_streams;
use media::{detect_media_type, is_media_file};
use models::{StreamResult, StreamResultError, File, FileCandidacy, BitrateData, BitrateFrame};
use strings::transform_filename;
use std::path::Path; use std::path::Path;
use std::process::Command; use tauri_specta::{collect_commands, Builder};
use tracing::{debug, error, info, instrument, warn};
// detection, helpers moved to modules above #[derive(Serialize, Deserialize, Debug, Clone, Type)]
struct StreamResult {
path: String,
filename: String,
streams: Vec<StreamDetail>,
}
#[derive(Serialize, Deserialize, Debug, Clone, Type)]
enum StreamDetail {
Video { codec: String },
Audio { codec: String },
Subtitle { codec: String },
}
#[derive(Serialize, Deserialize, Debug, Clone, Type)]
struct StreamResultError {
filename: Option<String>,
reason: String,
}
#[tauri::command] #[tauri::command]
#[instrument(skip(paths), fields(file_count = paths.len()))] #[specta::specta]
fn has_streams(paths: Vec<String>) -> Result<Vec<StreamResult>, StreamResultError> { fn has_streams(paths: Vec<String>) -> Result<Vec<StreamResult>, StreamResultError> {
info!(file_count = paths.len(), "Processing files for stream analysis"); paths
let results = paths
.into_iter() .into_iter()
.enumerate() .map(|path_str| {
.map(|(index, path_str)| {
let path = Path::new(&path_str); let path = Path::new(&path_str);
let filename = path let filename = path.file_name().unwrap().to_str().unwrap().to_string();
.file_name()
.and_then(|name| name.to_str())
.unwrap_or("unknown")
.to_string();
// Log full path only on first occurrence, then use truncated filename
if index == 0 {
debug!(full_path = %path_str, filename = %filename, "Processing first file");
} else {
let truncated_name = transform_filename(&filename, 15);
debug!(filename = %truncated_name, "Processing file");
}
// Check if file exists
if !path.exists() { if !path.exists() {
let truncated_name = transform_filename(&filename, 15);
warn!(filename = %truncated_name, "File does not exist");
return Err(StreamResultError { return Err(StreamResultError {
filename: Some(filename), filename: Some(filename),
reason: "File does not exist".to_string(), reason: "File does not exist".to_string(),
error_type: "not_found".to_string(),
}); });
} }
// Check if it's a file (not directory)
if !path.is_file() { if !path.is_file() {
let truncated_name = transform_filename(&filename, 15);
warn!(filename = %truncated_name, "Path is not a file");
return Err(StreamResultError { return Err(StreamResultError {
filename: Some(filename), filename: Some(filename),
reason: "Not a file (directory or other)".to_string(), reason: "Not a file".to_string(),
error_type: "not_file".to_string(),
}); });
} }
// Get file size match ffprobe::ffprobe(&path_str) {
let size = std::fs::metadata(&path_str) Ok(info) => {
.map(|metadata| metadata.len()) dbg!(info);
.unwrap_or(0); Ok(StreamResult {
filename,
let truncated_name = transform_filename(&filename, 15); path: path_str,
debug!(filename = %truncated_name, size = size, "File metadata retrieved"); streams: vec![],
})
// Detect media type using magic numbers and fallback to extensions
let media_type = detect_media_type(path);
debug!(filename = %truncated_name, media_type = ?media_type, "Media type detected");
// Only try to analyze media files with ffprobe
if is_media_file(&media_type) {
info!(filename = %truncated_name, media_type = ?media_type, "Analyzing media file with ffprobe");
// Analyze with ffprobe
match ffprobe::ffprobe(&path_str) {
Ok(info) => {
let streams = extract_streams(&info);
let duration = info
.format
.duration
.and_then(|dur_str| dur_str.parse::<f64>().ok());
info!(
filename = %truncated_name,
stream_count = streams.len(),
duration = ?duration,
"Successfully analyzed media file"
);
Ok(StreamResult {
filename,
path: path_str,
media_type,
duration,
size,
streams,
})
}
Err(err) => {
error!(filename = %truncated_name, error = %err, "Failed to analyze media file with ffprobe");
Err(StreamResultError {
filename: Some(filename),
reason: format!("Could not analyze media file: {err}"),
error_type: "analysis_failed".to_string(),
})
}
} }
} else { Err(err) => {
debug!(filename = %truncated_name, media_type = ?media_type, "Skipping non-media file"); eprintln!("Could not analyze file with ffprobe: {:?}", err);
// For non-media files, return an error indicating it's not a media file Err(StreamResultError {
Err(StreamResultError { filename: Some(filename),
filename: Some(filename), reason: "Could not analyze file with ffprobe".to_string(),
reason: format!("Not a media file (detected as {media_type:?})"), })
error_type: "not_media".to_string(),
})
}
})
.collect::<Result<Vec<_>, _>>();
match &results {
Ok(streams) => {
info!(successful_files = streams.len(), "Successfully processed all files");
}
Err(_) => {
warn!("Some files failed to process");
}
}
results
}
#[tauri::command]
#[instrument(skip(paths), fields(file_count = paths.len()))]
fn analyze_files(paths: Vec<String>) -> Vec<File> {
info!(file_count = paths.len(), "Analyzing files for candidacy");
paths
.into_iter()
.enumerate()
.map(|(index, path_str)| {
let path = Path::new(&path_str);
let filename = path
.file_name()
.and_then(|name| name.to_str())
.unwrap_or("unknown")
.to_string();
// Log full path only on first occurrence, then use truncated filename
if index == 0 {
debug!(full_path = %path_str, filename = %filename, "Processing first file");
} else {
let truncated_name = transform_filename(&filename, 15);
debug!(filename = %truncated_name, "Processing file");
}
// Get file size
let size = std::fs::metadata(&path_str)
.map(|metadata| metadata.len())
.unwrap_or(0) as u32;
let truncated_name = transform_filename(&filename, 15);
debug!(filename = %truncated_name, size = size, "File metadata retrieved");
// Check if file exists
if !path.exists() {
let truncated_name = transform_filename(&filename, 15);
warn!(filename = %truncated_name, "File does not exist");
return File {
filename,
size,
candidacy: FileCandidacy::Error {
reason: "File does not exist".to_string(),
},
};
}
// Check if it's a file (not directory)
if !path.is_file() {
let truncated_name = transform_filename(&filename, 15);
warn!(filename = %truncated_name, "Path is not a file");
return File {
filename,
size,
candidacy: FileCandidacy::Error {
reason: "Not a file (directory or other)".to_string(),
},
};
}
// Detect media type using magic numbers and fallback to extensions
let media_type = detect_media_type(path);
debug!(filename = %truncated_name, media_type = ?media_type, "Media type detected");
// Check if it's a media file
if is_media_file(&media_type) {
info!(filename = %truncated_name, media_type = ?media_type, "Valid media file detected");
File {
filename,
size,
candidacy: FileCandidacy::Success {
file_type: media_type,
},
}
} else {
debug!(filename = %truncated_name, media_type = ?media_type, "Non-media file detected");
File {
filename,
size,
candidacy: FileCandidacy::Error {
reason: format!("Not a media file (detected as {media_type:?})"),
},
} }
} }
}) })
.collect() .collect::<Result<Vec<_>, _>>()
}
#[tauri::command]
#[instrument(skip(path), fields(path = %path))]
fn extract_bitrate_data(path: String) -> Result<BitrateData, String> {
info!(path = %path, "Extracting bitrate data from video file");
let path_obj = Path::new(&path);
let filename = path_obj
.file_name()
.and_then(|name| name.to_str())
.unwrap_or("unknown")
.to_string();
// Check if file exists
if !path_obj.exists() {
error!(filename = %filename, "File does not exist");
return Err("File does not exist".to_string());
}
// Run ffprobe to get frame packet sizes
// -v quiet: suppress ffprobe info
// -select_streams v:0: only first video stream
// -show_entries frame=pkt_size: only show packet size
// -of csv=p=0: output as CSV without headers
info!(filename = %filename, "Running ffprobe to extract frame data");
let output = Command::new("ffprobe")
.args([
"-v", "quiet",
"-select_streams", "v:0",
"-show_entries", "frame=pkt_size",
"-of", "csv=p=0",
&path
])
.output()
.map_err(|e| {
error!(error = %e, "Failed to execute ffprobe");
format!("Failed to execute ffprobe: {e}")
})?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
error!(stderr = %stderr, "ffprobe command failed");
return Err(format!("ffprobe failed: {stderr}"));
}
let stdout = String::from_utf8_lossy(&output.stdout);
debug!(line_count = stdout.lines().count(), "Parsing ffprobe output");
let frames: Vec<BitrateFrame> = stdout
.lines()
.enumerate()
.filter_map(|(index, line)| {
line.trim().parse::<u64>().ok().map(|packet_size| BitrateFrame {
frame_num: index as u32,
packet_size,
})
})
.collect();
if frames.is_empty() {
warn!(filename = %filename, "No frame data extracted");
return Err("No frame data could be extracted from file".to_string());
}
info!(
filename = %filename,
frame_count = frames.len(),
"Successfully extracted bitrate data"
);
Ok(BitrateData {
id: filename,
frames,
})
} }
#[cfg_attr(mobile, tauri::mobile_entry_point)] #[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() { pub fn run() {
info!("Initializing Tauri application"); let builder = Builder::<tauri::Wry>::new()
// Then register them (separated by a comma)
.commands(collect_commands![has_streams,]);
#[cfg(debug_assertions)] // <- Only export on non-release builds
builder
.export(Typescript::default(), "../src/bindings.ts")
.expect("Failed to export typescript bindings");
tauri::Builder::default() tauri::Builder::default()
.plugin(tauri_plugin_opener::init()) .plugin(tauri_plugin_opener::init())
.invoke_handler(tauri::generate_handler![has_streams, analyze_files, extract_bitrate_data]) .invoke_handler(tauri::generate_handler![has_streams])
.setup(move |app| {
// Ensure you mount your events!
builder.mount_events(app);
Ok(())
})
.run(tauri::generate_context!()) .run(tauri::generate_context!())
.expect("error while running tauri application"); .expect("error while running tauri application");
} }
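The `extract_bitrate_data` command in the hunk above returns one BitrateData per file, built from ffprobe's per-frame packet sizes. A minimal sketch of how the frontend side consumes it, mirroring the App.tsx change later in this diff; the inline type aliases stand in for the generated bindings and the helper name is illustrative:

// Sketch: call the extract_bitrate_data command and reshape the result into
// the { id, data: [{ x, y }] } series format the Nivo line chart expects.
import { invoke } from "@tauri-apps/api/core";

type BitrateFrame = { frame_num: number; packet_size: number | bigint };
type BitrateData = { id: string; frames: BitrateFrame[] };
type Frame = { id: string; data: { x: string | number; y: number }[] };

export async function loadBitrateSeries(path: string): Promise<Frame> {
  const bitrate = await invoke<BitrateData>("extract_bitrate_data", { path });
  return {
    id: bitrate.id,
    data: bitrate.frames.map((f) => ({
      x: f.frame_num,
      // packet_size is a u64 on the Rust side; coerce in case it arrives as a bigint.
      y: Number(f.packet_size),
    })),
  };
}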


@@ -1,19 +1,6 @@
 // Prevents additional console window on Windows in release, DO NOT REMOVE!!
 #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
 fn main() {
-    // Initialize tracing with env-filter
-    tracing_subscriber::registry()
-        .with(
-            EnvFilter::from_default_env()
-                .add_directive("byte_me=debug".parse().unwrap())
-                .add_directive("tauri=info".parse().unwrap()),
-        )
-        .with(tracing_subscriber::fmt::layer())
-        .init();
-    tracing::info!("Starting byte-me application");
     byte_me_lib::run()
 }


@@ -1,170 +0,0 @@
use crate::models::MediaType;
use std::{fs::File, io::Read, path::Path};
use tracing::{debug, instrument, trace, warn};
#[instrument(skip(path), fields(path = %path.display()))]
pub fn detect_media_type(path: &Path) -> MediaType {
debug!("Starting media type detection");
// First try to detect using infer crate (magic number detection)
if let Ok(mut file) = File::open(path) {
let mut buffer = [0; 512];
if let Ok(bytes_read) = file.read(&mut buffer) {
trace!(bytes_read = bytes_read, "Read file header for magic number detection");
if let Some(kind) = infer::get(&buffer[..bytes_read]) {
let mime_type = kind.mime_type();
debug!(mime_type = %mime_type, "Detected MIME type from magic numbers");
let media_type = match mime_type {
// Audio types
"audio/mpeg" | "audio/mp3" | "audio/m4a" | "audio/ogg" | "audio/x-flac"
| "audio/x-wav" | "audio/amr" | "audio/aac" | "audio/x-aiff"
| "audio/x-dsf" | "audio/x-ape" | "audio/midi" => MediaType::Audio,
// Video types
"video/mp4" | "video/x-m4v" | "video/x-matroska" | "video/webm"
| "video/quicktime" | "video/x-msvideo" | "video/x-ms-wmv" | "video/mpeg"
| "video/x-flv" => MediaType::Video,
// Image types
"image/jpeg"
| "image/png"
| "image/gif"
| "image/webp"
| "image/x-canon-cr2"
| "image/tiff"
| "image/bmp"
| "image/heif"
| "image/avif"
| "image/vnd.ms-photo"
| "image/vnd.adobe.photoshop"
| "image/vnd.microsoft.icon"
| "image/openraster"
| "image/vnd.djvu" => MediaType::Image,
// Document types
"application/pdf"
| "application/rtf"
| "application/msword"
| "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
| "application/vnd.ms-excel"
| "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
| "application/vnd.ms-powerpoint"
| "application/vnd.openxmlformats-officedocument.presentationml.presentation"
| "application/vnd.oasis.opendocument.text"
| "application/vnd.oasis.opendocument.spreadsheet"
| "application/vnd.oasis.opendocument.presentation" => MediaType::Document,
// Archive types
"application/zip"
| "application/x-tar"
| "application/vnd.rar"
| "application/gzip"
| "application/x-bzip2"
| "application/vnd.bzip3"
| "application/x-7z-compressed"
| "application/x-xz"
| "application/x-shockwave-flash"
| "application/octet-stream"
| "application/postscript"
| "application/vnd.sqlite3"
| "application/x-nintendo-nes-rom"
| "application/x-google-chrome-extension"
| "application/vnd.ms-cab-compressed"
| "application/vnd.debian.binary-package"
| "application/x-unix-archive"
| "application/x-compress"
| "application/x-lzip"
| "application/x-rpm"
| "application/dicom"
| "application/zstd"
| "application/x-lz4"
| "application/x-ole-storage"
| "application/x-cpio"
| "application/x-par2"
| "application/epub+zip"
| "application/x-mobipocket-ebook" => MediaType::Archive,
// Executable types
"application/vnd.microsoft.portable-executable"
| "application/x-executable"
| "application/llvm"
| "application/x-mach-binary"
| "application/java"
| "application/vnd.android.dex"
| "application/vnd.android.dey"
| "application/x-x509-ca-cert" => MediaType::Executable,
// Library types (covered by executable types above, but keeping for clarity)
_ => MediaType::Unknown,
};
debug!(media_type = ?media_type, "Detected media type from magic numbers");
return media_type;
} else {
debug!("Magic number detection failed, falling back to extension-based detection");
}
} else {
warn!("Failed to read file for magic number detection");
}
} else {
warn!("Failed to open file for magic number detection");
}
// Fallback to extension-based detection
if let Some(extension) = path.extension() {
let ext_str = extension.to_str().unwrap_or("").to_lowercase();
debug!(extension = %ext_str, "Detecting media type from file extension");
let media_type = match ext_str.as_str() {
// Audio extensions
"mp3" | "wav" | "flac" | "ogg" | "m4a" | "aac" | "wma" | "mid" | "amr" | "aiff"
| "dsf" | "ape" => MediaType::Audio,
// Video extensions
"mp4" | "mkv" | "webm" | "mov" | "avi" | "wmv" | "mpg" | "flv" | "m4v" => {
MediaType::Video
}
// Image extensions
"gif" | "png" | "jpg" | "jpeg" | "bmp" | "tiff" | "webp" | "cr2" | "heif" | "avif"
| "jxr" | "psd" | "ico" | "ora" | "djvu" => MediaType::Image,
// Document extensions
"txt" | "md" | "pdf" | "doc" | "docx" | "xls" | "xlsx" | "ppt" | "pptx" | "odt"
| "ods" | "odp" | "rtf" => MediaType::Document,
// Archive extensions
"zip" | "rar" | "7z" | "tar" | "gz" | "bz2" | "bz3" | "xz" | "swf" | "sqlite"
| "nes" | "crx" | "cab" | "deb" | "ar" | "Z" | "lz" | "rpm" | "dcm" | "zst" | "lz4"
| "cpio" | "par2" | "epub" | "mobi" => MediaType::Archive,
// Executable extensions
"exe" | "dll" | "msi" | "dmg" | "pkg" | "app" | "elf" | "bc" | "mach" | "class"
| "dex" | "dey" | "der" | "obj" => MediaType::Executable,
// Library extensions
"so" | "dylib" => MediaType::Library,
_ => MediaType::Unknown,
};
debug!(media_type = ?media_type, "Detected media type from extension");
media_type
} else {
debug!("No file extension found, returning Unknown");
MediaType::Unknown
}
}
#[instrument(skip(media_type))]
pub fn is_media_file(media_type: &MediaType) -> bool {
let is_media = matches!(
media_type,
MediaType::Audio | MediaType::Video | MediaType::Image
);
debug!(media_type = ?media_type, is_media = is_media, "Checking if file is media type");
is_media
}


@@ -1,103 +0,0 @@
use serde::{Deserialize, Serialize};
use ts_rs::TS;
#[derive(Serialize, Deserialize, Debug, Clone, TS)]
pub enum MediaType {
Audio,
Video,
Image,
Document,
Executable,
Archive,
Library,
Unknown,
}
#[derive(Serialize, Deserialize, Debug, Clone, TS)]
pub struct StreamResult {
pub path: String,
pub filename: String,
pub media_type: MediaType,
pub duration: Option<f64>,
pub size: u64,
pub streams: Vec<StreamDetail>,
}
#[derive(Serialize, Deserialize, Debug, Clone, TS)]
pub enum StreamDetail {
Video {
codec: String,
width: Option<u32>,
height: Option<u32>,
bit_rate: Option<String>,
frame_rate: Option<String>,
},
Audio {
codec: String,
sample_rate: Option<String>,
channels: Option<u32>,
bit_rate: Option<String>,
},
Subtitle {
codec: String,
language: Option<String>,
},
}
#[derive(Serialize, Deserialize, Debug, Clone, TS)]
pub struct StreamResultError {
pub filename: Option<String>,
pub reason: String,
pub error_type: String,
}
// New types for simplified drop overlay
#[derive(Serialize, Deserialize, Debug, Clone, TS)]
pub struct File {
pub filename: String,
pub size: u32,
pub candidacy: FileCandidacy,
}
#[derive(Serialize, Deserialize, Debug, Clone, TS)]
pub enum FileCandidacy {
Success {
#[serde(rename = "type")]
file_type: MediaType,
},
Error {
reason: String,
},
Loading,
}
// Bitrate visualization types
#[derive(Serialize, Deserialize, Debug, Clone, TS)]
pub struct BitrateFrame {
pub frame_num: u32,
pub packet_size: u64,
}
#[derive(Serialize, Deserialize, Debug, Clone, TS)]
pub struct BitrateData {
pub id: String,
pub frames: Vec<BitrateFrame>,
}
#[cfg(test)]
mod tests {
#[test]
fn export_bindings() {
// This will generate TypeScript bindings when you run `cargo test export_bindings`
use super::*;
StreamDetail::export_all_to("../src/bindings").expect("Failed to export bindings");
StreamResult::export_all_to("../src/bindings").expect("Failed to export bindings");
StreamResultError::export_all_to("../src/bindings").expect("Failed to export bindings");
MediaType::export_all_to("../src/bindings").expect("Failed to export bindings");
File::export_all_to("../src/bindings").expect("Failed to export bindings");
FileCandidacy::export_all_to("../src/bindings").expect("Failed to export bindings");
BitrateFrame::export_all_to("../src/bindings").expect("Failed to export bindings");
BitrateData::export_all_to("../src/bindings").expect("Failed to export bindings");
}
}
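The export test above writes one .ts file per type into src/bindings/, which the frontend imports further down in this diff. Roughly, the generated declarations come out as below; this is a sketch assuming default ts-rs and serde settings, not the verbatim generated output:

// Approximate shape of the ts-rs output (src/bindings/*.ts). Externally tagged
// serde enums become single-key object unions; unit variants such as
// FileCandidacy::Loading become bare string literals, which is what the
// ts-pattern matches in drop-overlay.tsx rely on.
export type MediaType =
  | "Audio"
  | "Video"
  | "Image"
  | "Document"
  | "Executable"
  | "Archive"
  | "Library"
  | "Unknown";

export type FileCandidacy =
  | { Success: { type: MediaType } } // field renamed via #[serde(rename = "type")]
  | { Error: { reason: string } }
  | "Loading";

export type File = {
  filename: string;
  size: number;
  candidacy: FileCandidacy;
};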


@@ -1,122 +0,0 @@
/// Transforms a filename to fit within a character limit while preserving the most useful context
///
/// This function prioritizes preserving:
/// 1. File extension (if reasonable length ≤ 5 chars including dot)
/// 2. Beginning of filename (for identification)
/// 3. End of filename before extension (often contains important info like numbers)
///
/// # Arguments
/// * `filename` - The filename to transform
/// * `limit` - Maximum number of characters
///
/// # Returns
/// * Transformed filename that fits within the limit, using ellipsis (...) to indicate truncation
///
/// # Examples
/// ```
/// use byte_me_lib::strings::transform_filename;
///
/// // Short filenames remain unchanged
/// assert_eq!(transform_filename("test.mp4", 20), "test.mp4");
///
/// // Long filename with extension - preserve extension and context
/// assert_eq!(transform_filename("very_long_video_file_name.mp4", 18), "ver...ile_name.mp4");
///
/// // Numeric sequences - preserve start and end numbers
/// assert_eq!(transform_filename("43509374693.TS.mp4", 15), "435...93.TS.mp4");
///
/// // No extension - preserve start and end of name
/// assert_eq!(transform_filename("very_long_document_name", 15), "ver...ment_name");
///
/// // Long extension treated as part of name
/// assert_eq!(transform_filename("file.verylongextension", 15), "fil...extension");
/// ```
pub fn transform_filename(filename: &str, limit: usize) -> String {
// Handle edge cases
if limit == 0 || filename.is_empty() {
return String::new();
}
if filename.len() <= limit {
return filename.to_string();
}
// Find potential extension (last dot, not at start or end)
let extension_start = filename
.rfind('.')
.filter(|&pos| pos > 0 && pos < filename.len() - 1);
let (name_part, extension_part) = if let Some(ext_pos) = extension_start {
let ext = &filename[ext_pos..];
// Only treat as extension if it's reasonable length (≤ 5 chars including dot)
// and doesn't contain additional dots (compound extensions like .TS.mp4)
if ext.len() <= 5 && !ext[1..].contains('.') {
(&filename[..ext_pos], ext)
} else {
(filename, "")
}
} else {
(filename, "")
};
// If even just the extension is too long, truncate the whole thing
if extension_part.len() >= limit {
return truncate_string(filename, limit);
}
// Calculate space available for the name part
let name_limit = limit - extension_part.len();
// If name fits in available space, no truncation needed
if name_part.len() <= name_limit {
return filename.to_string();
}
// Need to truncate the name part
let truncated_name = truncate_string(name_part, name_limit);
format!("{}{}", truncated_name, extension_part)
}
/// Helper function to truncate a string with ellipsis, preserving start and end context
pub fn truncate_string(s: &str, limit: usize) -> String {
if s.len() <= limit {
return s.to_string();
}
// For very small limits, just truncate without ellipsis
if limit < 5 {
return s.chars().take(limit).collect();
}
// For limits 5 and above, use start + "..." + end pattern
// Strategy: Use 3 chars for ellipsis, split remaining between start and end
// But ensure we get meaningful chunks from both ends
let available_for_content = limit - 3; // Reserve 3 for "..."
// Determine start and end characters based on available space
let (start_chars, end_chars) = if available_for_content <= 4 {
// Very limited space: minimal start, rest for end
(1, available_for_content - 1)
} else if available_for_content <= 6 {
// Medium space: balanced approach
let start = available_for_content / 2;
(start, available_for_content - start)
} else {
// Plenty of space: cap start at 3, use more for end to preserve context
let start = 3;
(start, available_for_content - start)
};
let start: String = s.chars().take(start_chars).collect();
let end: String = s
.chars()
.rev()
.take(end_chars)
.collect::<String>()
.chars()
.rev()
.collect();
format!("{}...{}", start, end)
}


@@ -1,35 +1,35 @@
{
  "$schema": "https://schema.tauri.app/config/2",
  "productName": "byte-me",
  "version": "0.1.0",
  "identifier": "com.xevion.byteme",
  "build": {
    "beforeDevCommand": "pnpm dev",
    "devUrl": "http://localhost:1420",
    "beforeBuildCommand": "pnpm build",
    "frontendDist": "../dist"
  },
  "app": {
    "windows": [
      {
        "title": "byte-me",
        "width": 800,
        "height": 600
      }
    ],
    "security": {
      "csp": null
    }
  },
  "bundle": {
    "active": true,
    "targets": "all",
    "icon": [
      "icons/32x32.png",
      "icons/128x128.png",
      "icons/128x128@2x.png",
      "icons/icon.icns",
      "icons/icon.ico"
    ]
  }
}


@@ -1,105 +0,0 @@
use byte_me_lib::strings::{transform_filename, truncate_string};
#[test]
fn test_transform_filename() {
// Test cases focusing on practical, readable outputs
// 1. Short filenames should remain unchanged
assert_eq!(transform_filename("test.mp4", 20), "test.mp4");
assert_eq!(transform_filename("short.txt", 15), "short.txt");
assert_eq!(transform_filename("a.b", 10), "a.b");
// 2. No extension cases - preserve meaningful start and end
assert_eq!(transform_filename("short_name", 15), "short_name");
assert_eq!(
transform_filename("very_long_document_name", 15),
"ver...ment_name"
);
assert_eq!(
transform_filename("medium_length_name", 13),
"med...th_name"
);
// 3. Normal extension cases (preserving extension)
assert_eq!(
transform_filename("very_long_video_file_name.mp4", 18),
"ver...ile_name.mp4"
);
assert_eq!(
transform_filename("document_with_long_name.pdf", 15),
"doc..._name.pdf"
);
assert_eq!(
transform_filename("image_file_name.jpeg", 15),
"ima...name.jpeg"
);
// 4. Numeric sequences (like user's example) - preserve start and end numbers
assert_eq!(
transform_filename("43509374693.TS.mp4", 15),
"435...93.TS.mp4"
);
assert_eq!(
transform_filename("20231201_video.mp4", 15),
"202...video.mp4"
);
assert_eq!(transform_filename("file_v2.1.3.tar", 12), "fi...1.3.tar");
// 5. Long extensions (treated as part of filename)
assert_eq!(
transform_filename("file.verylongextension", 15),
"fil...extension"
);
assert_eq!(
transform_filename("document.backup_old", 15),
"doc...ackup_old"
);
// 6. Edge cases
assert_eq!(transform_filename("", 10), "");
assert_eq!(transform_filename("a", 0), "");
assert_eq!(transform_filename("test", 4), "test");
assert_eq!(transform_filename("test", 3), "tes");
assert_eq!(transform_filename("ab", 2), "ab");
// 7. Very short limits - graceful degradation
assert_eq!(transform_filename("test.mp4", 8), "test.mp4");
assert_eq!(transform_filename("verylongname", 8), "ve...ame");
assert_eq!(transform_filename("test.mp4", 7), "tes.mp4");
assert_eq!(transform_filename("hello.txt", 9), "hello.txt");
// 8. Extension edge cases
assert_eq!(transform_filename("file.", 10), "file.");
assert_eq!(transform_filename(".hidden", 10), ".hidden");
assert_eq!(transform_filename("test.a", 10), "test.a");
// 9. Real-world examples
assert_eq!(
transform_filename("IMG_20231201_143022.jpg", 15),
"IMG...43022.jpg"
);
assert_eq!(
transform_filename("meeting_recording_final_v2.mp4", 20),
"mee...g_final_v2.mp4"
);
assert_eq!(
transform_filename("my document (copy).docx", 15),
"my ...opy).docx"
);
}
#[test]
fn test_truncate_string() {
// Test the helper function directly
assert_eq!(truncate_string("hello", 10), "hello");
assert_eq!(truncate_string("hello", 5), "hello");
assert_eq!(truncate_string("hello_world", 8), "he...rld");
assert_eq!(truncate_string("test", 4), "test");
assert_eq!(truncate_string("test", 3), "tes");
assert_eq!(truncate_string("ab", 2), "ab");
assert_eq!(truncate_string("a", 1), "a");
assert_eq!(truncate_string("hello", 1), "h");
assert_eq!(truncate_string("hello", 0), "");
assert_eq!(truncate_string("very_long_name", 10), "ver...name");
assert_eq!(truncate_string("document_name", 9), "doc...ame");
}


@@ -1,62 +1,51 @@
+type Frame = {
+  id: string;
+  data: { x: string | number; y: number }[];
+};
+import { getCurrentWebview } from "@tauri-apps/api/webview";
 import { useEffect, useState } from "react";
-import { useDragDropPaths } from "@/hooks/useDragDropPaths";
-import Graph from "@/components/graph";
-import DropOverlay from "@/components/drop-overlay";
-import type { Frame } from "@/types/graph";
-import { commands } from "@/bindings";
-import type { BitrateData } from "@/bindings";
+import Graph from "./components/graph.js";
+import DropOverlay from "./components/drop-overlay.js";
 function App() {
-  const [data, setData] = useState<Frame[]>([]);
-  const [isLoading, setIsLoading] = useState(false);
-  const paths = useDragDropPaths();
+  const data: Frame[] = [];
-  useEffect(() => {
-    if (paths.length === 0) {
-      return;
-    }
+  const [paths, setPaths] = useState<string[]>([]);
+  useEffect(() => {
+    const unlistenPromise = getCurrentWebview().onDragDropEvent(
+      async ({ payload }) => {
+        if (payload.type === "enter") {
+          setPaths(payload.paths);
+          console.log("User hovering", payload);
+        } else if (payload.type === "leave" || payload.type === "drop") {
+          setPaths([]);
+          console.log("User left", payload);
+        }
+      }
+    );
-    // For minimal prototype, just process the first file
-    const firstPath = paths[0];
-    setIsLoading(true);
+    // you need to call unlisten if your handler goes out of scope e.g. the component is unmounted
+    return () => {
+      unlistenPromise.then((unlisten) => {
+        unlisten();
+        console.log("Unlistened");
+      });
+    };
+  }, []);
-    commands
-      .extractBitrateData(firstPath)
-      .then((bitrateData: BitrateData) => {
-        // Transform BitrateData to Nivo's Frame format
-        const frame: Frame = {
-          id: bitrateData.id,
-          data: bitrateData.frames.map((frame) => ({
-            x: frame.frame_num,
-            y: Number(frame.packet_size),
-          })),
-        };
-        setData([frame]);
-        setIsLoading(false);
-      })
-      .catch((error) => {
-        console.error("Failed to extract bitrate data:", error);
-        setIsLoading(false);
-      });
-  }, [paths]);
   const graph = <Graph data={data} />;
   return (
     <div
       id="App"
       className="min-h-screen min-w-screen overflow-hidden"
       style={{ "--wails-drop-target": "drop" } as React.CSSProperties}
     >
       <DropOverlay paths={paths} />
-      {isLoading && (
-        <div className="absolute z-20 top-4 right-4 text-white bg-blue-600 px-4 py-2 rounded-lg">
-          Extracting bitrate data...
-        </div>
-      )}
       {graph}
     </div>
   );
 }
 export default App;
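The `useDragDropPaths` hook imported on the left-hand side of this hunk is not included in the diff; a plausible sketch of such a hook, assuming it wraps the same `onDragDropEvent` listener that the right-hand side inlines (the exact behaviour on "drop" is a guess):

// Hypothetical hook (not part of this diff) that exposes the hovered/dropped
// file paths as React state.
import { getCurrentWebview } from "@tauri-apps/api/webview";
import { useEffect, useState } from "react";

export function useDragDropPaths(): string[] {
  const [paths, setPaths] = useState<string[]>([]);

  useEffect(() => {
    const unlistenPromise = getCurrentWebview().onDragDropEvent(({ payload }) => {
      if (payload.type === "enter" || payload.type === "drop") {
        // Keep the paths around so the caller can react to a drop.
        setPaths(payload.paths);
      } else if (payload.type === "leave") {
        setPaths([]);
      }
    });
    // Unlisten when the consuming component unmounts.
    return () => {
      unlistenPromise.then((unlisten) => unlisten());
    };
  }, []);

  return paths;
}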


@@ -1,37 +1,90 @@
// Import generated TypeScript types from ts-rs
import type { StreamResult } from "@/bindings/StreamResult";
import type { StreamDetail } from "@/bindings/StreamDetail";
import type { StreamResultError } from "@/bindings/StreamResultError";
import type { MediaType } from "@/bindings/MediaType";
import type { File } from "@/bindings/File";
import type { FileCandidacy } from "@/bindings/FileCandidacy";
import type { BitrateData } from "@/bindings/BitrateData";
import type { BitrateFrame } from "@/bindings/BitrateFrame";
export type { StreamResult, StreamDetail, StreamResultError, MediaType, File, FileCandidacy, BitrateData, BitrateFrame };
// Tauri invoke wrapper // This file was generated by [tauri-specta](https://github.com/oscartbeaumont/tauri-specta). Do not edit this file manually.
import { invoke } from "@tauri-apps/api/core";
/** user-defined commands **/
export type Result<T, E> =
| { status: "ok"; data: T }
| { status: "error"; error: E };
export const commands = { export const commands = {
async hasStreams(paths: string[]): Promise<Result<StreamResult[], StreamResultError>> { async hasStreams(paths: string[]) : Promise<Result<StreamResult[], StreamResultError>> {
try { try {
const data = await invoke<StreamResult[]>("has_streams", { paths }); return { status: "ok", data: await TAURI_INVOKE("has_streams", { paths }) };
return { status: "ok", data }; } catch (e) {
} catch (e) { if(e instanceof Error) throw e;
if (e instanceof Error) throw e; else return { status: "error", error: e as any };
else return { status: "error", error: e as any }; }
} }
}, }
async analyzeFiles(paths: string[]): Promise<File[]> {
return await invoke<File[]>("analyze_files", { paths });
},
async extractBitrateData(path: string): Promise<BitrateData> { /** user-defined events **/
return await invoke<BitrateData>("extract_bitrate_data", { path });
}
/** user-defined constants **/
/** user-defined types **/
export type StreamDetail = { Video: { codec: string } } | { Audio: { codec: string } } | { Subtitle: { codec: string } }
export type StreamResult = { path: string; filename: string; streams: StreamDetail[] }
export type StreamResultError = { filename: string | null; reason: string }
/** tauri-specta globals **/
import {
invoke as TAURI_INVOKE,
Channel as TAURI_CHANNEL,
} from "@tauri-apps/api/core";
import * as TAURI_API_EVENT from "@tauri-apps/api/event";
import { type WebviewWindow as __WebviewWindow__ } from "@tauri-apps/api/webviewWindow";
type __EventObj__<T> = {
listen: (
cb: TAURI_API_EVENT.EventCallback<T>,
) => ReturnType<typeof TAURI_API_EVENT.listen<T>>;
once: (
cb: TAURI_API_EVENT.EventCallback<T>,
) => ReturnType<typeof TAURI_API_EVENT.once<T>>;
emit: null extends T
? (payload?: T) => ReturnType<typeof TAURI_API_EVENT.emit>
: (payload: T) => ReturnType<typeof TAURI_API_EVENT.emit>;
}; };
export type Result<T, E> =
| { status: "ok"; data: T }
| { status: "error"; error: E };
function __makeEvents__<T extends Record<string, any>>(
mappings: Record<keyof T, string>,
) {
return new Proxy(
{} as unknown as {
[K in keyof T]: __EventObj__<T[K]> & {
(handle: __WebviewWindow__): __EventObj__<T[K]>;
};
},
{
get: (_, event) => {
const name = mappings[event as keyof T];
return new Proxy((() => {}) as any, {
apply: (_, __, [window]: [__WebviewWindow__]) => ({
listen: (arg: any) => window.listen(name, arg),
once: (arg: any) => window.once(name, arg),
emit: (arg: any) => window.emit(name, arg),
}),
get: (_, command: keyof __EventObj__<any>) => {
switch (command) {
case "listen":
return (arg: any) => TAURI_API_EVENT.listen(name, arg);
case "once":
return (arg: any) => TAURI_API_EVENT.once(name, arg);
case "emit":
return (arg: any) => TAURI_API_EVENT.emit(name, arg);
}
},
});
},
},
);
}
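Both the hand-written wrapper on the left and the tauri-specta output on the right return the same Result-shaped union from `commands.hasStreams`; a small consumption sketch (the import path and function name are illustrative):

// Sketch: narrowing the Result union returned by commands.hasStreams.
import { commands, type StreamResult } from "./bindings";

export async function listStreams(paths: string[]): Promise<StreamResult[]> {
  const result = await commands.hasStreams(paths);
  if (result.status === "ok") {
    // result.data is StreamResult[]: one entry per analyzable file.
    return result.data;
  }
  // result.error is a StreamResultError with an optional filename and a reason.
  console.warn(`has_streams failed for ${result.error.filename ?? "unknown"}: ${result.error.reason}`);
  return [];
}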


@@ -1,193 +1,145 @@
-import { type ReactNode, useEffect, useState } from "react";
+import { ReactNode, useEffect, useRef, useState } from "react";
import { match, P } from "ts-pattern";
-import {
-  File as FileIcon,
-  FileText,
-  Film,
-  Image,
-  Loader2,
-  Music,
-  XCircle,
-} from "lucide-react";
-import { commands } from "@/bindings";
-import type { File, FileCandidacy, MediaType } from "@/bindings";

type DropOverlayProps = {
  paths: string[];
};

-const formatFileSize = (bytes: number): string => {
-  if (bytes === 0) return "0 B";
-  const k = 1024;
-  const sizes = ["B", "KB", "MB", "GB"];
-  const i = Math.floor(Math.log(bytes) / Math.log(k));
-  return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + " " + sizes[i];
-};
+type State =
+  | { status: "hidden" }
+  | { status: "loading"; count: number }
+  | { status: "ready"; files: { name: string; key: string }[] }
+  | { status: "error"; reason: string; filename?: string };
+import {
+  CircleQuestionMarkIcon,
+  File as FileIcon,
+  Film,
+  Image,
+  Music,
+} from "lucide-react";
+import { commands } from "../bindings";
+type FileItemProps = {
+  filename: string;
+  error?: string;
+};

-const getFileIcon = (candidacy: FileCandidacy): ReactNode => {
-  return match(candidacy)
-    .with("Loading", () => (
-      <Loader2 className="w-5 h-5 text-blue-400 animate-spin" />
-    ))
-    .with({ Error: P._ }, () => <XCircle className="w-5 h-5 text-red-400" />)
-    .with({ Success: { type: P.select() } }, (mediaType: MediaType) => {
-      switch (mediaType) {
-        case "Audio":
-          return <Music className="w-5 h-5 text-blue-400" />;
-        case "Video":
-          return <Film className="w-5 h-5 text-purple-400" />;
-        case "Image":
-          return <Image className="w-5 h-5 text-pink-400" />;
-        case "Document":
-          return <FileText className="w-5 h-5 text-green-400" />;
-        case "Executable":
-          return <FileIcon className="w-5 h-5 text-orange-400" />;
-        case "Archive":
-          return <FileIcon className="w-5 h-5 text-yellow-400" />;
-        case "Library":
-          return <FileIcon className="w-5 h-5 text-indigo-400" />;
-        default:
-          return <FileIcon className="w-5 h-5 text-neutral-300" />;
-      }
-    })
-    .exhaustive();
-};
-const getStatusColor = (candidacy: FileCandidacy): string => {
-  return match(candidacy)
-    .with("Loading", () => "border-blue-500/50")
-    .with({ Error: P._ }, () => "border-red-500/50")
-    .with({ Success: P._ }, () => "border-green-500/50")
-    .exhaustive();
-};
-const FileItem = ({ file }: { file: File }) => {
-  const icon = getFileIcon(file.candidacy);
-  const statusColor = getStatusColor(file.candidacy);
-  const fileSize = formatFileSize(file.size);
-  const subtitle = match(file.candidacy)
-    .with("Loading", () => "Analyzing...")
-    .with({ Error: { reason: P.select() } }, (reason: string) => reason)
-    .with({ Success: { type: P.select() } }, (mediaType: MediaType) => {
-      switch (mediaType) {
-        case "Audio":
-          return "Audio file";
-        case "Video":
-          return "Video file";
-        case "Image":
-          return "Image file";
-        case "Document":
-          return "Document file";
-        case "Executable":
-          return "Executable file";
-        case "Archive":
-          return "Archive file";
-        case "Library":
-          return "Library file";
-        default:
-          return "Unknown file type";
-      }
-    })
-    .exhaustive();
+const Item = ({ icon, text }: { icon: ReactNode; text: ReactNode }) => {
  return (
    <div
-      className={`flex items-center gap-3 px-4 py-3 rounded-lg bg-neutral-800 border ${statusColor} transition-all duration-200`}
+      className="flex items-center gap-2 px-3 py-2 bg-neutral-800 rounded-md shadow-sm"
      style={{
        maxWidth: "100%",
-        marginBottom: "0.75rem",
+        marginBottom: "0.5rem",
      }}
    >
      {icon}
-      <div className="flex-1 min-w-0">
-        <div className="truncate text-neutral-100 font-medium">
-          {file.filename}
-        </div>
-        <div className="truncate text-neutral-400 text-sm mt-1">
-          {fileSize} {subtitle}
-        </div>
-      </div>
+      <span className="truncate text-neutral-100 max-w-md">{text}</span>
    </div>
  );
};

+const FileItem = ({ filename, error }: FileItemProps) => {
+  const ext = filename.split(".").pop()?.toLowerCase();
+  const icon =
+    error == null ? (
+      match(ext)
+        .with("mp3", "wav", "flac", "ogg", "m4a", "aac", () => (
+          <Music className="w-5 h-5 text-blue-400" />
+        ))
+        .with("mp4", "mkv", "webm", "mov", "avi", () => (
+          <Film className="w-5 h-5 text-purple-400" />
+        ))
+        .with("gif", () => <Image className="w-5 h-5 text-pink-400" />)
+        .otherwise(() => <FileIcon className="w-5 h-5 text-neutral-300" />)
+    ) : (
+      <CircleQuestionMarkIcon className="w-5 h-5 text-neutral-300" />
+    );
+  return <Item icon={icon} text={filename} />;
+};

const DropOverlay = ({ paths }: DropOverlayProps) => {
-  const [files, setFiles] = useState<File[]>([]);
-  const [isLoading, setIsLoading] = useState(false);
+  const [state, setState] = useState<State>({ status: "hidden" });
+  const aborterRef = useRef<AbortController | null>(null);

  useEffect(() => {
    if (paths.length === 0) {
-      setFiles([]);
-      setIsLoading(false);
+      setState({ status: "hidden" });
      return;
    }

-    setIsLoading(true);
-    setFiles([]);
-    // Initialize with loading state for all files
-    const loadingFiles: File[] = paths.map((path) => {
-      const filename = path.split(/[/\\]/).pop() || "unknown";
-      return {
-        filename,
-        size: 0,
-        candidacy: "Loading" as const,
-      };
-    });
-    setFiles(loadingFiles);
-    // Analyze files
-    commands
-      .analyzeFiles(paths)
-      .then((analyzedFiles) => {
-        setFiles(analyzedFiles);
-        setIsLoading(false);
-      })
-      .catch((error) => {
-        console.error("Failed to analyze files:", error);
-        // Set all files to error state
-        const errorFiles: File[] = paths.map((path) => {
-          const filename = path.split(/[/\\]/).pop() || "unknown";
-          return {
-            filename,
-            size: 0,
-            candidacy: {
-              Error: {
-                reason: "Failed to analyze file",
-              },
-            },
-          };
-        });
-        setFiles(errorFiles);
-        setIsLoading(false);
-      });
+    setState({ status: "loading", count: paths.length });
+    aborterRef.current = new AbortController();
+    commands.hasStreams(paths).then((result) => {
+      setState((_state) => {
+        return match(result)
+          .with({ status: "ok" }, (r) => ({
+            status: "ready" as const,
+            files: r.data.map((item) => ({
+              name: item.filename,
+              key: item.path,
+            })),
+          }))
+          .with({ status: "error" }, (r) => {
+            if (r.error.filename) {
+              return {
+                status: "error" as const,
+                reason: r.error.reason,
+                filename: r.error.filename,
+              };
+            }
+            return { status: "error" as const, reason: r.error.reason };
+          })
+          .exhaustive();
+      });
+    });
  }, [paths]);

-  if (files.length === 0) {
+  if (state.status === "hidden") {
    return null;
  }

+  const inner = match(state)
+    .with({ status: "loading" }, ({ count }) =>
+      Array.from({ length: count }).map((_, i) => (
+        <Item
+          key={i}
+          icon={
+            <CircleQuestionMarkIcon className="w-5 h-5 text-neutral-300/50" />
+          }
+          text={
+            <span className="inline-block w-32 h-5 bg-neutral-300/10 rounded animate-pulse" />
+          }
+        />
+      ))
+    )
+    .with({ status: "ready" }, (r) => {
+      return r.files
+        .slice(0, 8)
+        .map((file) => <FileItem key={file.key} filename={file.name} />);
+    })
+    .with({ status: "error", filename: P.string }, (r) => {
+      return <FileItem filename={r.filename} error={r.reason} />;
+    })
+    .with({ status: "error" }, ({ reason }) => {
+      return (
+        <Item
+          icon={<CircleQuestionMarkIcon className="w-5 h-5 text-neutral-300" />}
+          text={reason}
+        />
+      );
+    })
+    .exhaustive();

  return (
-    <div className="absolute z-10 top-0 left-0 w-full h-full backdrop-blur-[1px] backdrop-saturate-0 transition-all duration-300 ease-in-out">
-      <div className="flex flex-col justify-center items-center h-full p-8">
-        <div className="rounded-xl p-6 max-w-2xl w-full">
-          <div className="flex flex-col items-center gap-4">
-            {isLoading && (
-              <div className="flex items-center gap-2 text-blue-400 mb-4">
-                <Loader2 className="w-6 h-6 animate-spin" />
-                <span className="text-lg font-medium">
-                  Analyzing {files.length} file{files.length > 1 ? "s" : ""}...
-                </span>
-              </div>
-            )}
-            <div className="max-h-96 overflow-y-auto w-full">
-              {files.map((file, index) => (
-                <FileItem key={`${file.filename}-${index}`} file={file} />
-              ))}
-            </div>
-          </div>
-        </div>
+    <div className="absolute z-10 top-0 left-0 w-full h-full bg-black/40 backdrop-blur-sm transition-all duration-300 ease-in-out">
+      <div className="flex flex-col justify-center items-center h-full">
+        <span className="text-white text-2xl">{inner}</span>
      </div>
    </div>
  );

View File

@@ -1,81 +1,85 @@
import { ResponsiveLine } from "@nivo/line";
-import { formatBytes } from "@/lib/format";
-import type { Frame } from "@/types/graph";
+import { formatBytes } from "../lib/format.js";
+
+type Frame = {
+  id: string;
+  data: { x: string | number; y: number }[];
+};

type GraphProps = {
  data: Frame[];
};

const Graph = ({ data }: GraphProps) => (
  <ResponsiveLine
    data={data}
    margin={{ top: 50, right: 110, bottom: 50, left: 60 }}
    xScale={{ type: "linear" }}
    yScale={{
      type: "linear",
      min: 0,
      max: "auto",
      stacked: false,
      reverse: false,
    }}
    theme={{
      tooltip: {
        container: {
          backgroundColor: "#2e2b45",
        },
      },
      grid: {
        line: {
          stroke: "rgb(252, 191, 212)",
          strokeWidth: 0.35,
          strokeOpacity: 0.75,
        },
      },
      crosshair: {
        line: {
          stroke: "#fdd3e2",
          strokeWidth: 1,
        },
      },
      axis: {
        legend: {},
        domain: {
          line: {
            stroke: "rgb(252, 191, 212)",
            strokeWidth: 0.5,
            strokeOpacity: 0.5,
          },
        },
      },
      text: {
        fill: "#6e6a86",
      },
    }}
-    axisBottom={{ legend: "Frame Number", legendOffset: 36 }}
+    axisBottom={{ legend: "transportation", legendOffset: 36 }}
    axisLeft={{
-      legend: "Packet Size",
+      legend: "count",
      legendOffset: -40,
-      format: (v) => formatBytes(v),
+      format: (v) => formatBytes(v * 1024 * 53),
    }}
    pointSize={10}
    colors={["#3e8faf", "#c4a7e7", "#f5c276", "#EA9B96", "#EB7092", "#9CCFD8"]}
    pointBorderWidth={0}
    pointBorderColor={{ from: "seriesColor" }}
    pointLabelYOffset={-12}
    enableSlices={"x"}
    enableTouchCrosshair={true}
    useMesh={true}
    legends={[
      {
        anchor: "bottom-right",
        direction: "column",
        translateX: 100,
        itemWidth: 80,
        itemHeight: 22,
        symbolShape: "circle",
      },
    ]}
  />
);

export default Graph;
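
Not part of the diff: an illustrative Frame[] value in the shape both versions of the component accept (assuming, as the old axis labels suggest, x is a frame number and y a packet size in bytes). The numbers are made up.

// Hypothetical input: one series named "video" with three points.
const data: Frame[] = [
  {
    id: "video",
    data: [
      { x: 0, y: 52344 },
      { x: 1, y: 48112 },
      { x: 2, y: 61090 },
    ],
  },
];

// Rendered as <Graph data={data} />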

View File

@@ -1,22 +1,22 @@
@import "tailwindcss";

:root {
  font-family: Inter, Avenir, Helvetica, Arial, sans-serif;
  font-size: 16px;
  line-height: 24px;
  font-weight: 400;

  color: #e0def4;
  background-color: #232136;

  font-synthesis: none;
  text-rendering: optimizeLegibility;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
  -webkit-text-size-adjust: 100%;
}

#app {
  height: 100vh;
  text-align: center;
}

View File

@@ -1,23 +0,0 @@
import { useEffect, useState } from "react";
import { getCurrentWebview } from "@tauri-apps/api/webview";
export function useDragDropPaths(): string[] {
const [paths, setPaths] = useState<string[]>([]);
useEffect(() => {
const unlistenPromise = getCurrentWebview().onDragDropEvent(
async ({ payload }) => {
if (payload.type === "drop") {
setPaths(payload.paths);
} else if (payload.type === "leave") {
setPaths([]);
}
},
);
return () => {
unlistenPromise.then((unlisten) => unlisten());
};
}, []);
return paths;
}
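
App.tsx is not included in this excerpt; the sketch below only illustrates how a hook like the removed useDragDropPaths would typically have fed DropOverlay's paths prop, and is an assumption rather than the project's actual wiring.

// Hypothetical wiring: paths is [] until files are dragged over the window,
// and is cleared again when the drag leaves.
function App() {
  const paths = useDragDropPaths();
  return <DropOverlay paths={paths} />;
}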

View File

@@ -1,28 +1,28 @@
-import { formatBytes } from "@/lib/format";
+import { formatBytes } from "./format.js";
import { test, expect } from "vitest";

test("formats bytes less than 1024", () => {
  expect(formatBytes(512)).toBe("512 B");
});

test("formats KiB correctly", () => {
  expect(formatBytes(2048)).toBe("2 KiB");
  expect(formatBytes(1536)).toBe("1.5 KiB");
  expect(formatBytes(1024)).toBe("1 KiB");
});

test("formats MiB correctly", () => {
  expect(formatBytes(1048576)).toBe("1 MiB");
  expect(formatBytes(1572864)).toBe("1.5 MiB");
  expect(formatBytes(2097152)).toBe("2 MiB");
});

test("formats GiB correctly", () => {
  expect(formatBytes(1073741824)).toBe("1 GiB");
  expect(formatBytes(1610612736)).toBe("1.5 GiB");
  expect(formatBytes(2147483648)).toBe("2 GiB");
});

test("formats large values with no decimal if intValue >= 1000", () => {
  expect(formatBytes(1024 * 1024 * 1000)).toBe("1000 MiB");
});

View File

@@ -11,28 +11,28 @@
 * @returns The formatted string with the appropriate unit.
 */
export function formatBytes(v: number): string {
  if (v < 1024) return `${v} B`;

  const units = ["KiB", "MiB", "GiB", "TiB"];
  let unitIndex = -1;
  let value = v;

  while (value >= 1024 && unitIndex < units.length - 1) {
    value /= 1024;
    unitIndex++;
  }

  const intValue = Math.floor(value);
  const decimal = value - intValue;

  if (intValue >= 1000) {
    // More than 3 digits, no decimal
    return `${intValue} ${units[unitIndex]}`;
  } else if (decimal >= 0.1) {
    // Show 1 decimal if decimal >= 0.1
    return `${value.toFixed(1)} ${units[unitIndex]}`;
  } else {
    // No decimal
    return `${intValue} ${units[unitIndex]}`;
  }
}
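
For reference, a few inputs and the outputs the branches above produce, consistent with the tests earlier in this diff:

formatBytes(512);                // "512 B"    (below 1024, returned as-is)
formatBytes(1536);               // "1.5 KiB"  (fractional part >= 0.1, one decimal kept)
formatBytes(2147483648);         // "2 GiB"    (fractional part < 0.1, integer only)
formatBytes(1024 * 1024 * 1000); // "1000 MiB" (intValue >= 1000, no decimal)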

View File

@@ -1,10 +1,10 @@
import React from "react";
import ReactDOM from "react-dom/client";
-import App from "@/App";
-import "@/global.css";
+import App from "./App";
+import "./global.css";

ReactDOM.createRoot(document.getElementById("root") as HTMLElement).render(
  <React.StrictMode>
    <App />
-  </React.StrictMode>,
+  </React.StrictMode>
);

View File

@@ -1,4 +0,0 @@
export type Frame = {
id: string;
data: { x: string | number; y: number }[];
};

View File

@@ -1,10 +1,25 @@
{
-  "extends": "@tsconfig/vite-react/tsconfig.json",
-  "compilerOptions": {
-    /* Paths */
-    "paths": {
-      "@/*": ["./src/*"]
-    }
-  },
-  "include": ["src"]
+  "compilerOptions": {
+    "target": "ES2020",
+    "useDefineForClassFields": true,
+    "lib": ["ES2020", "DOM", "DOM.Iterable"],
+    "module": "ESNext",
+    "skipLibCheck": true,
+
+    /* Bundler mode */
+    "moduleResolution": "bundler",
+    "allowImportingTsExtensions": true,
+    "resolveJsonModule": true,
+    "isolatedModules": true,
+    "noEmit": true,
+    "jsx": "react-jsx",
+
+    /* Linting */
+    "strict": true,
+    "noUnusedLocals": true,
+    "noUnusedParameters": true,
+    "noFallthroughCasesInSwitch": true
+  },
+  "include": ["src"],
+  "references": [{ "path": "./tsconfig.node.json" }]
}

View File

@@ -1,10 +1,10 @@
{
  "compilerOptions": {
    "composite": true,
    "skipLibCheck": true,
    "module": "ESNext",
    "moduleResolution": "bundler",
    "allowSyntheticDefaultImports": true
  },
  "include": ["vite.config.ts"]
}

View File

@@ -1,38 +1,33 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";
import tailwindcss from "@tailwindcss/vite";
-import path from "path";

+// @ts-expect-error process is a nodejs global
const host = process.env.TAURI_DEV_HOST;

// https://vite.dev/config/
export default defineConfig(async () => ({
  plugins: [react(), tailwindcss()],
-  resolve: {
-    alias: {
-      "@": path.resolve(__dirname, "src"),
-    },
-  },

  // Vite options tailored for Tauri development and only applied in `tauri dev` or `tauri build`
  //
  // 1. prevent Vite from obscuring rust errors
  clearScreen: false,
  // 2. tauri expects a fixed port, fail if that port is not available
  server: {
    port: 1420,
    strictPort: true,
    host: host || false,
    hmr: host
      ? {
          protocol: "ws",
          host,
          port: 1421,
        }
      : undefined,
    watch: {
      // 3. tell Vite to ignore watching `src-tauri`
      ignored: ["**/src-tauri/**"],
    },
  },
}));
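
For context on the alias removed here and in tsconfig.json above: the old configuration mapped "@/*" onto "./src/*" both in the TypeScript "paths" entry and in Vite's resolve.alias, so the aliased and relative import styles seen throughout this diff resolve to the same modules. As an illustration, the Graph component's import changes form but not target:

// Old, alias-based form (relies on the removed tsconfig "paths" + Vite resolve.alias):
import { formatBytes } from "@/lib/format";

// New, relative form of the same import:
import { formatBytes } from "../lib/format.js";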