diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..2160cde --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,95 @@ +name: Tests + +on: + push: + branches: [main, 'claude/**'] + pull_request: + branches: [main] + +jobs: + unit-tests: + name: Unit Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + + - name: Install dependencies + run: bun install + + - name: Run unit tests + run: bun test src/ + + # Integration test using the GitHub Action + test: + name: Test Worker ${{ matrix.index }} + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + index: [0, 1, 2] + steps: + - uses: actions/checkout@v4 + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + + - name: Install dependencies + run: bun install + + - name: Split tests + id: split + uses: ./ + with: + command: split + pattern: 'tests/dummy/*.test.ts' + total: 3 + index: ${{ matrix.index }} + cache-key: integration-tests + + - name: Show assigned tests + run: echo "Worker ${{ matrix.index }} running:${{ steps.split.outputs.tests }}" + + - name: Run tests + if: steps.split.outputs.tests != '' + run: bun test ${{ steps.split.outputs.tests }} --reporter=junit --reporter-outfile=junit-${{ matrix.index }}.xml + + - name: Convert JUnit to timing JSON + if: steps.split.outputs.tests != '' + uses: ./ + with: + command: convert + from: junit-${{ matrix.index }}.xml + to: timing-${{ matrix.index }}.json + + - name: Upload timing artifact + if: always() + uses: actions/upload-artifact@v4 + with: + name: timing-${{ matrix.index }} + path: timing-${{ matrix.index }}.json + + save-timings: + name: Save Timings + needs: test + if: always() + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/download-artifact@v4 + + - name: Show downloaded artifacts + run: find . 
-name "*.json" -type f + + - name: Merge timings + uses: ./ + with: + command: merge + prefix: 'timing-*/timing-' + cache-key: integration-tests + + - name: Show saved timings + run: cat .fairsplice-timings.json diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..de045f4 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,57 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +Fairsplice is a TypeScript/Bun CLI tool and GitHub Action that optimizes test distribution across parallel workers. It provides CircleCI-style test splitting based on historical timing data for GitHub Actions. + +## Commands + +```bash +# Run locally +bun run index.ts + +# Run all tests +bun test + +# Run tests in src directory +bun test src/ + +# Run a specific test file +bun test src/lib/splitFiles.test.ts + +# Compile to standalone binary +bun build ./index.ts --compile --outfile fairsplice +``` + +## Architecture + +**Entry Point**: `index.ts` - CLI with three commands: `split`, `convert`, `merge` + +**Source Structure**: +- `src/commands/` - CLI command implementations + - `split.ts` - Distributes test files across workers using bin packing + - `merge.ts` - Aggregates timing JSON files and updates history + - `convert.ts` - Converts JUnit XML to timing JSON +- `src/lib/` - Core algorithms + - `splitFiles.ts` - Greedy bin packing algorithm (assigns heaviest tests first to balance workload) + - `junit.ts` - JUnit XML parser using `fast-xml-parser` + - `average.ts` - Timing averaging utility +- `src/backend/` - Storage layer + - `fileStorage.ts` - JSON-based timing persistence with rolling window of last 10 timings per file +- `src/config.ts` - Constants (`NUMBER_OF_TIMINGS_TO_KEEP=10`, `DEFAULT_TIMING_IF_MISSING=10000ms`) + +**GitHub Action**: `action.yml` - Composite action wrapping the CLI with automatic cache handling + +**Data Flow**: +1. 
`split` loads cached timings, globs test files, applies bin packing, outputs bucket assignments +2. Tests run in parallel workers, each outputting JUnit XML +3. `convert` transforms JUnit XML to timing JSON (one per worker) +4. `merge` aggregates timing JSONs into cached timings history + +## Testing + +Tests are co-located with source files (`*.test.ts`). Test fixtures for JUnit parsing are in `src/lib/fixtures/`. + +The CI workflow (`.github/workflows/test.yml`) runs unit tests plus a 3-worker integration test that exercises the full split→run→convert→merge pipeline. diff --git a/README.md b/README.md index e71b8f7..fc6dcb1 100644 --- a/README.md +++ b/README.md @@ -1,102 +1,238 @@ # Fairsplice -**Warning: this project is still in very early development!** +Fairsplice is a CLI tool and GitHub Action that optimizes test distribution across parallel workers. It provides CircleCI-style test splitting based on timing data for GitHub Actions. + +## Quick Start (GitHub Action) + +**Recommended:** Compute splits once and pass to test jobs. This ensures re-running a failed job runs the same tests. 
+ +```yaml +jobs: + # Compute splits once - ensures consistent re-runs + compute-splits: + runs-on: ubuntu-latest + outputs: + test-buckets: ${{ steps.split.outputs.buckets }} + steps: + - uses: actions/checkout@v4 + + - name: Split tests + id: split + uses: dashdoc/fairsplice@v1 + with: + command: split + pattern: 'tests/**/*.py' + total: 3 + cache-key: python-tests + # No index = outputs all buckets as JSON array + + test: + needs: compute-splits + runs-on: ubuntu-latest + strategy: + matrix: + index: [0, 1, 2] + steps: + - uses: actions/checkout@v4 + + - name: Get test files + id: split + run: | + echo "tests=$(echo '${{ needs.compute-splits.outputs.test-buckets }}' | jq -r '.[${{ matrix.index }}] | join(" ")')" >> "$GITHUB_OUTPUT" + + - name: Run tests + run: pytest ${{ steps.split.outputs.tests }} --junit-xml=junit.xml + + - name: Convert JUnit to timing JSON + uses: dashdoc/fairsplice@v1 + with: + command: convert + from: junit.xml + to: timing.json + + - uses: actions/upload-artifact@v4 + with: + name: timing-${{ matrix.index }} + path: timing.json + + save-timings: + needs: test + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/download-artifact@v4 + + - name: Merge timings + uses: dashdoc/fairsplice@v1 + with: + command: merge + prefix: 'timing-*/timing' + cache-key: python-tests +``` -Fairsplice is a CLI tool designed to optimize test distribution across multiple workers. By intelligently splitting and saving test cases, Fairsplice ensures a balanced workload distribution for your CI/CD pipelines, making tests run time more predictable. +That's it! Caching is handled automatically. -We found Github Actions lacking when compared to CircleCI which has [tests splitting](https://circleci.com/docs/parallelism-faster-jobs/#how-test-splitting-works) based on timings. +### Why compute splits once? 
-There are a number of projects like [Split tests](https://github.com/marketplace/actions/split-tests) but they require uploading and downloading Junit XML files and merging them, or committing the Junit files to have them when running the tests. +When you compute splits inside each matrix job (using `index`), re-running a failed job can run different tests: +1. Other jobs may have updated the timing cache +2. The re-run computes a new split with updated timings +3. The failed test might now be assigned to a different worker -This tool uses instead a Redis server to store the last 10 timings for each test file and uses the average of these to split tests. It is easy to setup if you have a Redis server running. +By computing splits once in a dedicated job and passing via workflow outputs, GitHub Actions preserves the same split on re-runs. -## Installation +## How It Works -This project is built using [Bun](https://bun.sh) and [Redis](https://redis.io/). +``` +┌─────────────────────────────────────────────────────────────────────────────┐ +│ CI PIPELINE │ +└─────────────────────────────────────────────────────────────────────────────┘ + + ┌─────────────────────┐ + │ 1. 
SPLIT PHASE │ + └─────────────────────┘ + + timings (cached) fairsplice split + ┌──────────────────────┐ ┌─────────────────┐ + │ { │ │ │ + │ "test_a.py": [2.1],│ ──────▶ │ Load timings │ + │ "test_b.py": [5.3],│ │ + glob files │ + │ "test_c.py": [1.8] │ │ │ + │ } │ └────────┬────────┘ + └──────────────────────┘ │ + ▼ + ┌─────────────────────────┐ + │ Distribute tests by │ + │ timing (bin packing) │ + └─────────────────────────┘ + │ + ┌─────────────────────────────┼─────────────────────────────┐ + ▼ ▼ ▼ + ┌───────────────────┐ ┌───────────────────┐ ┌───────────────────┐ + │ Worker 0 │ │ Worker 1 │ │ Worker 2 │ + │ ~5.3s │ │ ~3.9s │ │ ~5.1s │ + └─────────┬─────────┘ └─────────┬─────────┘ └─────────┬─────────┘ + │ │ │ + ▼ ▼ ▼ + ┌───────────────────┐ ┌───────────────────┐ ┌───────────────────┐ + │ Run tests │ │ Run tests │ │ Run tests │ + │ Output JUnit │ │ Output JUnit │ │ Output JUnit │ + └─────────┬─────────┘ └─────────┬─────────┘ └─────────┬─────────┘ + │ │ │ + └───────────────────────────┴───────────────────────────┘ + │ + ┌─────────────────────┐ │ + │ 2. MERGE PHASE │ │ + └─────────────────────┘ │ + ▼ + ┌─────────────────────────┐ + │ fairsplice merge │ + │ (extracts timings) │ + └─────────────────────────┘ + │ + ▼ + ┌──────────────────────┐ + │ timings (cached) │◀─── Auto-cached + └──────────────────────┘ for next run +``` -Ensure you have Bun installed. 
-To launch it, run +**Key concepts:** +- **Split phase**: Distributes test files across workers based on historical timing data +- **Convert phase**: Extracts timing from JUnit XML into timing JSON (one per worker) +- **Merge phase**: Combines timing JSON files from all workers and caches for next run +- **Bin packing**: Assigns tests to balance total execution time (heaviest tests first) +- **Rolling average**: Keeps last 10 timings per test file for predictions -```bash -bunx fairsplice -``` +## GitHub Action Reference -## Configuration +### Inputs -Before using Fairsplice, set the environment variable `FAIRSPLICE_REDIS_URL` to your Redis server URL. This is necessary for storing and retrieving test case information. +| Input | Required | Description | +|-------|----------|-------------| +| `command` | Yes | `split`, `convert`, or `merge` | +| `cache-key` | For split/merge | Cache key for storing timings (use different keys for frontend/backend workflows) | +| `timings-file` | No | JSON file for timings (default: `.fairsplice-timings.json`) | +| `pattern` | For split | Glob pattern to match test files | +| `total` | For split | Total number of workers | +| `index` | For split | Current worker index (0-based) | +| `from` | For convert | JUnit XML file to read | +| `to` | For convert | Timing JSON file to write | +| `path-prefix` | For convert | Prefix to prepend to file paths (to match split pattern) | +| `prefix` | For merge | Prefix to match timing JSON files | -```bash -export FAIRSPLICE_REDIS_URL='redis://myuser:mypassword@your-redis-url.upstash.io:33683' -``` +### Cache Behavior -## Usage +Fairsplice uses GitHub Actions cache for storing timing history. Important characteristics: -Fairsplice supports two main commands: `save` and `split`. 
+- **Repository-scoped, branch-gated**: Caches are repository-scoped but restore access is gated by branch context +- **Default branch is global**: Caches saved from the default branch (usually `main`) are restorable by all branches +- **Immutable, single-writer**: Each cache key can only be written once; updates require a new key (handled automatically via run ID suffix) +- **Asymmetric cross-branch sharing**: Restore is permissive (branches can read from main), save is restricted (branches can only write to their own scope) -### Saving test results +To seed shared timings for all branches, run the workflow on `main` first. Subsequent PRs and feature branches will restore timings from main's cache. -To save test results: +### Outputs -```bash -fairsplice save --from -``` +| Output | Description | +|--------|-------------| +| `tests` | Space-separated list of test files (when `index` provided) | +| `buckets` | JSON array of all test buckets | -- `--from `: Specify the file path to read test results from. +## CLI Usage -Example: +Install with Bun: ```bash -fairsplice save --from results/junit.xml +bunx fairsplice ``` -### Splitting test cases - -To split test cases for execution: +### Commands +**Split tests:** ```bash -fairsplice split --pattern "" [--pattern "" ...] --total --out --replace-from --replace-to [--replace-from --replace-to ] +fairsplice split --timings-file timings.json --pattern "tests/**/*.py" --total 3 --out split.json ``` -- `--pattern ""`: Pattern to match test files. Can be used multiple times to specify multiple patterns. -- `--total `: Total number of workers in the test environment. 
-- `--out `: File to write split test files to (newline separated) -- `--replace-from `: Substring to replace in the file paths (can be used multiple times) -- `--replace-to `: Replacement for the substring (can be used multiple times but must match the number of --replace-from) - -Example: - +**Convert JUnit XML to timing JSON:** ```bash -fairsplice split --pattern "test_*.py" --pattern "tests*.py" --total 3 --out split.json +fairsplice convert --from junit.xml --to timing.json ``` -## Help - -For a detailed list of commands and options, use the help command: - +**Merge timing results:** ```bash -fairsplice --help +fairsplice merge --timings-file timings.json --prefix timing- ``` -## Contributing - -Contributions are welcome! Please fork the repository and submit a pull request with your improvements. +### CLI Options -### Running locally +``` +fairsplice split + --timings-file JSON file with stored timings + --pattern Glob pattern for test files (can repeat) + --total Number of workers + --out Output JSON file + +fairsplice convert + --from JUnit XML file to read + --to Timing JSON file to write + --path-prefix Prefix to prepend to file paths + +fairsplice merge + --timings-file JSON file to store timings + --prefix Prefix to match timing JSON files +``` -Launch the development version with: +## Contributing ```bash +# Run locally bun run index.ts -``` -### Running tests - -Launch the following command to run tests: - -```bash -bun test [--watch] +# Run tests +bun test ``` ## License -Fairsplice is open-source software licensed under the MIT license. 
+MIT diff --git a/action.yml b/action.yml new file mode 100644 index 0000000..8a84ea7 --- /dev/null +++ b/action.yml @@ -0,0 +1,123 @@ +name: 'Fairsplice' +description: 'Split tests across parallel workers based on timing data' +branding: + icon: 'scissors' + color: 'blue' + +inputs: + command: + description: 'Command to run: split, convert, or merge' + required: true + timings-file: + description: 'JSON file to store/read timings (default: .fairsplice-timings.json)' + required: false + default: '.fairsplice-timings.json' + cache-key: + description: 'Cache key for storing timings (required for split and merge commands)' + required: false + # split inputs + pattern: + description: 'Glob pattern to match test files (for split)' + required: false + total: + description: 'Total number of workers (for split)' + required: false + index: + description: 'Current worker index, 0-based (for split) - outputs only this worker tests' + required: false + # convert inputs + from: + description: 'JUnit XML file to read (for convert)' + required: false + to: + description: 'Timing JSON file to write (for convert)' + required: false + path-prefix: + description: 'Prefix to prepend to file paths (for convert) - use to match paths with split pattern' + required: false + # merge inputs + prefix: + description: 'Prefix to match timing JSON files (for merge)' + required: false + +outputs: + tests: + description: 'Space-separated list of test files for the current worker (when index is provided)' + value: ${{ steps.split.outputs.tests }} + buckets: + description: 'JSON array of test buckets (when index is not provided)' + value: ${{ steps.split.outputs.buckets }} + +runs: + using: 'composite' + steps: + - name: Validate cache-key for split/merge + if: (inputs.command == 'split' || inputs.command == 'merge') && inputs.cache-key == '' + shell: bash + run: | + echo "::error::cache-key is required for split and merge commands" + exit 1 + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + 
with: + bun-version: "1.2.23" + + - name: Install dependencies + shell: bash + run: cd ${{ github.action_path }} && bun install --frozen-lockfile + + - name: Restore timings cache + if: inputs.command == 'split' || inputs.command == 'merge' + uses: actions/cache/restore@v4 + with: + path: ${{ inputs.timings-file }} + key: fairsplice-${{ inputs.cache-key }}-${{ github.repository }}-${{ github.run_id }}-${{ github.run_attempt }} + restore-keys: fairsplice-${{ inputs.cache-key }}-${{ github.repository }}- + + - name: Run split + id: split + if: inputs.command == 'split' + shell: bash + run: | + # Run fairsplice split + bun run ${{ github.action_path }}/index.ts split \ + --timings-file "${{ inputs.timings-file }}" \ + --pattern "${{ inputs.pattern }}" \ + --total "${{ inputs.total }}" \ + --out /tmp/fairsplice-buckets.json + + # Output results + if [ -n "${{ inputs.index }}" ]; then + # Extract tests for specific worker index + TESTS=$(jq -r '.[${{ inputs.index }}] | join(" ")' /tmp/fairsplice-buckets.json) + echo "tests=$TESTS" >> $GITHUB_OUTPUT + else + # Output all buckets + BUCKETS=$(cat /tmp/fairsplice-buckets.json) + echo "buckets=$BUCKETS" >> $GITHUB_OUTPUT + fi + + - name: Run convert + if: inputs.command == 'convert' + shell: bash + run: | + bun run ${{ github.action_path }}/index.ts convert \ + --from "${{ inputs.from }}" \ + --to "${{ inputs.to }}" \ + ${{ inputs.path-prefix != '' && format('--path-prefix "{0}"', inputs.path-prefix) || '' }} + + - name: Run merge + if: inputs.command == 'merge' + shell: bash + run: | + bun run ${{ github.action_path }}/index.ts merge \ + --timings-file "${{ inputs.timings-file }}" \ + --prefix "${{ inputs.prefix }}" + + - name: Save timings cache + if: inputs.command == 'merge' + uses: actions/cache/save@v4 + with: + path: ${{ inputs.timings-file }} + key: fairsplice-${{ inputs.cache-key }}-${{ github.repository }}-${{ github.run_id }}-${{ github.run_attempt }} diff --git a/bun.lockb b/bun.lockb index aa480fd..40412cb 
100755 Binary files a/bun.lockb and b/bun.lockb differ diff --git a/index.ts b/index.ts index 0f68c0f..e9c62ee 100755 --- a/index.ts +++ b/index.ts @@ -1,6 +1,7 @@ #!/usr/bin/env bun -import { save } from "./src/commands/save"; +import { convert } from "./src/commands/convert"; +import { merge } from "./src/commands/merge"; import { split } from "./src/commands/split"; import { parseArgs } from "util"; @@ -11,10 +12,23 @@ const { positionals, values } = parseArgs({ type: "boolean", short: "h", }, - // save options + // convert options from: { type: "string", }, + to: { + type: "string", + }, + ["path-prefix"]: { + type: "string", + }, + // merge options + ["timings-file"]: { + type: "string", + }, + prefix: { + type: "string", + }, // split options pattern: { type: "string", @@ -43,54 +57,94 @@ const command = positionals[2]; if (values.help || !command) { console.log(` -Usage: fairsplice [save|split] [options] +Usage: fairsplice [split|convert|merge] [options] -Make sure the environment variable FAIRSPLICE_REDIS_URL is set. +fairsplice split +---------------- +Split test files across workers based on historical timings. 
-fairsplice save ---------------- -Available options: - --from File to read test results from +Required options: + --timings-file JSON file with stored timings + --pattern Pattern to match test files (can be used multiple times) + --total Total number of workers + --out File to write split result to (JSON) -Example: fairsplice save --from results/junit.xml +Optional: + --replace-from Substring to replace in file paths (can be used multiple times) + --replace-to Replacement string (must match number of --replace-from) +Example: fairsplice split --timings-file timings.json --pattern "test_*.py" --total 3 --out split.json -fairsplice split ------------------ -Available options: - --pattern Pattern to match test files (can be used multiple times) - --total Total number of workers - --out File to write test files to (JSON) - --replace-from Substring to replace in the file paths (can be used multiple times) - --replace-to Replacement for the substring (can be used multiple times but must match the number of --replace-from) - -Example: fairsplice split --pattern "test_*.py" --pattern "tests*.py" --total 3 --out split.json + +fairsplice convert +------------------ +Convert JUnit XML to timing JSON (for a single worker). + +Required options: + --from JUnit XML file to read + --to Timing JSON file to write + +Optional: + --path-prefix Prefix to prepend to all file paths (e.g., "src/tests/") + +Example: fairsplice convert --from junit.xml --to timing.json --path-prefix "frontends/apps/e2e/" + + +fairsplice merge +---------------- +Merge timing JSON files and save to timings history. + +Required options: + --timings-file JSON file to store timing history + --prefix Prefix to match timing JSON files + +Example: fairsplice merge --timings-file timings.json --prefix timing- `); process.exit(0); } -if (!process.env.FAIRSPLICE_REDIS_URL) { - console.error( - "Please set the FAIRSPLICE_REDIS_URL environment variable to use fairsplice." 
- ); - process.exit(1); -} - -if (command === "save") { - await save({ from: values.from }); - process.exit(0); -} else if (command === "split") { +if (command === "split") { + if ( + !values["timings-file"] || + !values.pattern || + !values.total || + !values.out + ) { + console.error( + "Error: --timings-file, --pattern, --total, and --out are required for the split command." + ); + process.exit(1); + } await split({ patterns: values.pattern, total: values.total, out: values.out, replaceFrom: values["replace-from"], replaceTo: values["replace-to"], + timingsFile: values["timings-file"], }); process.exit(0); +} else if (command === "convert") { + if (!values.from || !values.to) { + console.error( + "Error: --from and --to are required for the convert command." + ); + process.exit(1); + } + await convert({ from: values.from, to: values.to, pathPrefix: values["path-prefix"] }); + process.exit(0); +} else if (command === "merge") { + if (!values["timings-file"] || !values.prefix) { + console.error( + "Error: --timings-file and --prefix are required for the merge command." + ); + process.exit(1); + } + await merge({ prefix: values.prefix, timingsFile: values["timings-file"] }); + process.exit(0); } else { console.error( - `Invalid command "${command}". Available commands: save, split.` + `Invalid command "${command}". 
Available commands: split, convert, merge.` ); process.exit(1); } diff --git a/package.json b/package.json index 48a4f43..205613a 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,6 @@ "typescript": "^5.0.0" }, "dependencies": { - "fast-xml-parser": "^4.3.4", - "redis": "^4.6.13" + "fast-xml-parser": "^4.3.4" } } diff --git a/src/backend/fileStorage.test.ts b/src/backend/fileStorage.test.ts new file mode 100644 index 0000000..89330f9 --- /dev/null +++ b/src/backend/fileStorage.test.ts @@ -0,0 +1,84 @@ +import { describe, it, expect, beforeEach, afterEach } from "bun:test"; +import { saveTimings, getTimings } from "./fileStorage"; +import { unlink } from "node:fs/promises"; + +const TEST_TIMINGS_FILE = "/tmp/fairsplice-test-timings.json"; + +describe("fileStorage", () => { + beforeEach(async () => { + // Clean up before each test + try { + await unlink(TEST_TIMINGS_FILE); + } catch { + // File might not exist + } + }); + + afterEach(async () => { + // Clean up after test + try { + await unlink(TEST_TIMINGS_FILE); + } catch { + // File might not exist + } + }); + + it("should save and retrieve timings", async () => { + const timings = { + "test1.ts": 100, + "test2.ts": 200, + "test3.ts": 300, + }; + + await saveTimings(TEST_TIMINGS_FILE, timings); + const retrieved = await getTimings(TEST_TIMINGS_FILE, [ + "test1.ts", + "test2.ts", + "test3.ts", + ]); + + expect(retrieved).toEqual(timings); + }); + + it("should return empty object for non-existent files", async () => { + const retrieved = await getTimings(TEST_TIMINGS_FILE, ["nonexistent.ts"]); + expect(retrieved).toEqual({}); + }); + + it("should average multiple timing entries", async () => { + // Save first set of timings + await saveTimings(TEST_TIMINGS_FILE, { "test.ts": 100 }); + // Save second set of timings + await saveTimings(TEST_TIMINGS_FILE, { "test.ts": 200 }); + // Save third set of timings + await saveTimings(TEST_TIMINGS_FILE, { "test.ts": 300 }); + + const retrieved = await 
getTimings(TEST_TIMINGS_FILE, ["test.ts"]); + // Average of [300, 200, 100] = 200 + expect(retrieved["test.ts"]).toBe(200); + }); + + it("should handle partial file requests", async () => { + await saveTimings(TEST_TIMINGS_FILE, { + "test1.ts": 100, + "test2.ts": 200, + }); + + const retrieved = await getTimings(TEST_TIMINGS_FILE, [ + "test1.ts", + "nonexistent.ts", + ]); + expect(retrieved).toEqual({ "test1.ts": 100 }); + }); + + it("should persist data to JSON file", async () => { + await saveTimings(TEST_TIMINGS_FILE, { "myfile.ts": 42 }); + + // Verify file exists and contains valid JSON + const content = await Bun.file(TEST_TIMINGS_FILE).text(); + const data = JSON.parse(content); + + expect(data.version).toBe(1); + expect(data.timings["myfile.ts"]).toEqual([42]); + }); +}); diff --git a/src/backend/fileStorage.ts b/src/backend/fileStorage.ts new file mode 100644 index 0000000..0f85396 --- /dev/null +++ b/src/backend/fileStorage.ts @@ -0,0 +1,72 @@ +import { average } from "../lib/average"; +import { NUMBER_OF_TIMINGS_TO_KEEP } from "../config"; + +interface TimingsData { + version: number; + timings: Record; +} + +async function readTimingsFile(filePath: string): Promise { + const file = Bun.file(filePath); + if (!(await file.exists())) { + return { version: 1, timings: {} }; + } + try { + const content = await file.text(); + return JSON.parse(content) as TimingsData; + } catch { + // If file is corrupted or invalid, start fresh + return { version: 1, timings: {} }; + } +} + +async function writeTimingsFile( + filePath: string, + data: TimingsData +): Promise { + await Bun.write(filePath, JSON.stringify(data, null, 2)); +} + +export async function saveTimings( + filePath: string, + timingByFile: Record +): Promise { + const data = await readTimingsFile(filePath); + + for (const [file, timing] of Object.entries(timingByFile)) { + // Initialize array if doesn't exist + if (!data.timings[file]) { + data.timings[file] = []; + } + + // Add new timing at the 
beginning (like Redis LPUSH) + data.timings[file].unshift(timing); + + // Keep only the last NUMBER_OF_TIMINGS_TO_KEEP timings (like Redis LTRIM) + if (data.timings[file].length > NUMBER_OF_TIMINGS_TO_KEEP) { + data.timings[file] = data.timings[file].slice( + 0, + NUMBER_OF_TIMINGS_TO_KEEP + ); + } + } + + await writeTimingsFile(filePath, data); +} + +export async function getTimings( + filePath: string, + files: string[] +): Promise> { + const data = await readTimingsFile(filePath); + + const timingByFile: Record = {}; + for (const file of files) { + const timings = data.timings[file]; + if (timings && timings.length > 0) { + timingByFile[file] = average(timings); + } + } + + return timingByFile; +} diff --git a/src/backend/redis.ts b/src/backend/redis.ts deleted file mode 100644 index 8252176..0000000 --- a/src/backend/redis.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { createClient } from "redis"; -import { average } from "../lib/average"; -import { - NUMBER_OF_TIMINGS_TO_KEEP, - REDIS_KEY_PREFIX, - REDIS_URL, -} from "../config"; - -async function getClient() { - const client = createClient({ url: REDIS_URL, socket: { tls: true } }); - await client.connect(); - return client; -} - -function getKey(file: string) { - return `${REDIS_KEY_PREFIX}:${file}`; -} - -export async function saveTimings(timingByFile: Record) { - const client = await getClient(); - const transaction = client.multi(); - for (const [file, timing] of Object.entries(timingByFile)) { - const key = getKey(file); - // first we push the new timing - transaction.lPush(key, timing.toString()); - // then we trim the list to keep only the last TIMINGS_TO_KEEP timings - transaction.lTrim(key, 0, NUMBER_OF_TIMINGS_TO_KEEP - 2); - // then we set the expiration time for the key (30 days in seconds) - transaction.expire(key, 2592000); - } - await transaction.exec(); -} - -export async function getTimings(files: string[]) { - const client = await getClient(); - // fetch the last NUMBER_OF_TIMINGS_TO_KEEP 
timings for each file - const transaction = client.multi(); - for (const file of files) { - const key = getKey(file); - transaction.lRange(key, 0, NUMBER_OF_TIMINGS_TO_KEEP - 1); - } - const results = await transaction.exec(); - - // convert results to a map of file -> average timing - const timingByFile: Record = {}; - for (const [i, file] of files.entries()) { - const result = results[i]; - if ( - typeof result === "number" || - typeof result === "string" || - result?.length === 0 || - !result - ) { - continue; - } - const timings = Array.from(result).map(Number); - const timing = average(timings); - timingByFile[file] = timing; - } - return timingByFile; -} diff --git a/src/commands/convert.ts b/src/commands/convert.ts new file mode 100644 index 0000000..cb031e6 --- /dev/null +++ b/src/commands/convert.ts @@ -0,0 +1,48 @@ +import { parseJunit } from "../lib/junit"; + +export async function convert({ + from, + to, + pathPrefix, +}: { + from: string; + to: string; + pathPrefix?: string; +}) { + // check if input file exists + const junitXmlFile = Bun.file(from); + if (!(await junitXmlFile.exists())) { + console.warn(`Input file not found: ${from}`); + console.warn(`Skipping convert (this is normal if tests were skipped or failed early)`); + return; + } + + // read junit xml file + const xmlString = await junitXmlFile.text(); + + // parse junit xml + const testCases = parseJunit(xmlString); + + // aggregate timings by file + const timingByFile: Record = {}; + for (const testCase of testCases) { + if (testCase.file.includes("..")) { + continue; + } + // Apply path prefix if provided + const filePath = pathPrefix ? 
`${pathPrefix}${testCase.file}` : testCase.file; + if (!timingByFile[filePath]) { + timingByFile[filePath] = 0; + } + timingByFile[filePath] += testCase.time; + } + + // convert to ms + for (const [file, timing] of Object.entries(timingByFile)) { + timingByFile[file] = Math.round(timing * 1000); + } + + // write timings JSON + await Bun.write(to, JSON.stringify(timingByFile, null, 2)); + console.log(`Converted ${Object.keys(timingByFile).length} test timings to ${to}`); +} diff --git a/src/commands/merge.ts b/src/commands/merge.ts new file mode 100644 index 0000000..f989de3 --- /dev/null +++ b/src/commands/merge.ts @@ -0,0 +1,42 @@ +import { Glob } from "bun"; +import { saveTimings } from "../backend/fileStorage"; + +export async function merge({ + timingsFile, + prefix, +}: { + timingsFile: string; + prefix: string; +}) { + // find all timing JSON files matching the prefix pattern + const glob = new Glob(`${prefix}*`); + const files = Array.from(glob.scanSync()); + + if (files.length === 0) { + console.warn(`No files found matching prefix: ${prefix}*`); + console.warn(`Skipping merge (this is normal if all tests failed or were skipped)`); + return; + } + + console.log(`Found ${files.length} timing files to merge:`); + files.forEach((f) => console.log(` - ${f}`)); + + // aggregate timings from all JSON files + const timingByFile: Record = {}; + + for (const file of files) { + const content = await Bun.file(file).text(); + const timings = JSON.parse(content) as Record; + + for (const [testFile, timing] of Object.entries(timings)) { + if (!timingByFile[testFile]) { + timingByFile[testFile] = 0; + } + timingByFile[testFile] += timing; + } + } + + // save merged timings + await saveTimings(timingsFile, timingByFile); + console.log(`\nMerged timings for ${Object.keys(timingByFile).length} files`); +} diff --git a/src/commands/save.ts b/src/commands/save.ts deleted file mode 100644 index 50ba728..0000000 --- a/src/commands/save.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { 
saveTimings } from "../backend/redis"; -import { parseJunit } from "../lib/junit"; - -export async function save({ from }: { from: string | undefined }) { - if (!from) { - console.warn( - "Please provide the --from option to specify the file to read test results from" - ); - process.exit(1); - } - - // read junit xml file - const junitXmlFile = Bun.file(from); - const xmlString = await junitXmlFile.text(); - - // parse junit xml - const testCases = parseJunit(xmlString); - - // aggregate timings - const timingByFile: Record<string, number> = {}; - for (let testCase of testCases) { - if (testCase.file.includes("..")) { - continue; - } - if (!timingByFile[testCase.file]) { - timingByFile[testCase.file] = 0; - } - timingByFile[testCase.file] += testCase.time; - } - - // convert to ms - for (const [file, timing] of Object.entries(timingByFile)) { - timingByFile[file] = Math.round(timing * 1000); - } - - // save timings - await saveTimings(timingByFile); - console.log( - "Timings saved for files:\n", - Object.keys(timingByFile).join("\n - ") - ); -} diff --git a/src/commands/split.ts b/src/commands/split.ts index 0a80e51..d30e4ed 100644 --- a/src/commands/split.ts +++ b/src/commands/split.ts @@ -1,5 +1,5 @@ import { Glob } from "bun"; -import { getTimings } from "../backend/redis"; +import { getTimings } from "../backend/fileStorage"; import { splitFiles } from "../lib/splitFiles"; import { DEFAULT_TIMING_IF_MISSING } from "../config"; @@ -9,18 +9,15 @@ export async function split({ replaceFrom, replaceTo, out, + timingsFile, }: { - patterns: string[] | undefined; - total: string | undefined; + patterns: string[]; + total: string; replaceFrom: string[] | undefined; replaceTo: string[] | undefined; - out: string | undefined; + out: string; + timingsFile: string; }) { - if (!patterns || !total || !out) { - console.warn("Please provide the --pattern and --total and --out flags."); - process.exit(1); - } - if (replaceFrom && replaceTo && replaceFrom.length !== replaceTo.length) { 
console.warn( "The number of --replace-from and --replace-to flags must match." @@ -47,7 +44,7 @@ export async function split({ } // get file times - const filesTimesMap = await getTimings(files); + const filesTimesMap = await getTimings(timingsFile, files); // warn if missing timings for (const file of files) { diff --git a/src/config.ts b/src/config.ts index a954339..07aa06e 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,4 +1,2 @@ -export const REDIS_KEY_PREFIX = "fairsplice:timings"; export const NUMBER_OF_TIMINGS_TO_KEEP = 10; -export const REDIS_URL = process.env.FAIRSPLICE_REDIS_URL; export const DEFAULT_TIMING_IF_MISSING = 10000; diff --git a/tests/dummy/fast.test.ts b/tests/dummy/fast.test.ts new file mode 100644 index 0000000..3041a6d --- /dev/null +++ b/tests/dummy/fast.test.ts @@ -0,0 +1,19 @@ +import { describe, it, expect } from "bun:test"; + +// Fast tests - minimal delay +describe("Fast test suite", () => { + it("should complete quickly - test 1", async () => { + await Bun.sleep(50); + expect(1 + 1).toBe(2); + }); + + it("should complete quickly - test 2", async () => { + await Bun.sleep(30); + expect(true).toBe(true); + }); + + it("should complete quickly - test 3", async () => { + await Bun.sleep(20); + expect("hello").toContain("ell"); + }); +}); diff --git a/tests/dummy/medium.test.ts b/tests/dummy/medium.test.ts new file mode 100644 index 0000000..803a5f2 --- /dev/null +++ b/tests/dummy/medium.test.ts @@ -0,0 +1,19 @@ +import { describe, it, expect } from "bun:test"; + +// Medium duration tests +describe("Medium test suite", () => { + it("should take moderate time - test 1", async () => { + await Bun.sleep(150); + expect([1, 2, 3]).toHaveLength(3); + }); + + it("should take moderate time - test 2", async () => { + await Bun.sleep(200); + expect({ a: 1 }).toHaveProperty("a"); + }); + + it("should take moderate time - test 3", async () => { + await Bun.sleep(100); + expect(Math.max(1, 2, 3)).toBe(3); + }); +}); diff --git 
a/tests/dummy/slow.test.ts b/tests/dummy/slow.test.ts new file mode 100644 index 0000000..78ba1c8 --- /dev/null +++ b/tests/dummy/slow.test.ts @@ -0,0 +1,19 @@ +import { describe, it, expect } from "bun:test"; + +// Slow tests - longer delays +describe("Slow test suite", () => { + it("should take longer - test 1", async () => { + await Bun.sleep(300); + expect(Array.isArray([])).toBe(true); + }); + + it("should take longer - test 2", async () => { + await Bun.sleep(350); + expect(typeof "string").toBe("string"); + }); + + it("should take longer - test 3", async () => { + await Bun.sleep(250); + expect(null).toBeNull(); + }); +}); diff --git a/tests/dummy/variable.test.ts b/tests/dummy/variable.test.ts new file mode 100644 index 0000000..3942179 --- /dev/null +++ b/tests/dummy/variable.test.ts @@ -0,0 +1,19 @@ +import { describe, it, expect } from "bun:test"; + +// Variable timing tests +describe("Variable timing test suite", () => { + it("should handle variable timing - quick", async () => { + await Bun.sleep(25); + expect(Number.isInteger(42)).toBe(true); + }); + + it("should handle variable timing - medium", async () => { + await Bun.sleep(175); + expect(Object.keys({ a: 1, b: 2 })).toEqual(["a", "b"]); + }); + + it("should handle variable timing - slow", async () => { + await Bun.sleep(400); + expect(new Date()).toBeInstanceOf(Date); + }); +});