diff --git a/.claude/skills/fix-ci/SKILL.md b/.claude/skills/fix-ci/SKILL.md new file mode 100644 index 0000000..502b72b --- /dev/null +++ b/.claude/skills/fix-ci/SKILL.md @@ -0,0 +1,72 @@ +--- +name: fix-ci +description: Fetches the latest GitHub Actions logs for the current branch's PR, analyzes all failures, and fixes them. Use when CI is red, a PR has failing checks, or the user says "fix ci". Requires an open PR for the current branch. +argument-hint: "[optional job name to focus on]" +allowed-tools: Read, Grep, Glob, Edit, Write, Bash +--- + +# Fix CI + +Diagnose and fix all GitHub Actions failures for the current branch's PR. + +## Step 1: Validate branch has a PR + +```bash +BRANCH=$(git branch --show-current) +PR_JSON=$(gh pr list --head "$BRANCH" --state open --json number,title,url --limit 1) +``` + +If the JSON array is empty, **stop immediately**: +> No open PR found for branch `$BRANCH`. Create a PR first. + +Otherwise extract the PR number and continue. + +## Step 2: Fetch failed logs + +```bash +PR_NUMBER=$(echo "$PR_JSON" | jq -r '.[0].number') +gh pr checks "$PR_NUMBER" +RUN_ID=$(gh run list --branch "$BRANCH" --limit 1 --json databaseId --jq '.[0].databaseId') +gh run view "$RUN_ID" +gh run view "$RUN_ID" --log-failed +``` + +Read **every line** of `--log-failed` output. For each failure note the exact file, line, and error message. + +If `$ARGUMENTS` specifies a job name, prioritize that job but still report all failures. + +## Step 3: Categorize and fix + +Work through failures in this order: + +1. **Formatting** — run auto-formatters first to clear noise +2. **Compilation errors** — must compile before lint/test +3. **Lint violations** — fix the code pattern +4. 
**Runtime / test failures** — fix source code to satisfy the test + +### Hard constraints + +- **NEVER modify test files** — fix the source code, not the tests +- **NEVER add suppressions** (`#[allow(...)]`, `// eslint-disable`, `#pragma warning disable`) +- **NEVER use `any` in TypeScript** to silence type errors +- **NEVER delete or ignore failing tests** +- **NEVER remove assertions** + +## Step 4: Loop `make ci` until green + +```bash +make ci +``` + +If it fails: read output, fix the issue (same constraints as Step 3), run again. **Keep looping until a full pass is clean.** + +If stuck on the same failure after 5 attempts, ask the user for help. + +## Step 5: Commit/Push + +Once `make ci` passes: + +1. Commit, but DO NOT MARK THE COMMIT WITH YOU AS AN AUTHOR!!! +2. Push +3. Monitor until completion or failure +4. Upon failure, go back to the start of this document diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json new file mode 100644 index 0000000..ea1a428 --- /dev/null +++ b/.config/dotnet-tools.json @@ -0,0 +1,13 @@ +{ + "version": 1, + "isRoot": true, + "tools": { + "fantomas": { + "version": "7.0.5", + "commands": [ + "fantomas" + ], + "rollForward": false + } + } +} \ No newline at end of file diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..fd62b83 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,37 @@ +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.{fs,fsx}] +indent_size = 4 + +# F# compiler diagnostics — all unused things are errors +dotnet_diagnostic.FS1182.severity = error + +# Opt-in warnings elevated to errors +dotnet_diagnostic.FS3388.severity = error +dotnet_diagnostic.FS3389.severity = error +dotnet_diagnostic.FS3390.severity = error +dotnet_diagnostic.FS3391.severity = error +dotnet_diagnostic.FS3395.severity = error +dotnet_diagnostic.FS3559.severity = error 
+dotnet_diagnostic.FS3560.severity = error +dotnet_diagnostic.FS3582.severity = error + +[*.ts] +indent_size = 2 + +[*.json] +indent_size = 2 + +[*.{yml,yaml}] +indent_size = 2 + +[*.rs] +indent_size = 4 diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index b0cfecc..4c219cc 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -10,21 +10,33 @@ jobs: runs-on: ubuntu-latest defaults: run: - working-directory: src/Nap.VsCode + working-directory: src/Napper.VsCode steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: node-version: 22 + cache: npm + cache-dependency-path: src/Napper.VsCode/package-lock.json - uses: actions/setup-dotnet@v4 with: dotnet-version: "10.0.x" + - name: Cache NuGet packages + uses: actions/cache@v4 + with: + path: ~/.nuget/packages + key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.fsproj') }} + restore-keys: ${{ runner.os }}-nuget- + - name: Install dependencies run: npm ci + - name: Format check + run: npm run format:check + - name: Lint run: npm run lint @@ -34,15 +46,38 @@ jobs: - name: Unit tests with coverage run: npm run test:unit + - name: Add CLI to PATH + run: echo "${{ github.workspace }}/src/Napper.VsCode/bin" >> "$GITHUB_PATH" + - name: E2E tests run: xvfb-run --auto-servernum npm test + - name: Extract TypeScript coverage percentage + id: ts-coverage + run: | + COVERAGE=$(npx c8 report --reporter text 2>/dev/null | grep 'All files' | awk '{print $4}' || echo "0") + echo "coverage=$COVERAGE" >> "$GITHUB_OUTPUT" + + - name: Check TypeScript coverage threshold + run: | + ACTUAL="${{ steps.ts-coverage.outputs.coverage }}" + THRESHOLD="${{ vars.TS_COVERAGE_THRESHOLD }}" + echo "TypeScript coverage: ${ACTUAL}% (threshold: ${THRESHOLD}%)" + if [ -z "$THRESHOLD" ] || [ "$THRESHOLD" = "0" ]; then + echo "No threshold set — skipping" + exit 0 + fi + if (( $(echo "$ACTUAL < $THRESHOLD" | bc -l) )); then + echo "::error::TypeScript coverage ${ACTUAL}% is below threshold ${THRESHOLD}%" + exit 
1 + fi + - name: Upload TypeScript coverage if: always() uses: actions/upload-artifact@v4 with: name: typescript-coverage - path: coverage/typescript/report/ + path: src/Napper.VsCode/coverage/ test-fsharp: name: F# Build & Tests @@ -54,17 +89,73 @@ jobs: with: dotnet-version: "10.0.x" + - name: Cache NuGet packages + uses: actions/cache@v4 + with: + path: ~/.nuget/packages + key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.fsproj') }} + restore-keys: ${{ runner.os }}-nuget- + - name: Install ReportGenerator run: dotnet tool install --global dotnet-reportgenerator-globaltool - name: Install dotnet-script run: dotnet tool install -g dotnet-script - - name: Build - run: dotnet build --nologo + - name: Restore tools + run: dotnet tool restore + + - name: Format check (Fantomas) + run: dotnet fantomas --check src/ + + - name: Restore + run: dotnet restore + + - name: Build (warnings are errors) + run: dotnet build --no-restore --nologo -warnaserror - name: Test with coverage - run: bash scripts/test-fsharp.sh + run: make test-fsharp + + - name: Extract Napper.Core coverage percentage + id: napcore-coverage + run: | + COVERAGE=$(grep -oP 'Line coverage: \K[0-9.]+' coverage/fsharp/report/Summary.txt || echo "0") + echo "coverage=$COVERAGE" >> "$GITHUB_OUTPUT" + + - name: Check Napper.Core coverage threshold + run: | + ACTUAL="${{ steps.napcore-coverage.outputs.coverage }}" + THRESHOLD="${{ vars.FSHARP_COVERAGE_THRESHOLD }}" + echo "Napper.Core coverage: ${ACTUAL}% (threshold: ${THRESHOLD}%)" + if [ -z "$THRESHOLD" ] || [ "$THRESHOLD" = "0" ]; then + echo "No threshold set — skipping" + exit 0 + fi + if (( $(echo "$ACTUAL < $THRESHOLD" | bc -l) )); then + echo "::error::Napper.Core coverage ${ACTUAL}% is below threshold ${THRESHOLD}%" + exit 1 + fi + + - name: Extract DotHttp coverage percentage + id: dothttp-coverage + run: | + COVERAGE=$(grep -oP 'Line coverage: \K[0-9.]+' coverage/dothttp/report/Summary.txt || echo "0") + echo "coverage=$COVERAGE" >> "$GITHUB_OUTPUT" + 
+ - name: Check DotHttp coverage threshold + run: | + ACTUAL="${{ steps.dothttp-coverage.outputs.coverage }}" + THRESHOLD="${{ vars.DOTHTTP_COVERAGE_THRESHOLD }}" + echo "DotHttp coverage: ${ACTUAL}% (threshold: ${THRESHOLD}%)" + if [ -z "$THRESHOLD" ] || [ "$THRESHOLD" = "0" ]; then + echo "No threshold set — skipping" + exit 0 + fi + if (( $(echo "$ACTUAL < $THRESHOLD" | bc -l) )); then + echo "::error::DotHttp coverage ${ACTUAL}% is below threshold ${THRESHOLD}%" + exit 1 + fi - name: Upload F# coverage if: always() @@ -73,6 +164,111 @@ jobs: name: fsharp-coverage path: coverage/fsharp/report/ + - name: Upload DotHttp coverage + if: always() + uses: actions/upload-artifact@v4 + with: + name: dothttp-coverage + path: coverage/dothttp/report/ + + - name: Extract Napper.Lsp coverage percentage + id: lsp-coverage + run: | + if [ -f coverage/lsp/report/Summary.txt ]; then + COVERAGE=$(grep -oP 'Line coverage: \K[0-9.]+' coverage/lsp/report/Summary.txt || echo "0") + else + COVERAGE="0" + fi + echo "coverage=$COVERAGE" >> "$GITHUB_OUTPUT" + + - name: Check Napper.Lsp coverage threshold + run: | + ACTUAL="${{ steps.lsp-coverage.outputs.coverage }}" + THRESHOLD="${{ vars.LSP_COVERAGE_THRESHOLD }}" + echo "Napper.Lsp coverage: ${ACTUAL}% (threshold: ${THRESHOLD}%)" + if [ -z "$THRESHOLD" ] || [ "$THRESHOLD" = "0" ]; then + echo "No threshold set — skipping" + exit 0 + fi + if [ "$ACTUAL" = "0" ] && grep -q 'Assemblies: 0' coverage/lsp/report/Summary.txt 2>/dev/null; then + echo "LSP tests are integration tests (subprocess) — skipping coverage threshold" + exit 0 + fi + if (( $(echo "$ACTUAL < $THRESHOLD" | bc -l) )); then + echo "::error::Napper.Lsp coverage ${ACTUAL}% is below threshold ${THRESHOLD}%" + exit 1 + fi + + - name: Upload Napper.Lsp coverage + if: always() + uses: actions/upload-artifact@v4 + with: + name: lsp-coverage + path: coverage/lsp/report/ + + test-rust: + name: Rust Build & Tests + runs-on: ubuntu-latest + defaults: + run: + working-directory: 
src/Napper.Zed + steps: + - uses: actions/checkout@v4 + + - uses: dtolnay/rust-toolchain@stable + with: + components: clippy, rustfmt + + - name: Cache Cargo registry and build + uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + src/Napper.Zed/target + key: ${{ runner.os }}-cargo-${{ hashFiles('src/Napper.Zed/Cargo.lock') }} + restore-keys: ${{ runner.os }}-cargo- + + - name: Format check + run: cargo fmt -- --check + + - name: Clippy + run: cargo clippy + + - name: Install cargo-tarpaulin + run: cargo install cargo-tarpaulin + + - name: Test with coverage + run: cargo tarpaulin --out xml html --output-dir ../../coverage/rust/report --skip-clean + + - name: Extract Rust coverage percentage + id: rust-coverage + run: | + COVERAGE=$(grep -oP 'line-rate="\K[0-9.]+' ../../coverage/rust/report/cobertura.xml 2>/dev/null || echo "0") + COVERAGE_PCT=$(echo "$COVERAGE * 100" | bc -l | xargs printf "%.2f") + echo "coverage=$COVERAGE_PCT" >> "$GITHUB_OUTPUT" + + - name: Check Rust coverage threshold + run: | + ACTUAL="${{ steps.rust-coverage.outputs.coverage }}" + THRESHOLD="${{ vars.RUST_COVERAGE_THRESHOLD }}" + echo "Rust coverage: ${ACTUAL}% (threshold: ${THRESHOLD}%)" + if [ -z "$THRESHOLD" ] || [ "$THRESHOLD" = "0" ]; then + echo "No threshold set — skipping" + exit 0 + fi + if (( $(echo "$ACTUAL < $THRESHOLD" | bc -l) )); then + echo "::error::Rust coverage ${ACTUAL}% is below threshold ${THRESHOLD}%" + exit 1 + fi + + - name: Upload Rust coverage + if: always() + uses: actions/upload-artifact@v4 + with: + name: rust-coverage + path: coverage/rust/report/ + build-website: name: Website Build runs-on: ubuntu-latest @@ -85,6 +281,8 @@ jobs: - uses: actions/setup-node@v4 with: node-version: 22 + cache: npm + cache-dependency-path: website/package-lock.json - name: Install dependencies run: npm ci diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c387f5d..538a1c2 100644 --- a/.github/workflows/release.yml +++ 
b/.github/workflows/release.yml @@ -26,7 +26,7 @@ jobs: run: echo "VERSION=${GITHUB_REF_NAME#v}" >> "$GITHUB_ENV" - name: Bump versions and push - run: bash scripts/bump-version.sh "$VERSION" --commit + run: make bump-version VERSION="$VERSION" COMMIT=true build-vsix: needs: [bump-versions] @@ -41,22 +41,22 @@ jobs: node-version: 22 - name: Install extension dependencies - working-directory: src/Nap.VsCode + working-directory: src/Napper.VsCode run: npm ci - name: Compile extension - working-directory: src/Nap.VsCode + working-directory: src/Napper.VsCode run: npx webpack --mode production - name: Package universal VSIX - working-directory: src/Nap.VsCode + working-directory: src/Napper.VsCode run: npx @vscode/vsce package --no-dependencies --skip-license - name: Upload VSIX uses: actions/upload-artifact@v4 with: name: vsix - path: src/Nap.VsCode/*.vsix + path: src/Napper.VsCode/*.vsix build-cli: needs: [bump-versions] @@ -83,7 +83,7 @@ jobs: - name: Publish CLI (${{ matrix.rid }}) run: | - dotnet publish src/Nap.Cli/Nap.Cli.fsproj \ + dotnet publish src/Napper.Cli/Napper.Cli.fsproj \ -r ${{ matrix.rid }} \ --self-contained \ -p:PublishTrimmed=true \ @@ -106,8 +106,37 @@ jobs: name: cli-${{ matrix.rid }} path: ${{ matrix.asset }} + publish-nuget: + needs: [bump-versions] + runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/v') + steps: + - uses: actions/checkout@v4 + with: + ref: main + + - uses: actions/setup-dotnet@v4 + with: + dotnet-version: "10.0.x" + + - name: Extract version from tag + run: echo "VERSION=${GITHUB_REF_NAME#v}" >> "$GITHUB_ENV" + + - name: Pack dotnet tool + run: | + dotnet pack src/Napper.Cli/Napper.Cli.fsproj \ + -c Release \ + -p:Version=${{ env.VERSION }} \ + --nologo + + - name: Push to NuGet + run: | + dotnet nuget push src/Napper.Cli/nupkg/napper.${{ env.VERSION }}.nupkg \ + --api-key ${{ secrets.NUGET_API_KEY }} \ + --source https://api.nuget.org/v3/index.json + release: - needs: [build-vsix, build-cli] + needs: 
[build-vsix, build-cli, publish-nuget] runs-on: ubuntu-latest if: startsWith(github.ref, 'refs/tags/') steps: diff --git a/.gitignore b/.gitignore index dcc3a14..50b27fb 100644 --- a/.gitignore +++ b/.gitignore @@ -12,11 +12,11 @@ dist/ out/ # VSCode extension -src/Nap.VsCode/node_modules/ -src/Nap.VsCode/dist/ -src/Nap.VsCode/out/ -src/Nap.VsCode/*.vsix -src/Nap.VsCode/.vscode-test/ +src/Napper.VsCode/node_modules/ +src/Napper.VsCode/dist/ +src/Napper.VsCode/out/ +src/Napper.VsCode/*.vsix +src/Napper.VsCode/.vscode-test/ # IDE settings .vscode/ @@ -38,6 +38,20 @@ website/_site/ examples/httpbin/advanced-report.html # Cached test specs -tests/Nap.Core.Tests/.spec-cache/ +tests/Napper.Core.Tests/.spec-cache/ examples/httpbin/all-methods-report.html + +src/Napper.Zed/target/ + +src/Napper.Zed/extension.wasm + +src/Napper.Zed/grammars/nap.wasm + +src/Napper.Zed/grammars/napenv.wasm + +*.wasm + +scripts/logs/ + +src/Napper.VsCode/.nyc_output/ diff --git a/.napenv b/.napenv new file mode 100644 index 0000000..4e7d609 --- /dev/null +++ b/.napenv @@ -0,0 +1 @@ +baseUrl = https://api.demo-ecommerce.com/v1 diff --git a/.openapi-spec.json b/.openapi-spec.json new file mode 100644 index 0000000..51859ef --- /dev/null +++ b/.openapi-spec.json @@ -0,0 +1,520 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "E-commerce API", + "version": "1.0.0", + "description": "This is an e-commerce API spec for a storefront. \nIt includes authentication, product browsing, cart, and checkout operations.\nAuth is token-based. 
Explore, test, and mock this API freely.\n" + }, + "servers": [ + { + "url": "https://api.demo-ecommerce.com/v1", + "description": "Production environment" + }, + { + "url": "https://api.dev.demo-ecommerce.com/v1", + "description": "Development environment" + } + ], + "components": { + "securitySchemes": { + "BearerAuth": { + "type": "http", + "scheme": "bearer", + "bearerFormat": "JWT" + } + }, + "schemas": { + "Product": { + "type": "object", + "required": [ + "id", + "name", + "price", + "stock", + "category" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "example": "eda5cbc1-a615-4da5-ae73-4a33a9acfb6a" + }, + "name": { + "type": "string", + "example": "Worry Management" + }, + "description": { + "type": "string", + "example": "Mr street sell would civil. People through shake southern force." + }, + "price": { + "type": "number", + "format": "float", + "example": 91.37 + }, + "category": { + "type": "string", + "example": "wrong" + }, + "image_url": { + "type": "string", + "format": "uri", + "example": "https://dummyimage.com/766x809" + }, + "stock": { + "type": "integer", + "example": 94 + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "CartItem": { + "type": "object", + "required": [ + "product_id", + "quantity" + ], + "properties": { + "product_id": { + "type": "string", + "format": "uuid" + }, + "quantity": { + "type": "integer", + "minimum": 1 + } + } + }, + "Address": { + "type": "object", + "required": [ + "line1", + "city", + "state", + "postal_code", + "country" + ], + "properties": { + "line1": { + "type": "string" + }, + "line2": { + "type": "string" + }, + "city": { + "type": "string" + }, + "state": { + "type": "string" + }, + "postal_code": { + "type": "string" + }, + "country": { + "type": "string" + } + } + }, + "Order": { + "type": "object", + "required": [ + "id", + "items", + "total_amount", + "status", + 
"created_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "items": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CartItem" + } + }, + "total_amount": { + "type": "number" + }, + "status": { + "type": "string", + "enum": [ + "pending", + "confirmed", + "shipped", + "delivered" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "paths": { + "/auth/register": { + "post": { + "summary": "Create a new user account", + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "required": [ + "email", + "password" + ], + "properties": { + "email": { + "type": "string", + "format": "email" + }, + "password": { + "type": "string", + "format": "password" + }, + "name": { + "type": "string" + } + } + } + } + } + }, + "responses": { + "201": { + "description": "User created" + }, + "400": { + "description": "Invalid input" + } + } + } + }, + "/auth/login": { + "post": { + "summary": "Login and get access token", + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "required": [ + "email", + "password" + ], + "properties": { + "email": { + "type": "string" + }, + "password": { + "type": "string" + } + } + } + } + } + }, + "responses": { + "200": { + "description": "Authenticated successfully" + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/products": { + "get": { + "summary": "List all products with filters", + "parameters": [ + { + "name": "category", + "in": "query", + "schema": { + "type": "string" + } + }, + { + "name": "search", + "in": "query", + "schema": { + "type": "string" + } + }, + { + "name": "min_price", + "in": "query", + "schema": { + "type": "number" + } + }, + { + "name": "max_price", + "in": "query", + "schema": { + "type": "number" + } + } + ], + "responses": { + "200": { + "description": "List of products", + "content": { + 
"application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Product" + } + } + } + } + } + } + } + }, + "/products/{id}": { + "get": { + "summary": "Get product details by ID", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Product details", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Product" + } + } + } + } + } + } + }, + "/cart": { + "get": { + "summary": "Get current user's cart", + "security": [ + { + "BearerAuth": [] + } + ], + "responses": { + "200": { + "description": "Your cart items", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CartItem" + } + } + } + } + } + } + } + }, + "/cart/items": { + "post": { + "summary": "Add item to cart", + "security": [ + { + "BearerAuth": [] + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CartItem" + } + } + } + }, + "responses": { + "200": { + "description": "Item added to cart" + } + } + } + }, + "/checkout": { + "post": { + "summary": "Checkout and place order", + "security": [ + { + "BearerAuth": [] + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "required": [ + "address_id", + "payment_method_id" + ], + "properties": { + "address_id": { + "type": "string" + }, + "payment_method_id": { + "type": "string" + } + } + } + } + } + }, + "responses": { + "201": { + "description": "Order created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Order" + } + } + } + } + } + } + }, + "/orders": { + "get": { + "summary": "List your past orders", + "security": [ + { + "BearerAuth": [] + } + ], + "responses": { + "200": { + "description": "Order 
history", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Order" + } + } + } + } + } + } + } + }, + "/orders/{orderId}": { + "get": { + "summary": "Get order details", + "security": [ + { + "BearerAuth": [] + } + ], + "parameters": [ + { + "name": "orderId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Order detail", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Order" + } + } + } + } + } + } + }, + "/addresses": { + "get": { + "summary": "Get your saved addresses", + "security": [ + { + "BearerAuth": [] + } + ], + "responses": { + "200": { + "description": "List of saved addresses", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Address" + } + } + } + } + } + } + }, + "post": { + "summary": "Add a new address", + "security": [ + { + "BearerAuth": [] + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Address" + } + } + } + }, + "responses": { + "201": { + "description": "Address added" + } + } + } + } + } +} \ No newline at end of file diff --git a/Claude.md b/Claude.md index 7f312e2..753fb76 100644 --- a/Claude.md +++ b/Claude.md @@ -1,6 +1,7 @@ ## Too Many Cooks You are working with many other agents. Make sure there is effective cooperation + - Register on TMC immediately - Don't edit files that are locked; lock files when editing - COMMUNICATE REGULARLY AND COORDINATE WITH OTHERS THROUGH MESSAGES @@ -16,31 +17,43 @@ You are working with many other agents. 
Make sure there is effective cooperation - **Heavy logging at all levels** - Logs are critical, even in tests - **No suppressing warnings** - Fix them properly - **Use a robust library for CLI arg parsing** - Don't use Regex -- **No REGEX matching on structured data like JSON, YAML, TOML** - Regex is only for extreme corner cases +- **No REGEX matching on structured data like JSON, .HTTP, YAML, TOML, F#, TS, etc** - Use a well-established parser. Regex is only for extreme corner cases - **Expressions over assignments** - Prefer const and immutable patterns - **Named parameters** - Use object params for functions with 1+ args - **Keep files under 450 LOC and functions under 20 LOC** - **No commented-out code** - Delete it - **No placeholders** - If incomplete, leave LOUD compilation error with TODO +### Rust + +- We will soon be inserting an LSP so keep the code loose enough that this will be easy +- Keep files under 500 LOC +- Run fmt and clippy regularly!!! + ### Typescript + +- We will soon be inserting an LSP so keep the code loose enough that this will be easy - **TypeScript strict mode** - No `any`, no implicit types, turn all lints up to error - **Regularly run the linter** - Fix lint errors IMMEDIATELY - **Decouple providers from the VSCODE SDK** - No vscode sdk use within the providers - **Ignoring lints = ⛔️ illegal** - Fix violations immediately - **No throwing** - Only return `Result` -### F# +### F# + +- **⚠️ MAXIMUM CODE SHARING — NON-NEGOTIABLE** - All F# projects (Napper.Cli, Napper.Lsp, future consumers) MUST share logic through `Napper.Core`. If code could live in `Napper.Core`, it MUST live in `Napper.Core`. NEVER duplicate parsing, types, environment resolution, logging, or any domain logic across projects. Before writing ANY new module in a consumer project, check if it belongs in `Napper.Core` first. 
- **Idiomatic F#** - **Move content out of the fsproj files and into Directory.Build.props** - **Standard F# result types** - Use the standard F# built-in result types - **Turn on F# analyzers** - Strict rules to enforce F# best practice +- **Prefer moving config from fsproj -> buildprops** avoid project config across projects ## Testing ⚠️ NEVER KILL VSCODE PROCESSES #### Rules + - **Prefer e2e tests over unit tests** - only unit tests for isolating bugs - Separate e2e tests from unit tests by file. They should not be in the same file together. - **Add more assertions** - No, that's not enough. Add more!!! @@ -52,23 +65,26 @@ You are working with many other agents. Make sure there is effective cooperation ### Automated (E2E) Testing **AUTOMATED TESTING IS BLACK BOX TESTING ONLY** -- Only test the UI **THROUGH the UI**. -- Do not run command etc. to coerce the state. -- You are testing the UI, not the code. + +- Only test the UI **THROUGH the UI**. +- Do not run command etc. to coerce the state. +- You are testing the UI, not the code. - Make assertions about the UI - not the internal state -- This is true for both the CLI and the VSIX. -- The test VSIX must call the actual, real CLI. +- This is true for both the CLI and the VSIX. +- The test VSIX must call the actual, real CLI. - VSIX tests run in actual VS Code window **Illegal VSIX testing patterns** - * - ❌ Calling internal methods like provider.updateTasks() - * - ❌ Calling provider.refresh() directly - * - ❌ Manipulating internal state directly - * - ❌ Using any method not exposed via VS Code commands - * - ❌ Using commands that should just happen as part of normal use. e.g.: `await vscode.commands.executeCommand('commandtree.refresh');` - * - ❌ `executeCommand('commandtree.addToQuick', item)` - TAP the item via the DOM!!! 
+ +- - ❌ Calling internal methods like provider.updateTasks() +- - ❌ Calling provider.refresh() directly +- - ❌ Manipulating internal state directly +- - ❌ Using any method not exposed via VS Code commands +- - ❌ Using commands that should just happen as part of normal use. e.g.: `await vscode.commands.executeCommand('commandtree.refresh');` +- - ❌ `executeCommand('commandtree.addToQuick', item)` - TAP the item via the DOM!!! ### Test First Process + - Write test that fails because of bug/missing feature - Run tests to verify that test fails because of this reason - Adjust test and repeat until you see failure for the reason above @@ -77,6 +93,7 @@ You are working with many other agents. Make sure there is effective cooperation - Repeat and fix until test passes WITHOUT changing the test **Every test MUST:** + 1. Assert on the ACTUAL OBSERVABLE BEHAVIOR (UI state, view contents, return values) 2. Fail if the feature is broken 3. Test the full flow, not just side effects like config files @@ -87,25 +104,57 @@ You are working with many other agents. 
Make sure there is effective cooperation ```typescript // ❌ ILLEGAL - asserts true unconditionally -assert.ok(true, 'Should work'); +assert.ok(true, "Should work"); // ❌ ILLEGAL - no assertion on actual behavior -try { await doSomething(); } catch { } -assert.ok(true, 'Did not crash'); +try { + await doSomething(); +} catch {} +assert.ok(true, "Did not crash"); // ❌ ILLEGAL - only checks config file, not actual UI/view behavior -writeConfig({ quick: ['task1'] }); +writeConfig({ quick: ["task1"] }); const config = readConfig(); -assert.ok(config.quick.includes('task1')); // This doesn't test the FEATURE +assert.ok(config.quick.includes("task1")); // This doesn't test the FEATURE // ❌ ILLEGAL - empty catch with success assertion -try { await command(); } catch { /* swallow */ } -assert.ok(true, 'Command ran'); +try { + await command(); +} catch { + /* swallow */ +} +assert.ok(true, "Command ran"); ``` +## Specs Structure + +The `specs/` directory contains the product specification, split by concern and by CLI vs IDE extension: + +- **`CLI-*.md`** — CLI specification and plan +- **`IDE-EXTENSION-*.md`** — Shared extension spec + VSCode-specific plan +- **`ZED-EXTENSION-PLAN.md`** — Zed-specific extension plan +- **`LSP-SPEC.md`** — Nap Language Server specification (F# binary, LSP 3.17 over stdio) +- **`LSP-PLAN.md`** — LSP implementation phases and TODO +- **`*-OPENAPI-GENERATION-*.md`** — OpenAPI generation, split by CLI and extension +- **`FILE-FORMATS-SPEC.md`** — Shared `.nap`, `.napenv`, `.naplist` format specs +- **`SCRIPTING-SPEC.md`** — F# scripting model (NapContext, NapRunner) +- **`HTTP-FILES-SPEC.md`** — .http file compatibility (converter + direct run) +- **`HTTP-FILES-PLAN.md`** — .http converter implementation phases + +Plan files end with a TODO checklist. Specs describe _what_, plans describe _how and when_. + +Extensions target **VSCode and Zed** as primary IDEs (Neovim future). 
All extensions shell out to the Nap CLI — no IDE re-implements HTTP logic. A portable **Nap Language Server (LSP)** provides completions, diagnostics, and hover across all IDEs. + ## Critical Docs +### Zed SDK + +[Zed Extension Development](https://zed.dev/docs/extensions/developing-extensions) +[Zed Language Extensions](https://zed.dev/docs/extensions/languages) +[Zed Slash Commands](https://zed.dev/docs/extensions/slash-commands) + ### Vscode SDK + [VSCode Extension API](https://code.visualstudio.com/api/) [VSCode Extension Testing API](https://code.visualstudio.com/api/extension-guides/testing) [VSCODE Language Model API](https://code.visualstudio.com/api/extension-guides/ai/language-model) @@ -119,4 +168,7 @@ https://developers.google.com/search/blog/2025/05/succeeding-in-ai-search https://developers.google.com/search/docs/fundamentals/seo-starter-guide https://studiohawk.com.au/blog/how-to-optimise-ai-overviews/ -https://about.ads.microsoft.com/en/blog/post/october-2025/optimizing-your-content-for-inclusion-in-ai-search-answers \ No newline at end of file +https://about.ads.microsoft.com/en/blog/post/october-2025/optimizing-your-content-for-inclusion-in-ai-search-answers + +Never stamp commits with this. You ARE NOT THE COAUTHOR!!! 
+Co-Authored-By: C*** \ No newline at end of file diff --git a/Directory.Build.props b/Directory.Build.props index 08850ae..e1d6d9d 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -1,9 +1,26 @@ + - 0.9.0 + 1.0.0 net10.0 true 5 - --warnon:1182 --warnon:3390 --warnon:3391 --warnon:3395 + --warnon:1182 --warnon:3388 --warnon:3389 --warnon:3390 --warnon:3391 --warnon:3395 --warnon:3559 --warnon:3560 --warnon:3582 + latest + true + MelbourneDeveloper + Copyright (c) MelbourneDeveloper 2026 + https://napperapi.dev + https://github.com/MelbourneDeveloper/napper + git + MIT + + + + all + runtime; build; native; contentfiles; analyzers + + + diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..0b9ed07 --- /dev/null +++ b/Makefile @@ -0,0 +1,416 @@ +.PHONY: build-all build-cli build-extension build-vsix build-zed bump-version clean-install dump-cli-help install-binaries package-vsix test-fsharp test-rust test-vsix test clean format lint + +SHELL := /usr/bin/env bash +.SHELLFLAGS := -euo pipefail -c + +# --- Platform detection --- +ARCH := $(shell uname -m) +OS := $(shell uname -s) + +ifeq ($(OS),Darwin) + ifeq ($(ARCH),arm64) + NAP_RID ?= osx-arm64 + else ifeq ($(ARCH),x86_64) + NAP_RID ?= osx-x64 + else + $(error Unsupported arch: $(ARCH)) + endif +else ifeq ($(OS),Linux) + NAP_RID ?= linux-x64 +else + $(error Unsupported OS: $(OS)) +endif + +EXT_BIN := src/Napper.VsCode/bin +LOG_DIR := .commandtree/logs +FSHARP_COVERAGE_DIR := coverage/fsharp +DOTHTTP_COVERAGE_DIR := coverage/dothttp +LSP_COVERAGE_DIR := coverage/lsp +TS_COVERAGE_DIR := coverage/typescript +RUST_COVERAGE_DIR := coverage/rust + +# ============================================================ +# Build targets +# ============================================================ + +build-cli: + @echo "==> Building CLI for $(NAP_RID)..." 
+	dotnet publish src/Napper.Cli/Napper.Cli.fsproj \
+		-r "$(NAP_RID)" \
+		--self-contained \
+		-p:PublishTrimmed=true \
+		-p:PublishSingleFile=true \
+		-o "out/$(NAP_RID)" \
+		--nologo
+	@echo "==> CLI built → out/$(NAP_RID)/"
+	@mkdir -p "$(EXT_BIN)"
+	cp "out/$(NAP_RID)/napper" "$(EXT_BIN)/napper"
+	@echo "==> Copied CLI → $(EXT_BIN)/"
+	@mkdir -p "$(HOME)/.local/bin"
+	cp "out/$(NAP_RID)/napper" "$(HOME)/.local/bin/napper"
+	chmod +x "$(HOME)/.local/bin/napper"
+	@echo "==> Installed CLI → ~/.local/bin/napper"
+	@EXPECTED_VERSION=$$(sed -n 's/.*<Version>\(.*\)<\/Version>.*/\1/p' Directory.Build.props); \
+	ACTUAL_VERSION=$$("out/$(NAP_RID)/napper" --version); \
+	if [ "$$ACTUAL_VERSION" != "$$EXPECTED_VERSION" ]; then \
+		echo "ERROR: Version mismatch — expected $$EXPECTED_VERSION, got $$ACTUAL_VERSION"; \
+		exit 1; \
+	fi; \
+	echo "==> CLI version verified: $$ACTUAL_VERSION"
+
+build-extension:
+	@echo "==> Compiling VSCode extension..."
+	cd src/Napper.VsCode && npm ci && npx webpack --mode production
+	@echo "==> Extension compiled"
+
+build-vsix: build-cli build-extension
+	@echo "==> Packaging universal VSIX..."
+	cd src/Napper.VsCode && npx @vscode/vsce package --no-dependencies --skip-license
+	@echo "==> VSIX packaged (universal — no CLI bundled)"
+	@VSIX_FILE=$$(ls -1 src/Napper.VsCode/*.vsix 2>/dev/null | head -1); \
+	[ -n "$$VSIX_FILE" ] && echo "    VSIX: $$VSIX_FILE"; \
+	echo "    CLI installed at: ~/.local/bin/napper (for local use)"
+
+package-vsix: build-extension
+	@echo "==> Packaging universal VSIX..."
+	cd src/Napper.VsCode && npx @vscode/vsce package --no-dependencies --skip-license
+	@echo "==> VSIX packaged"
+
+clean:
+	@echo "==> Cleaning all build artifacts..."
+ rm -rf out/ + rm -rf src/Napper.Core/bin/ src/Napper.Core/obj/ + rm -rf src/Napper.Cli/bin/ src/Napper.Cli/obj/ + rm -rf tests/Napper.Core.Tests/bin/ tests/Napper.Core.Tests/obj/ + rm -rf src/Napper.VsCode/bin/ + rm -rf src/Napper.VsCode/dist/ + rm -rf src/Napper.VsCode/out/ + rm -f src/Napper.VsCode/*.vsix + rm -rf coverage/ + @echo "==> Clean complete" + +build-all: clean build-cli + @echo "==> Building VS Code extension..." + cd src/Napper.VsCode && npm ci && npx webpack --mode production && npm run compile:tests + @echo "==> Extension compiled" + @echo "==> Packaging VSIX (universal)..." + cd src/Napper.VsCode && npx @vscode/vsce package --no-dependencies --skip-license + @VSIX_FILE=$$(ls -1 src/Napper.VsCode/*.vsix 2>/dev/null | head -1); \ + echo ""; \ + echo "==> BUILD COMPLETE"; \ + echo " CLI: ~/.local/bin/napper"; \ + echo " CLI: $(EXT_BIN)/napper"; \ + [ -n "$$VSIX_FILE" ] && echo " VSIX: $$VSIX_FILE"; \ + echo ""; \ + napper --help | head -1 + +build-zed: + @echo "==> Checking prerequisites..." + @command -v cargo &>/dev/null || { echo "ERROR: cargo not found. Install Rust: https://rustup.rs"; exit 1; } + @command -v tree-sitter &>/dev/null || { echo "ERROR: tree-sitter CLI not found. Install: npm install -g tree-sitter-cli"; exit 1; } + @if ! rustup target list --installed 2>/dev/null | grep -q wasm32-wasi; then \ + echo "==> Adding wasm32-wasip1 target..."; \ + rustup target add wasm32-wasip1; \ + fi + @echo "==> Generating Tree-sitter parsers..." + @for grammar in nap naplist napenv; do \ + echo " $$grammar"; \ + (cd src/Napper.Zed/grammars/tree-sitter-$$grammar && tree-sitter generate); \ + done + @echo "==> Building Rust extension (WASM)..." + cd src/Napper.Zed && cargo build --release --target wasm32-wasip1 + @echo "==> Running clippy..." + cd src/Napper.Zed && cargo clippy --target wasm32-wasip1 + @echo "==> Build complete" + @echo "" + @echo "To test in Zed:" + @echo " 1. Open Zed" + @echo " 2. Run: zed: install dev extension" + @echo " 3. 
Select: $$(pwd)/src/Napper.Zed"
+
+# ============================================================
+# Version management
+# ============================================================
+
+# Usage: make bump-version VERSION=0.2.0 [COMMIT=true]
+bump-version:
+ifndef VERSION
+	$(error Usage: make bump-version VERSION=x.y.z [COMMIT=true])
+endif
+	@echo "==> Bumping all projects to v$(VERSION)"
+	sed -i.bak 's|<Version>.*</Version>|<Version>$(VERSION)</Version>|' Directory.Build.props
+	rm -f Directory.Build.props.bak
+	@echo "    Directory.Build.props → $(VERSION)"
+	cd src/Napper.VsCode && npm version "$(VERSION)" --no-git-tag-version --allow-same-version
+	@echo "    src/Napper.VsCode/package.json → $(VERSION)"
+	@if [ -f Cargo.toml ]; then \
+		sed -i.bak 's/^version = ".*"/version = "$(VERSION)"/' Cargo.toml; \
+		rm -f Cargo.toml.bak; \
+		echo "    Cargo.toml → $(VERSION)"; \
+	fi
+	@echo "==> All projects bumped to v$(VERSION)"
+ifeq ($(COMMIT),true)
+	@echo "==> Committing and pushing version bump..."
+	@if [ -n "$${CI:-}" ]; then \
+		git config user.name "github-actions[bot]"; \
+		git config user.email "github-actions[bot]@users.noreply.github.com"; \
+	fi
+	git add Directory.Build.props src/Napper.VsCode/package.json src/Napper.VsCode/package-lock.json
+	@[ -f Cargo.toml ] && git add Cargo.toml || true
+	git commit -m "release: update version to v$(VERSION)"
+	git push
+	@echo "==> Committed and pushed v$(VERSION)"
+endif
+
+# ============================================================
+# Install
+# ============================================================
+
+install-binaries: build-cli
+	@echo "==> Binaries installed:"
+	@echo "    CLI: ~/.local/bin/napper"
+	@echo "    CLI: $(EXT_BIN)/napper"
+
+clean-install-vsix: build-all
+	@VSIX_FILE=$$(ls -1 src/Napper.VsCode/*.vsix 2>/dev/null | head -1); \
+	if [ -z "$$VSIX_FILE" ]; then \
+		echo "ERROR: No VSIX file found after build"; \
+		exit 1; \
+	fi; \
+	echo "==> Installing VSIX: $$VSIX_FILE"; \
+	code --install-extension "src/Napper.VsCode/$$VSIX_FILE"
--force + @echo "" + @echo "==> DONE — restart VS Code to load the new extension" + +# ============================================================ +# Test targets +# ============================================================ + +test-fsharp: + @echo "=========================================" + @echo " Napper.Core Tests + Coverage" + @echo "=========================================" + mkdir -p "$(LOG_DIR)" + rm -rf "$(FSHARP_COVERAGE_DIR)" + mkdir -p "$(FSHARP_COVERAGE_DIR)" + @echo "==> Running Napper.Core tests with coverage..." + dotnet test src/Napper.Core.Tests --nologo \ + --settings src/Napper.Core.Tests/coverage.runsettings \ + --results-directory "$(FSHARP_COVERAGE_DIR)/raw" \ + --logger "console;verbosity=detailed" \ + -- RunConfiguration.FailFastEnabled=true 2>&1 | tee "$(LOG_DIR)/test-fsharp-core.log" + @echo "==> Generating Napper.Core coverage report..." + reportgenerator \ + -reports:"$(FSHARP_COVERAGE_DIR)/raw/*/coverage.cobertura.xml" \ + -targetdir:"$(FSHARP_COVERAGE_DIR)/report" \ + -reporttypes:"Html;TextSummary;Cobertura;lcov" + @echo "" + @echo "=== Napper.Core Coverage Summary ===" + @cat "$(FSHARP_COVERAGE_DIR)/report/Summary.txt" + @echo "" + @echo "=========================================" + @echo " DotHttp Tests + Coverage" + @echo "=========================================" + rm -rf "$(DOTHTTP_COVERAGE_DIR)" + mkdir -p "$(DOTHTTP_COVERAGE_DIR)" + @echo "==> Running DotHttp tests with coverage..." + dotnet test src/DotHttp.Tests --nologo \ + --settings src/DotHttp.Tests/coverage.runsettings \ + --results-directory "$(DOTHTTP_COVERAGE_DIR)/raw" \ + --logger "console;verbosity=detailed" \ + -- RunConfiguration.FailFastEnabled=true 2>&1 | tee "$(LOG_DIR)/test-dothttp.log" + @echo "==> Generating DotHttp coverage report..." 
+ reportgenerator \ + -reports:"$(DOTHTTP_COVERAGE_DIR)/raw/*/coverage.cobertura.xml" \ + -targetdir:"$(DOTHTTP_COVERAGE_DIR)/report" \ + -reporttypes:"Html;TextSummary;Cobertura;lcov" + @echo "" + @echo "=== DotHttp Coverage Summary ===" + @cat "$(DOTHTTP_COVERAGE_DIR)/report/Summary.txt" + @echo "" + @echo "=========================================" + @echo " Napper.Lsp Tests + Coverage" + @echo "=========================================" + rm -rf "$(LSP_COVERAGE_DIR)" + mkdir -p "$(LSP_COVERAGE_DIR)" + @echo "==> Running Napper.Lsp tests with coverage..." + dotnet test src/Napper.Lsp.Tests --nologo \ + --settings src/Napper.Lsp.Tests/coverage.runsettings \ + --results-directory "$(LSP_COVERAGE_DIR)/raw" \ + --logger "console;verbosity=detailed" \ + -- RunConfiguration.FailFastEnabled=true 2>&1 | tee "$(LOG_DIR)/test-lsp.log" + @echo "==> Generating Napper.Lsp coverage report..." + reportgenerator \ + -reports:"$(LSP_COVERAGE_DIR)/raw/*/coverage.cobertura.xml" \ + -targetdir:"$(LSP_COVERAGE_DIR)/report" \ + -reporttypes:"Html;TextSummary;Cobertura;lcov" + @echo "" + @echo "=== Napper.Lsp Coverage Summary ===" + @cat "$(LSP_COVERAGE_DIR)/report/Summary.txt" + +test-rust: + @echo "=========================================" + @echo " Rust Tests + Coverage (Napper.Zed)" + @echo "=========================================" + mkdir -p "$(LOG_DIR)" + rm -rf "$(RUST_COVERAGE_DIR)" + mkdir -p "$(RUST_COVERAGE_DIR)" + @echo "==> Running Rust checks..." + cargo fmt --manifest-path src/Napper.Zed/Cargo.toml -- --check 2>&1 | tee "$(LOG_DIR)/test-rust-fmt.log" + cargo clippy --manifest-path src/Napper.Zed/Cargo.toml 2>&1 | tee "$(LOG_DIR)/test-rust-clippy.log" + @echo "==> Running Rust tests with coverage..." 
+ cd src/Napper.Zed && cargo tarpaulin --out html lcov xml --output-dir "../../$(RUST_COVERAGE_DIR)/report" --skip-clean 2>&1 | tee "../../$(LOG_DIR)/test-rust.log" + @echo "" + @echo "=== Rust Coverage Summary ===" + @LINE_RATE=$$(sed -n 's/.*line-rate="\([0-9.]*\)".*/\1/p' "$(RUST_COVERAGE_DIR)/report/cobertura.xml" 2>/dev/null | head -1); \ + LINE_RATE=$${LINE_RATE:-0}; \ + echo " Line coverage: $$(echo "$$LINE_RATE * 100" | bc -l | xargs printf "%.1f")%" + +test-vsix: build-cli build-extension + @echo "=========================================" + @echo " TypeScript Tests + Coverage" + @echo "=========================================" + mkdir -p "$(LOG_DIR)" + rm -rf "$(TS_COVERAGE_DIR)" + mkdir -p "$(TS_COVERAGE_DIR)" + cd src/Napper.VsCode && npm run compile && npm run compile:tests + @echo "==> Running unit tests..." + cd src/Napper.VsCode && NODE_V8_COVERAGE="../../$(TS_COVERAGE_DIR)/tmp" \ + npx mocha out/test/unit/**/*.test.js --ui tdd --timeout 5000 2>&1 | tee "../../$(LOG_DIR)/test-vsix-unit.log" + @echo "==> Running e2e tests..." + cd src/Napper.VsCode && NODE_V8_COVERAGE="../../$(TS_COVERAGE_DIR)/tmp" \ + npx vscode-test 2>&1 | tee "../../$(LOG_DIR)/test-vsix-e2e.log" + @echo "==> Generating combined TypeScript coverage report..." 
+ cd src/Napper.VsCode && npx c8 report \ + --temp-directory "../../$(TS_COVERAGE_DIR)/tmp" \ + --report-dir "../../$(TS_COVERAGE_DIR)/report" \ + --reporter html --reporter text --reporter lcov 2>&1 | tee "../../$(LOG_DIR)/test-vsix-coverage.log" + +test: test-fsharp test-rust test-vsix + @echo "" + @echo "=========================================" + @echo " Coverage Reports" + @echo "=========================================" + @echo " Napper.Core: $(FSHARP_COVERAGE_DIR)/report/index.html" + @echo " DotHttp: $(DOTHTTP_COVERAGE_DIR)/report/index.html" + @echo " Rust: $(RUST_COVERAGE_DIR)/report/index.html" + @echo " TypeScript: $(TS_COVERAGE_DIR)/report/index.html" + @echo "=========================================" + +# ============================================================ +# Format & Lint +# ============================================================ + +format: + @echo "==> F# (Fantomas)..." + dotnet fantomas src/ + @echo "==> TypeScript (Prettier)..." + cd src/Napper.VsCode && npx prettier --write "src/**/*.ts" + @echo "==> Rust (cargo fmt)..." + cargo fmt --manifest-path src/Napper.Zed/Cargo.toml + @echo "==> All projects formatted" + +lint: + @echo "==> F# build (warnings as errors)..." + dotnet build --nologo -warnaserror + @echo "==> TypeScript (ESLint)..." + cd src/Napper.VsCode && npm run lint + @echo "==> Rust (clippy)..." 
+ cargo clippy --manifest-path src/Napper.Zed/Cargo.toml + @echo "==> All projects linted" + +# ============================================================ +# Docs +# ============================================================ + +dump-cli-help: + @CLI_PATH=$$(command -v napper 2>/dev/null || true); \ + if [ -z "$$CLI_PATH" ]; then \ + echo "napper not found on PATH — building first..."; \ + $(MAKE) build-cli; \ + CLI_PATH="$(HOME)/.local/bin/napper"; \ + fi; \ + echo "==> Capturing CLI help output from $$CLI_PATH..."; \ + HELP_OUTPUT=$$($$CLI_PATH help 2>&1); \ + mkdir -p docs; \ + { \ + echo '# Nap CLI Reference'; \ + echo ''; \ + echo '> Auto-generated from `nap help`. Run `make dump-cli-help` to regenerate.'; \ + echo ''; \ + echo '## Help Output'; \ + echo ''; \ + echo '```'; \ + echo "$$HELP_OUTPUT"; \ + echo '```'; \ + echo ''; \ + echo '## Commands'; \ + echo ''; \ + echo '### `nap run `'; \ + echo ''; \ + echo 'Run a `.nap` file, `.naplist` playlist, or an entire folder of requests.'; \ + echo ''; \ + echo '```sh'; \ + echo '# Single request'; \ + echo 'nap run ./users/get-user.nap'; \ + echo ''; \ + echo '# With variable overrides'; \ + echo 'nap run ./users/get-user.nap --var userId=99'; \ + echo ''; \ + echo '# Run all .nap files in a folder (sorted by filename)'; \ + echo 'nap run ./users/'; \ + echo ''; \ + echo '# Run a playlist'; \ + echo 'nap run ./smoke.naplist'; \ + echo ''; \ + echo '# With a named environment'; \ + echo 'nap run ./smoke.naplist --env staging'; \ + echo ''; \ + echo '# Output as JUnit XML (for CI)'; \ + echo 'nap run ./smoke.naplist --output junit'; \ + echo ''; \ + echo '# Output as JSON'; \ + echo 'nap run ./smoke.naplist --output json'; \ + echo '```'; \ + echo ''; \ + echo '### `nap check `'; \ + echo ''; \ + echo 'Validate the syntax of a `.nap` or `.naplist` file without executing it.'; \ + echo ''; \ + echo '```sh'; \ + echo 'nap check ./users/get-user.nap'; \ + echo 'nap check ./smoke.naplist'; \ + echo '```'; \ + echo 
''; \ + echo '### `nap generate openapi --output-dir `'; \ + echo ''; \ + echo 'Generate `.nap` files from an OpenAPI specification.'; \ + echo ''; \ + echo '```sh'; \ + echo 'nap generate openapi ./openapi.json --output-dir ./tests'; \ + echo 'nap generate openapi ./openapi.json --output-dir ./tests --output json'; \ + echo '```'; \ + echo ''; \ + echo '### `nap help`'; \ + echo ''; \ + echo 'Display the help message. Also available as `--help` or `-h`.'; \ + echo ''; \ + echo '## Options'; \ + echo ''; \ + echo '| Option | Description |'; \ + echo '|---------------------|---------------------------------------------------|'; \ + echo '| `--env ` | Load a named environment file (`.napenv.`) |'; \ + echo '| `--var ` | Override a variable (repeatable) |'; \ + echo '| `--output ` | Output format: `pretty` (default), `junit`, `json`, `ndjson` |'; \ + echo '| `--output-dir `| Output directory for generate command |'; \ + echo '| `--verbose` | Enable debug-level logging |'; \ + echo ''; \ + echo '## Exit Codes'; \ + echo ''; \ + echo '| Code | Meaning |'; \ + echo '|------|--------------------------------------------------|'; \ + echo '| 0 | All assertions passed |'; \ + echo '| 1 | One or more assertions failed |'; \ + echo '| 2 | Runtime error (network, script error, parse error) |'; \ + } > docs/cli-reference.md; \ + echo "==> Written to docs/cli-reference.md" diff --git a/Nap.slnx b/Nap.slnx deleted file mode 100644 index 8965ddf..0000000 --- a/Nap.slnx +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - diff --git a/Napper.slnx b/Napper.slnx new file mode 100644 index 0000000..7d0d493 --- /dev/null +++ b/Napper.slnx @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/README.md b/README.md index d37208a..2e0547d 100644 --- a/README.md +++ b/README.md @@ -30,12 +30,13 @@ Everything you need for API testing. Nothing you don't. -- **CLI First** — The command line is the product. Run requests, execute test suites, and integrate with CI/CD pipelines from your terminal. 
-- **VS Code Native** — Full extension with syntax highlighting, request explorer, environment switching, and Test Explorer integration. Never leave your editor. -- **F# and C# Scripting** — Full power of F# and C# for pre/post request hooks. Extract tokens, build dynamic payloads, orchestrate complex flows with the entire .NET ecosystem. -- **Declarative Assertions** — Assert on status codes, JSON paths, headers, and response times with a clean, readable syntax. No scripting required for simple checks. -- **Composable Playlists** — Chain requests into test suites with `.naplist` files. Nest playlists, reference folders, pass variables between steps. -- **Plain Text, Git Friendly** — Every request is a `.nap` file. Every environment is a `.napenv` file. Version control everything. No binary blobs, no lock-in. +- **CLI First** (`cli-run`) — The command line is the product. Run requests, execute test suites, and integrate with CI/CD pipelines from your terminal. +- **VS Code Native** (`vscode-extension`) — Full extension with syntax highlighting (`vscode-syntax`), request explorer (`vscode-explorer`), environment switching (`vscode-env-switcher`), and Test Explorer integration (`vscode-test-explorer`). Never leave your editor. +- **F# and C# Scripting** (`script-fsx`, `script-csx`) — Full power of F# and C# for pre/post request hooks. Extract tokens, build dynamic payloads, orchestrate complex flows with the entire .NET ecosystem. +- **Declarative Assertions** (`nap-assert`) — Assert on status codes (`assert-status`), JSON paths (`assert-equals`, `assert-exists`), headers (`assert-contains`), and response times (`assert-lt`) with a clean, readable syntax. No scripting required for simple checks. +- **Composable Playlists** (`naplist-file`) — Chain requests into test suites with `.naplist` files. Nest playlists (`naplist-nested`), reference folders (`naplist-folder-step`), pass variables between steps (`naplist-var-scope`). 
+- **OpenAPI Import** (`openapi-generate`) — Generate test files from any OpenAPI spec. Point it at a file, and Napper creates `.nap` files with requests, headers, bodies, and assertions. Optionally enhance with AI via GitHub Copilot (`vscode-openapi-ai`). +- **Plain Text, Git Friendly** (`nap-file`) — Every request is a `.nap` file. Every environment is a `.napenv` file (`env-file`). Version control everything. No binary blobs, no lock-in. ## Quick Start @@ -51,7 +52,7 @@ Download from the [latest release](https://github.com/MelbourneDeveloper/napper/ ## How do you use Napper? -### Minimal request +### Minimal request (`nap-minimal`) A `.nap` file can be as simple as one line: @@ -59,7 +60,7 @@ A `.nap` file can be as simple as one line: GET https://httpbin.org/get ``` -### POST with body and assertions +### POST with body and assertions (`nap-body`, `nap-assert`) ``` [request] @@ -85,7 +86,7 @@ body.title = Nap Integration Test body.userId = {{userId}} ``` -### Full request with metadata and scripting +### Full request with metadata and scripting (`nap-full`) ``` [meta] @@ -130,15 +131,15 @@ napper run ./tests/ --env staging --output junit ## What file formats does Napper use? 
-| Extension | Purpose | Example | -|-----------|---------|---------| -| `.nap` | Single HTTP request with optional assertions and scripts | `get-users.nap` | -| `.naplist` | Ordered playlist of steps (requests, scripts, nested playlists) | `smoke.naplist` | -| `.napenv` | Environment variables (base config, checked into git) | `.napenv` | -| `.napenv.local` | Local secrets (gitignored) | `.napenv.local` | -| `.napenv.` | Named environment | `.napenv.staging` | -| `.fsx` | F# scripts for pre/post hooks and orchestration | `setup.fsx` | -| `.csx` | C# scripts for pre/post hooks and orchestration | `setup.csx` | +| Extension | Spec ID | Purpose | Example | +|-----------|---------|---------|---------| +| `.nap` | `nap-file` | Single HTTP request with optional assertions and scripts | `get-users.nap` | +| `.naplist` | `naplist-file` | Ordered playlist of steps (requests, scripts, nested playlists) | `smoke.naplist` | +| `.napenv` | `env-base` | Environment variables (base config, checked into git) | `.napenv` | +| `.napenv.local` | `env-local` | Local secrets (gitignored) | `.napenv.local` | +| `.napenv.` | `env-named` | Named environment | `.napenv.staging` | +| `.fsx` | `script-fsx` | F# scripts for pre/post hooks and orchestration | `setup.fsx` | +| `.csx` | `script-csx` | C# scripts for pre/post hooks and orchestration | `setup.csx` | ### Playlists @@ -158,7 +159,7 @@ description = Full create-read-update-delete lifecycle for posts ../scripts/teardown.fsx ``` -### Environments +### Environments (`env-resolution`) **`.napenv`** (base, checked into git): ``` @@ -179,26 +180,72 @@ napper run ./smoke.naplist --env staging ``` Variable priority (highest wins): -1. `--var key=value` CLI flags -2. `.napenv.local` -3. `.napenv.` (named environment) -4. `.napenv` (base) -5. `[vars]` in `.nap`/`.naplist` files +1. `--var key=value` CLI flags (`cli-var`) +2. `.napenv.local` (`env-local`) +3. `.napenv.` named environment (`env-named`) +4. `.napenv` base (`env-base`) +5. 
`[vars]` in `.nap`/`.naplist` files (`nap-vars`) + +## OpenAPI Import + +Generate `.nap` test files automatically from any OpenAPI specification. Available from the CLI and the VS Code extension. + +### From the CLI + +```sh +# Generate from a local spec file +napper generate openapi ./petstore.json --output-dir ./tests + +# Output as JSON (for programmatic use) +napper generate openapi ./spec.yaml --output-dir ./tests --output json +``` + +### From VS Code + +The extension provides two commands (accessible via the Command Palette): + +- **Napper: Import OpenAPI from URL** — Enter a URL to an OpenAPI spec (e.g. `https://petstore3.swagger.io/api/v3/openapi.json`). The extension downloads the spec, generates `.nap` files, and creates a `.naplist` playlist. +- **Napper: Import OpenAPI from File** — Select a local OpenAPI spec file (JSON or YAML) and an output folder. + +Both commands prompt you to choose between basic generation or AI-enhanced generation (requires GitHub Copilot). AI enhancement adds smarter assertions, realistic test data, and reorders the playlist for logical test flow. + +### What gets generated + +| File | Purpose | +|------|---------| +| `01_get-users.nap`, `02_post-users.nap`, ... | One `.nap` file per API endpoint with request, headers, body, and assertions | +| `api-name.naplist` | Playlist referencing all generated files in order | +| `.napenv` | Environment file with the API base URL | + +### AI Enhancement (Optional) + +When GitHub Copilot is available, you can opt for AI-enhanced generation which: + +- Adds semantic assertions beyond basic status checks (e.g. `body.email contains @`) +- Generates realistic test data for request bodies +- Reorders the playlist for logical flow (auth first, then CRUD operations) + +If Copilot is not available, a warning is shown and basic generation proceeds normally. 
## CLI Reference ``` Usage: - napper run Run a .nap file, .naplist playlist, or folder - napper check Validate a .nap or .naplist file - napper help Show this help + napper run Run a .nap file, .naplist playlist, or folder (cli-run) + napper check Validate a .nap or .naplist file (cli-check) + napper generate openapi --output-dir Generate .nap files from OpenAPI spec (cli-generate) + napper help Show this help Options: - --env Environment name (loads .napenv.) - --var Variable override (repeatable) - --output Output: pretty (default), junit, json + --env Environment name (loads .napenv.) (cli-env) + --var Variable override (repeatable) (cli-var) + --output Output: pretty, junit, json, ndjson (cli-output) + --output-dir Output directory for generate command (cli-output-dir) + --verbose Enable debug-level logging (cli-verbose) ``` +### Exit Codes (`cli-exit-codes`) + | Exit Code | Meaning | |-----------|---------| | 0 | All assertions passed | @@ -212,9 +259,10 @@ Options: | CLI-first design | Yes | No | GUI-first | No CLI | | VS Code integration | Native | Separate app | Separate app | Built-in | | Git-friendly files | Yes | JSON blobs | Yes | Yes | +| OpenAPI import | URL + file + AI | Import only | Import only | No | | Assertions | Declarative + scripts | JS scripts | JS scripts | None | | Full scripting language | F# + C# (.fsx/.csx) | Sandboxed JS | Sandboxed JS | None | -| CI/CD output formats | JUnit, TAP, JSON | Via Newman | Via CLI | None | +| CI/CD output formats | JUnit, JSON, NDJSON | Via Newman | Via CLI | None | | Test Explorer | Native | No | No | No | | Free & open source | Yes | Freemium | Yes | Yes | | No account required | Yes | Account needed | Yes | Yes | diff --git a/docs/cli-reference.md b/docs/cli-reference.md deleted file mode 100644 index 0fb26cc..0000000 --- a/docs/cli-reference.md +++ /dev/null @@ -1,77 +0,0 @@ -# Nap CLI Reference - -> Auto-generated from `nap help`. Run `scripts/dump-cli-help.sh` to regenerate. 
- -## Help Output - -``` -Nap — API testing tool - -Usage: - nap run Run a .nap file, .naplist playlist, or folder - nap check Validate a .nap or .naplist file - nap help Show this help - -Options: - --env Environment name (loads .napenv.) - --var Variable override (repeatable) - --output Output: pretty (default), junit, json -``` - -## Commands - -### `nap run ` - -Run a `.nap` file, `.naplist` playlist, or an entire folder of requests. - -```sh -# Single request -nap run ./users/get-user.nap - -# With variable overrides -nap run ./users/get-user.nap --var userId=99 - -# Run all .nap files in a folder (sorted by filename) -nap run ./users/ - -# Run a playlist -nap run ./smoke.naplist - -# With a named environment -nap run ./smoke.naplist --env staging - -# Output as JUnit XML (for CI) -nap run ./smoke.naplist --output junit - -# Output as JSON -nap run ./smoke.naplist --output json -``` - -### `nap check ` - -Validate the syntax of a `.nap` or `.naplist` file without executing it. - -```sh -nap check ./users/get-user.nap -nap check ./smoke.naplist -``` - -### `nap help` - -Display the help message. Also available as `--help` or `-h`. - -## Options - -| Option | Description | -|---------------------|---------------------------------------------------| -| `--env ` | Load a named environment file (`.napenv.`) | -| `--var ` | Override a variable (repeatable) | -| `--output ` | Output format: `pretty` (default), `junit`, `json`| - -## Exit Codes - -| Code | Meaning | -|------|--------------------------------------------------| -| 0 | All assertions passed | -| 1 | One or more assertions failed | -| 2 | Runtime error (network, script error, parse error) | diff --git a/docs/file-formats.md b/docs/file-formats.md deleted file mode 100644 index 4e51284..0000000 --- a/docs/file-formats.md +++ /dev/null @@ -1,334 +0,0 @@ -# Nap File Formats - -Nap uses plain-text, TOML-inspired files to define requests, playlists, environments, and scripts. 
All files are git-friendly and work identically from the CLI and the VSCode extension. - ---- - -## `.nap` — Request Files - -Each `.nap` file defines a single HTTP request with optional metadata, variables, headers, body, assertions, and script hooks. - -### Minimal Request - -The simplest `.nap` file is just an HTTP method and URL on one line: - -```nap -GET https://api.example.com/users -``` - -This is the **shorthand format** — no sections needed. - -### Full Format - -```nap -[meta] -name = "Get user by ID" -description = "Fetches a single user and asserts shape" -tags = ["users", "smoke"] - -[vars] -userId = "42" - -[request] -method = GET -url = https://api.example.com/users/{{userId}} - -[request.headers] -Authorization = Bearer {{token}} -Accept = application/json - -[request.body] -content-type = application/json -""" -{ - "name": "Alice", - "email": "alice@example.com" -} -""" - -[assert] -status = 200 -body.id = {{userId}} -body.name exists -headers.Content-Type contains "json" -duration < 500ms - -[script] -pre = ./scripts/auth.fsx -post = ./scripts/validate-user.fsx -``` - -### Sections - -#### `[meta]` (optional) - -Human-readable metadata. Not used during execution. - -| Field | Description | -|---------------|--------------------------------------| -| `name` | Display name for the request | -| `description` | Longer description | -| `tags` | Array of tags for filtering | - -#### `[vars]` (optional) - -Default variable values. These have the **lowest priority** in the variable resolution chain (see Environment Files below). 
- -```nap -[vars] -userId = "42" -baseUrl = "https://api.example.com" -``` - -#### `[request]` (required in full format) - -| Field | Description | -|----------|----------------------------------------------| -| `method` | HTTP method: `GET`, `POST`, `PUT`, `PATCH`, `DELETE`, `HEAD`, `OPTIONS` | -| `url` | Request URL, supports `{{variable}}` interpolation | - -#### `[request.headers]` (optional) - -Key-value pairs for HTTP headers: - -```nap -[request.headers] -Authorization = Bearer {{token}} -Accept = application/json -Content-Type = application/json -``` - -#### `[request.body]` (optional) - -Request body for `POST`, `PUT`, and `PATCH` requests. The body content is wrapped in triple quotes (`"""`): - -```nap -[request.body] -content-type = application/json -""" -{ - "title": "New Post", - "body": "Content here", - "userId": {{userId}} -} -""" -``` - -Variables (`{{variable}}`) are interpolated inside the body. - -#### `[assert]` (optional) - -Declarative assertions that run after the response is received. No scripting needed for common checks. - -| Syntax | Description | -|-------------------------------------|------------------------------------| -| `status = 200` | HTTP status code equality | -| `body.path = value` | JSON path equality | -| `body.path exists` | JSON field presence check | -| `headers.Name contains "substring"` | Header substring match | -| `duration < 500ms` | Response time assertion (supports `<`, `>`, `=`) | - -#### `[script]` (optional) - -References external `.fsx` (F# script) files for pre/post hooks: - -```nap -[script] -pre = ./scripts/setup.fsx # runs before the request -post = ./scripts/validate.fsx # runs after the response -``` - -### Variable Interpolation - -Use `{{variableName}}` anywhere in URLs, headers, body, and assertion values. Variables are resolved from the environment chain (see `.napenv` below). 
- -### Comments - -Lines starting with `#` are comments: - -```nap -# This request tests user creation -POST https://api.example.com/users -``` - ---- - -## `.naplist` — Playlist Files - -A `.naplist` file defines an ordered sequence of steps to execute. Steps can reference `.nap` files, folders, other playlists, or F# scripts. - -### Example - -```naplist -[meta] -name = "Smoke Test Suite" -env = staging - -[vars] -timeout = "5000" - -[steps] -./auth/01_login.nap -./auth/02_refresh-token.nap -./users/01_get-user.nap -./regression/core.naplist -``` - -### Sections - -#### `[meta]` (optional) - -| Field | Description | -|---------------|--------------------------------------------------| -| `name` | Display name for the playlist | -| `description` | Longer description | -| `env` | Default environment name for all steps | - -#### `[vars]` (optional) - -Variables scoped to this playlist. All steps inherit these values. - -#### `[steps]` (required) - -One step per line. Each step is a relative file path: - -| Step Type | Example | Behaviour | -|--------------------|---------------------------------|--------------------------------------| -| `.nap` file | `./users/get-user.nap` | Run the single request | -| `.naplist` file | `./regression/core.naplist` | Run the nested playlist recursively | -| Folder | `./users/` | Run all `.nap` files in the folder | -| `.fsx` script | `./scripts/setup.fsx` | Execute the F# script | - -Steps execute **sequentially** in the order listed. Lines starting with `#` are comments. Blank lines are ignored. - -### Variable Scoping - -- `[vars]` in a playlist apply to all steps within it. -- Scripts can set variables for downstream steps using `ctx.Set`. -- Nested playlists inherit the parent's variables unless they override them. - ---- - -## `.napenv` — Environment Files - -Environment files define variables for different deployment targets. They use simple `key = value` TOML syntax. 
- -### File Hierarchy - -Nap looks for environment files in the working directory. Multiple files are merged with this priority (highest wins): - -| Priority | Source | Description | -|----------|----------------------------------|------------------------------------| -| 1 | `--var key=value` CLI flags | Command-line overrides | -| 2 | `.napenv.local` | Local secrets (gitignored) | -| 3 | `.napenv.` | Named environment (e.g. `.napenv.staging`) | -| 4 | `.napenv` | Base environment (checked into git) | -| 5 | `[vars]` in `.nap`/`.naplist` | File-level defaults | - -### Examples - -**`.napenv`** (base, checked into git): -```toml -baseUrl = "https://api.example.com" -userId = "42" -``` - -**`.napenv.local`** (secrets, gitignored): -```toml -token = "eyJhbGci..." -apiKey = "sk-secret-key" -``` - -**`.napenv.staging`** (named environment): -```toml -baseUrl = "https://staging.api.example.com" -token = "staging-token" -``` - -Select an environment with the `--env` flag: -```sh -nap run ./smoke.naplist --env staging -``` - ---- - -## `.fsx` — F# Scripts - -F# Interactive scripts for pre/post execution hooks and orchestration. Scripts are referenced from `[script]` blocks in `.nap` files or as steps in `.naplist` files. 
- -### Script Context - -The runtime injects a `NapContext` object into every script: - -```fsharp -type NapResponse = { - StatusCode : int - Headers : Map - Body : string - Json : JsonElement - Duration : TimeSpan -} - -type NapContext = { - Vars : Map - Request : HttpRequestMessage // pre-script only - Response : NapResponse // post-script only - Env : string - Fail : string -> unit // fail the test with a message - Set : string -> string -> unit // set variable for downstream steps - Log : string -> unit // write to test output -} -``` - -### Example: Post-Request Validation - -```fsharp -let user = ctx.Response.Json - -if user.GetProperty("id").GetString() <> ctx.Vars["userId"] then - ctx.Fail "User ID mismatch" - -let token = user.GetProperty("sessionToken").GetString() -ctx.Set "token" token -``` - -### Orchestration Scripts - -Scripts can also drive execution by using the injected `NapRunner`: - -```fsharp -let loginResult = nap.Run "./auth/01_login.nap" -ctx.Set "token" (loginResult.Response.Json.GetProperty("token").GetString()) - -for userId in [1; 2; 3] do - ctx.Set "userId" (string userId) - let result = nap.Run "./users/get-user.nap" - if result.Response.StatusCode <> 200 then - ctx.Fail $"User {userId} not found" -``` - ---- - -## Directory Structure - -A typical Nap project: - -``` -my-api/ -├── .napenv # Base variables (checked in) -├── .napenv.local # Secrets (gitignored) -├── .napenv.staging # Staging environment -├── auth/ -│ ├── 01_login.nap -│ └── 02_refresh-token.nap -├── users/ -│ ├── 01_get-user.nap -│ ├── 02_create-user.nap -│ └── 03_delete-user.nap -├── scripts/ -│ ├── setup.fsx -│ └── teardown.fsx -└── smoke.naplist -``` - -Use numeric prefixes (`01_`, `02_`) to control execution order when running a folder. 
diff --git a/docs/vsix-binary-audit.md b/docs/vsix-binary-audit.md deleted file mode 100644 index a3dd24c..0000000 --- a/docs/vsix-binary-audit.md +++ /dev/null @@ -1,82 +0,0 @@ -# AUDIT: VSIX Binary Management - -The VSIX must not contain binaries. Instead it must check the required CLI version -is installed from the GitHub release, download it if missing, and overwrite if the -version is wrong. Scripts must honor local builds during development. - ---- - -## 1. VSIX MUST NOT CONTAIN BINARIES - -**Status: OK** (with caveat) - -`.vscodeignore` line 10 has `bin/**` which excludes the bin directory from the VSIX -package. The release workflow (`release.yml:34-36`) packages a universal VSIX with -no CLI inside. - -**Caveat:** `build-vsix.sh` copies the CLI to `src/Nap.VsCode/bin/` then packages a -"platform-specific" VSIX — but `.vscodeignore` excludes `bin/**`, so the binary is -silently dropped. The script is misleading: it looks like it bundles the CLI but -doesn't. Same for `build-all.sh` which copies to `bin/` then packages. - -## 2. Must check binary version - -**Status: IMPLEMENTED** - -- CLI has `--version` flag in `Program.fs` that prints the assembly version -- `0.1.0` set in `Nap.Cli.fsproj` -- `getCliVersion()` in `cliInstaller.ts` runs `napper --version` and returns the result -- `CLI_REQUIRED_VERSION` in `constants.ts` pins the expected version - -## 3. If not installed, download from GH release - -**Status: IMPLEMENTED** - -- `ensureCliInstalled()` in `extension.ts` downloads if the binary doesn't exist -- Download URL pinned to version: `/releases/download/v{CLI_REQUIRED_VERSION}/` - -## 4. If version is wrong, must overwrite - -**Status: IMPLEMENTED** - -- `ensureCliInstalled()` checks version via `getCliVersion()` after existence check -- If version doesn't match `CLI_REQUIRED_VERSION`, logs mismatch and re-downloads - -## 5. 
Scripts honor local binary - -**Status: IMPLEMENTED** - -- `build-cli.sh` installs to `~/.local/bin/napper` and `src/Nap.VsCode/bin/napper` -- `getCliPath()` checks the bundled path first, so VSIX finds the local binary - during tests (no version check on bundled path — accepts any local build) -- Build scripts verify CLI version matches `` in fsproj after build -- `build-vsix.sh` packages a universal VSIX (no CLI bundled) and relies on - `build-cli.sh` for local CLI installation - ---- - -## Issues Summary - -| # | Issue | Where | -|---|-------|-------| -| 1 | CLI needs `--version` flag that prints version | `Program.fs` | -| 2 | CLI needs `` property in fsproj | `Nap.Cli.fsproj` | -| 3 | Extension needs expected version constant | `constants.ts` | -| 4 | Extension needs `getCliVersion()` that runs `napper --version` | `cliInstaller.ts` | -| 5 | `ensureCliInstalled` must check version, re-download if wrong | `extension.ts` | -| 6 | Download URL must pin to version (`/releases/download/v{ver}/`) not `latest` | `constants.ts` + `cliInstaller.ts` | -| 7 | `build-vsix.sh` is misleading — copies CLI to bin/ but `.vscodeignore` drops it | `build-vsix.sh` or `.vscodeignore` | -| 8 | Scripts should verify binary version after build | `build-cli.sh`, `build-all.sh` | - ---- - -## TODO - -- [x] Add `` property to `Nap.Cli.fsproj` -- [x] Add `--version` command to `Program.fs` that prints the assembly version -- [x] Add `CLI_REQUIRED_VERSION` constant to `constants.ts` matching the VSIX package version -- [x] Add `getCliVersion()` to `cliInstaller.ts` that runs `napper --version` and returns the version string -- [x] Update `ensureCliInstalled` in `extension.ts` to check version, re-download if mismatched -- [x] Pin download URL to version (`/releases/download/v{ver}/`) instead of `latest` -- [x] Fix `build-vsix.sh` — builds universal VSIX, delegates CLI build to `build-cli.sh` -- [x] Add version verification to `build-cli.sh` and `build-all.sh` after build diff --git 
a/examples/.claude/fix-bug/SKILL.md b/examples/.claude/fix-bug/SKILL.md deleted file mode 100644 index 0bb15ce..0000000 --- a/examples/.claude/fix-bug/SKILL.md +++ /dev/null @@ -1,66 +0,0 @@ ---- -name: fix-bug -description: Fix a bug using test-driven development. Use when the user reports a bug, describes unexpected behavior, wants to fix a defect, or says something is broken. Enforces a strict test-first workflow where a failing test must be written and verified before any fix is attempted. -argument-hint: "[bug description]" -allowed-tools: Read, Grep, Glob, Edit, Write, Bash ---- - -# Bug Fix Skill — Test-First Workflow - -You MUST follow this exact workflow. Do NOT skip steps. Do NOT fix the bug before writing a failing test. - -## Step 1: Understand the Bug - -- Read the bug description: $ARGUMENTS -- Investigate the codebase to understand the relevant code -- Identify the root cause (or narrow down candidates) -- Summarize your understanding of the bug to the user before proceeding - -## Step 2: Write a Failing Test - -- Write a test that **directly exercises the buggy behavior** -- The test must assert the **correct/expected** behavior — so it FAILS against the current broken code -- The test name should clearly describe the bug (e.g., `test_orange_color_not_applied_to_head`) -- Use the project's existing test framework and conventions - -## Step 3: Run the Test — Confirm It FAILS - -- Run ONLY the new test (not the full suite) -- **Verify the test FAILS** and that it fails **because of the bug**, not for some other reason (typo, import error, wrong selector, etc.) -- If the test passes: your test does not capture the bug. Go back to Step 2 -- If the test fails for the wrong reason: fix the test, not the code. Go back to Step 2 -- **Repeat until the test fails specifically because of the bug** - -## Step 4: Show Failure to User - -- Show the user the test code and the failure output -- Explicitly ask: "This test fails because of the bug. 
Can you confirm this captures the issue before I fix it?" -- **STOP and WAIT for user acknowledgment before proceeding** -- Do NOT continue to Step 5 until the user confirms - -## Step 5: Fix the Bug - -- Make the **minimum change** needed to fix the bug -- Do not refactor, clean up, or "improve" surrounding code -- Do not change the test - -## Step 6: Run the Test — Confirm It PASSES - -- Run the new test again -- **Verify it PASSES** -- If it fails: go back to Step 5 and adjust the fix -- **Repeat until the test passes** - -## Step 7: Run the Full Test Suite - -- Run ALL tests to make sure nothing else broke -- If other tests fail: fix the regression without breaking the new test -- Report the final result to the user - -## Rules - -- NEVER fix the bug before the failing test is written and confirmed -- NEVER skip asking the user to acknowledge the test failure -- NEVER modify the test to make it pass — modify the source code -- If you cannot write a test for the bug, explain why and ask the user how to proceed -- Keep the fix minimal — one bug, one fix, one test diff --git a/examples/.claude/submit-pr/SKILL.md b/examples/.claude/submit-pr/SKILL.md deleted file mode 100644 index c6cb432..0000000 --- a/examples/.claude/submit-pr/SKILL.md +++ /dev/null @@ -1,63 +0,0 @@ ---- -name: submit-pr -description: Create and submit a GitHub pull request using the diff against main -disable-model-invocation: true -allowed-tools: Bash(git *), Bash(gh *) ---- - -# Submit Pull Request - -Create a GitHub pull request for the current branch. - -## Steps - -1. Get the diff against the latest LOCAL main branch commit: - -``` -git diff main...HEAD -``` - -2. Read the diff output carefully. Do NOT look at commit messages. The diff is the only source of truth for what changed. - -3. Check if there's a related GitHub issue. Look for issue references in the branch name (e.g. `42-fix-bug` or `issue-42`). 
If found, fetch the issue title: - -``` -gh issue view --json title -q .title -``` - -4. Write the PR content using the project's PR template - -You read the file at .github/PULL_REQUEST_TEMPLATE.md - -Keep content TIGHT. Don't add waffle. - -5. Construct the PR title: -- If an issue number was found: `#: ` -- Otherwise: `` -- Keep under 70 characters - -6. Commit changes and push the current branch if needed: - -``` -git push -u origin HEAD -``` - -DO NOT include yourself as a a coauthor! - -7. Create the PR using `gh`: - -``` -gh pr create --title "" --body "$(cat <<'EOF' -# TLDR; -<tldr content> - -# Details -<details content> - -# How do the tests prove the change works -<test description> -EOF -)" -``` - -8. Return the PR URL to the user. diff --git a/scripts/build-all.sh b/scripts/build-all.sh deleted file mode 100755 index 5971c86..0000000 --- a/scripts/build-all.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env bash -# Full clean rebuild: nuke all artifacts → rebuild CLI → install to PATH → rebuild extension → package VSIX -set -euo pipefail -cd "$(dirname "$0")/.." - -# --- Detect platform --- -ARCH=$(uname -m) -OS=$(uname -s) -case "$OS" in - Darwin) - case "$ARCH" in - arm64) NAP_RID="osx-arm64" ;; - x86_64) NAP_RID="osx-x64" ;; - *) echo "ERROR: Unsupported arch: $ARCH"; exit 1 ;; - esac ;; - Linux) - NAP_RID="linux-x64" ;; - *) - echo "ERROR: Unsupported OS: $OS"; exit 1 ;; -esac - -echo "==> Platform: $NAP_RID" - -# ============================================================ -# 1. CLEAN — nuke every build artifact -# ============================================================ -echo "==> Cleaning all build artifacts..." 
- -rm -rf out/ -rm -rf src/Nap.Core/bin/ src/Nap.Core/obj/ -rm -rf src/Nap.Cli/bin/ src/Nap.Cli/obj/ -rm -rf tests/Nap.Core.Tests/bin/ tests/Nap.Core.Tests/obj/ -rm -rf src/Nap.VsCode/bin/ -rm -rf src/Nap.VsCode/dist/ -rm -rf src/Nap.VsCode/out/ -rm -f src/Nap.VsCode/*.vsix -rm -rf coverage/ - -echo "==> Clean complete" - -# ============================================================ -# 2. BUILD CLI -# ============================================================ -echo "==> Building CLI ($NAP_RID)..." - -dotnet publish src/Nap.Cli/Nap.Cli.fsproj \ - -r "$NAP_RID" \ - --self-contained \ - -p:PublishTrimmed=true \ - -p:PublishSingleFile=true \ - -o "out/$NAP_RID" \ - --nologo - -# Copy to extension bin/ (for tests and VSIX packaging) -EXT_BIN="src/Nap.VsCode/bin" -mkdir -p "$EXT_BIN" -cp "out/$NAP_RID/napper" "$EXT_BIN/napper" - -# Install to PATH -mkdir -p "$HOME/.local/bin" -cp "out/$NAP_RID/napper" "$HOME/.local/bin/napper" -chmod +x "$HOME/.local/bin/napper" - -# Verify CLI version matches fsproj -EXPECTED_VERSION=$(sed -n 's/.*<Version>\(.*\)<\/Version>.*/\1/p' Directory.Build.props) -ACTUAL_VERSION=$("out/$NAP_RID/napper" --version) -if [ "$ACTUAL_VERSION" != "$EXPECTED_VERSION" ]; then - echo "ERROR: Version mismatch — expected $EXPECTED_VERSION, got $ACTUAL_VERSION" - exit 1 -fi - -echo "==> CLI built and installed → ~/.local/bin/napper (v$ACTUAL_VERSION)" - -# ============================================================ -# 3. BUILD EXTENSION -# ============================================================ -echo "==> Building VS Code extension..." - -cd src/Nap.VsCode -npm ci -npx webpack --mode production -npm run compile:tests - -echo "==> Extension compiled" - -# ============================================================ -# 4. PACKAGE VSIX -# ============================================================ -echo "==> Packaging VSIX (universal)..." 
- -npx @vscode/vsce package --no-dependencies --skip-license - -VSIX_FILE=$(ls -1 *.vsix 2>/dev/null | head -1) -cd ../.. - -echo "" -echo "==> BUILD COMPLETE" -echo " CLI: ~/.local/bin/napper" -echo " CLI: $EXT_BIN/napper" -[ -n "${VSIX_FILE:-}" ] && echo " VSIX: src/Nap.VsCode/$VSIX_FILE" -echo "" -napper --help | head -1 diff --git a/scripts/build-cli.sh b/scripts/build-cli.sh index 894520f..324e66c 100755 --- a/scripts/build-cli.sh +++ b/scripts/build-cli.sh @@ -1,56 +1,39 @@ #!/usr/bin/env bash -# Build CLI binary and install to PATH + extension bin/ set -euo pipefail -cd "$(dirname "$0")/.." -# --- Detect RID --- -if [ -z "${NAP_RID:-}" ]; then - ARCH=$(uname -m) - OS=$(uname -s) - case "$OS" in - Darwin) - case "$ARCH" in - arm64) NAP_RID="osx-arm64" ;; - x86_64) NAP_RID="osx-x64" ;; - esac ;; - Linux) NAP_RID="linux-x64" ;; - esac -fi +# Build the Napper CLI and copy it into the VSCode extension bin directory. +# Called from src/Napper.VsCode via: bash ../../scripts/build-cli.sh -if [ -z "${NAP_RID:-}" ]; then - echo "ERROR: Could not detect platform. Set NAP_RID manually." - exit 1 -fi +REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +EXT_BIN="${REPO_ROOT}/src/Napper.VsCode/bin" -echo "==> Building CLI for $NAP_RID..." +ARCH="$(uname -m)" +OS="$(uname -s)" -dotnet publish src/Nap.Cli/Nap.Cli.fsproj \ - -r "$NAP_RID" \ +case "${OS}" in + Darwin) + case "${ARCH}" in + arm64) RID="osx-arm64" ;; + x86_64) RID="osx-x64" ;; + *) echo "Unsupported arch: ${ARCH}" >&2; exit 1 ;; + esac + ;; + Linux) RID="linux-x64" ;; + *) echo "Unsupported OS: ${OS}" >&2; exit 1 ;; +esac + +OUT_DIR="${REPO_ROOT}/out/${RID}" + +echo "==> Building CLI for ${RID}..." 
+dotnet publish "${REPO_ROOT}/src/Napper.Cli/Napper.Cli.fsproj" \ + -r "${RID}" \ --self-contained \ -p:PublishTrimmed=true \ -p:PublishSingleFile=true \ - -o "out/$NAP_RID" \ + -o "${OUT_DIR}" \ --nologo -echo "==> CLI built → out/$NAP_RID/" - -# --- Copy into extension bin/ so tests can find it --- -EXT_BIN="src/Nap.VsCode/bin" -mkdir -p "$EXT_BIN" -cp "out/$NAP_RID/napper" "$EXT_BIN/napper" -echo "==> Copied CLI → $EXT_BIN/" - -# --- Install to PATH so it overrides any stale released binary --- -mkdir -p "$HOME/.local/bin" -cp "out/$NAP_RID/napper" "$HOME/.local/bin/napper" -chmod +x "$HOME/.local/bin/napper" -echo "==> Installed CLI → ~/.local/bin/napper" - -# --- Verify CLI version matches fsproj --- -EXPECTED_VERSION=$(sed -n 's/.*<Version>\(.*\)<\/Version>.*/\1/p' Directory.Build.props) -ACTUAL_VERSION=$("out/$NAP_RID/napper" --version) -if [ "$ACTUAL_VERSION" != "$EXPECTED_VERSION" ]; then - echo "ERROR: Version mismatch — expected $EXPECTED_VERSION, got $ACTUAL_VERSION" - exit 1 -fi -echo "==> CLI version verified: $ACTUAL_VERSION" +echo "==> CLI built → ${OUT_DIR}/" +mkdir -p "${EXT_BIN}" +cp "${OUT_DIR}/napper" "${EXT_BIN}/napper" +echo "==> Copied CLI → ${EXT_BIN}/" diff --git a/scripts/build-extension.sh b/scripts/build-extension.sh deleted file mode 100755 index 5176219..0000000 --- a/scripts/build-extension.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -cd "$(dirname "$0")/.." -echo "==> Compiling VSCode extension..." -cd src/Nap.VsCode -npm ci -npx webpack --mode production -echo "==> Extension compiled" diff --git a/scripts/build-vsix.sh b/scripts/build-vsix.sh deleted file mode 100755 index 1418ba1..0000000 --- a/scripts/build-vsix.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash -# Build CLI + universal VSIX (no binary bundled — VSIX downloads CLI at runtime) -# -# 1. Builds the CLI and installs to PATH + extension bin/ (for local testing) -# 2. Verifies CLI version matches the extension's expected version -# 3. 
Packages a universal VSIX (bin/ excluded by .vscodeignore) -set -euo pipefail -cd "$(dirname "$0")/.." - -# --- Build CLI first (installs to PATH + bin/) --- -bash scripts/build-cli.sh - -# --- Build extension + package VSIX --- -echo "==> Building VS Code extension..." -cd src/Nap.VsCode -npm ci -npx webpack --mode production -npx @vscode/vsce package --no-dependencies --skip-license - -VSIX_FILE=$(ls -1 *.vsix 2>/dev/null | head -1) -cd ../.. - -echo "" -echo "==> VSIX packaged (universal — no CLI bundled)" -[ -n "${VSIX_FILE:-}" ] && echo " VSIX: src/Nap.VsCode/$VSIX_FILE" -echo " CLI installed at: ~/.local/bin/napper (for local use)" diff --git a/scripts/bump-version.sh b/scripts/bump-version.sh deleted file mode 100755 index 1579f62..0000000 --- a/scripts/bump-version.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env bash -# Bump version across all projects (F#, TypeScript) and optionally commit+push. -# Usage: ./scripts/bump-version.sh <version> [--commit] -# e.g. ./scripts/bump-version.sh 0.2.0 --commit -set -euo pipefail -cd "$(dirname "$0")/.." - -if [ -z "${1:-}" ]; then - echo "Usage: $0 <version> [--commit]" - echo " e.g. $0 0.2.0 --commit" - exit 1 -fi - -VERSION="$1" -COMMIT="${2:-}" - -echo "==> Bumping all projects to v${VERSION}" - -# --- F# (Directory.Build.props) --- -sed -i.bak "s|<Version>.*</Version>|<Version>${VERSION}</Version>|" Directory.Build.props -rm -f Directory.Build.props.bak -echo " Directory.Build.props → ${VERSION}" - -# --- TypeScript / VS Code extension (package.json) --- -cd src/Nap.VsCode -npm version "${VERSION}" --no-git-tag-version --allow-same-version -cd ../.. 
-echo " src/Nap.VsCode/package.json → ${VERSION}" - -# --- Rust (Cargo.toml) — bump if present --- -if [ -f Cargo.toml ]; then - sed -i.bak "s/^version = \".*\"/version = \"${VERSION}\"/" Cargo.toml - rm -f Cargo.toml.bak - echo " Cargo.toml → ${VERSION}" -fi - -echo "==> All projects bumped to v${VERSION}" - -# --- Commit + push if requested --- -if [ "$COMMIT" = "--commit" ]; then - echo "==> Committing and pushing version bump..." - # Set git identity in CI if not already configured - if [ -n "${CI:-}" ]; then - git config user.name "github-actions[bot]" - git config user.email "github-actions[bot]@users.noreply.github.com" - fi - git add Directory.Build.props src/Nap.VsCode/package.json src/Nap.VsCode/package-lock.json - [ -f Cargo.toml ] && git add Cargo.toml - git commit -m "release: update version to v${VERSION}" - git push - echo "==> Committed and pushed v${VERSION}" -fi diff --git a/scripts/clean-install.sh b/scripts/clean-install.sh deleted file mode 100755 index 06b0d84..0000000 --- a/scripts/clean-install.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Full clean rebuild + install VSIX into VS Code -set -euo pipefail -cd "$(dirname "$0")/.." - -# Build everything (clean → CLI → extension → VSIX) -bash scripts/build-all.sh - -# Install VSIX into VS Code -cd src/Nap.VsCode -VSIX_FILE=$(ls -1 *.vsix 2>/dev/null | head -1) - -if [ -z "${VSIX_FILE:-}" ]; then - echo "ERROR: No VSIX file found after build" - exit 1 -fi - -echo "==> Installing VSIX: $VSIX_FILE" -code --install-extension "$VSIX_FILE" --force - -echo "" -echo "==> DONE — restart VS Code to load the new extension" diff --git a/scripts/dump-cli-help.sh b/scripts/dump-cli-help.sh deleted file mode 100755 index b3382a4..0000000 --- a/scripts/dump-cli-help.sh +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Dump the Nap CLI --help output to a markdown document in docs/ - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -ROOT_DIR="$(cd "$SCRIPT_DIR/.." 
&& pwd)" -OUTPUT_FILE="$ROOT_DIR/docs/cli-reference.md" - -# Use the installed binary from PATH -CLI_PATH=$(command -v napper 2>/dev/null || true) - -if [ -z "$CLI_PATH" ]; then - echo "napper not found on PATH — building first..." - bash "$SCRIPT_DIR/build-cli.sh" - CLI_PATH="$HOME/.local/bin/napper" -fi - -echo "==> Capturing CLI help output from $CLI_PATH..." - -HELP_OUTPUT=$("$CLI_PATH" help 2>&1) - -mkdir -p "$ROOT_DIR/docs" - -cat > "$OUTPUT_FILE" << 'HEADER' -# Nap CLI Reference - -> Auto-generated from `nap help`. Run `scripts/dump-cli-help.sh` to regenerate. - -## Help Output - -``` -HEADER - -echo "$HELP_OUTPUT" >> "$OUTPUT_FILE" - -cat >> "$OUTPUT_FILE" << 'FOOTER' -``` - -## Commands - -### `nap run <file|folder>` - -Run a `.nap` file, `.naplist` playlist, or an entire folder of requests. - -```sh -# Single request -nap run ./users/get-user.nap - -# With variable overrides -nap run ./users/get-user.nap --var userId=99 - -# Run all .nap files in a folder (sorted by filename) -nap run ./users/ - -# Run a playlist -nap run ./smoke.naplist - -# With a named environment -nap run ./smoke.naplist --env staging - -# Output as JUnit XML (for CI) -nap run ./smoke.naplist --output junit - -# Output as JSON -nap run ./smoke.naplist --output json -``` - -### `nap check <file>` - -Validate the syntax of a `.nap` or `.naplist` file without executing it. - -```sh -nap check ./users/get-user.nap -nap check ./smoke.naplist -``` - -### `nap generate openapi <spec> --output-dir <dir>` - -Generate `.nap` files from an OpenAPI specification. - -```sh -nap generate openapi ./openapi.json --output-dir ./tests -nap generate openapi ./openapi.json --output-dir ./tests --output json -``` - -### `nap help` - -Display the help message. Also available as `--help` or `-h`. 
- -## Options - -| Option | Description | -|---------------------|---------------------------------------------------| -| `--env <name>` | Load a named environment file (`.napenv.<name>`) | -| `--var <key=value>` | Override a variable (repeatable) | -| `--output <format>` | Output format: `pretty` (default), `junit`, `json`, `ndjson` | -| `--output-dir <dir>`| Output directory for generate command | -| `--verbose` | Enable debug-level logging | - -## Exit Codes - -| Code | Meaning | -|------|--------------------------------------------------| -| 0 | All assertions passed | -| 1 | One or more assertions failed | -| 2 | Runtime error (network, script error, parse error) | -FOOTER - -echo "==> Written to $OUTPUT_FILE" diff --git a/scripts/install.ps1 b/scripts/install.ps1 new file mode 100644 index 0000000..948f0ee --- /dev/null +++ b/scripts/install.ps1 @@ -0,0 +1,82 @@ +# Install Napper CLI on Windows +# Usage: irm https://raw.githubusercontent.com/MelbourneDeveloper/napper/main/scripts/install.ps1 | iex +# Or: .\scripts\install.ps1 [-Version 0.2.0] [-InstallDir C:\tools] + +param( + [string]$Version = "latest", + [string]$InstallDir = "$env:LOCALAPPDATA\napper" +) + +$ErrorActionPreference = "Stop" + +$repo = "MelbourneDeveloper/napper" +$asset = "napper-win-x64.exe" +$checksumFile = "checksums-sha256.txt" + +# --- Resolve version --- +if ($Version -eq "latest") { + Write-Host "==> Fetching latest release..." 
+ $release = Invoke-RestMethod "https://api.github.com/repos/$repo/releases/latest" + $tag = $release.tag_name +} else { + $tag = "v$Version" +} + +Write-Host "==> Installing napper $tag" + +$baseUrl = "https://github.com/$repo/releases/download/$tag" +$binaryUrl = "$baseUrl/$asset" +$checksumUrl = "$baseUrl/$checksumFile" + +# --- Download binary and checksums --- +$tmpDir = Join-Path $env:TEMP "napper-install" +New-Item -ItemType Directory -Force -Path $tmpDir | Out-Null + +$binaryPath = Join-Path $tmpDir $asset +$checksumPath = Join-Path $tmpDir $checksumFile + +Write-Host "==> Downloading $asset..." +Invoke-WebRequest -Uri $binaryUrl -OutFile $binaryPath -UseBasicParsing + +Write-Host "==> Downloading checksums..." +Invoke-WebRequest -Uri $checksumUrl -OutFile $checksumPath -UseBasicParsing + +# --- Verify checksum --- +Write-Host "==> Verifying SHA256 checksum..." +$actualHash = (Get-FileHash -Path $binaryPath -Algorithm SHA256).Hash.ToLower() +$checksumLines = Get-Content $checksumPath +$expectedLine = $checksumLines | Where-Object { $_ -match $asset } + +if (-not $expectedLine) { + Remove-Item -Recurse -Force $tmpDir + throw "ERROR: $asset not found in checksums file" +} + +$expectedHash = ($expectedLine -split "\s+")[0].ToLower() + +if ($actualHash -ne $expectedHash) { + Remove-Item -Recurse -Force $tmpDir + throw "ERROR: Checksum mismatch`n Expected: $expectedHash`n Actual: $actualHash" +} + +Write-Host " Checksum verified: $actualHash" + +# --- Install to directory --- +New-Item -ItemType Directory -Force -Path $InstallDir | Out-Null +$destPath = Join-Path $InstallDir "napper.exe" +Move-Item -Force -Path $binaryPath -Destination $destPath + +# --- Add to PATH if needed --- +$userPath = [Environment]::GetEnvironmentVariable("PATH", "User") +if ($userPath -notlike "*$InstallDir*") { + Write-Host "==> Adding $InstallDir to user PATH..." 
+ [Environment]::SetEnvironmentVariable("PATH", "$userPath;$InstallDir", "User") + $env:PATH = "$env:PATH;$InstallDir" +} + +# --- Cleanup --- +Remove-Item -Recurse -Force $tmpDir + +Write-Host "" +Write-Host "==> napper $tag installed to $destPath" +Write-Host " Restart your terminal, then run: napper --help" diff --git a/scripts/install.sh b/scripts/install.sh new file mode 100755 index 0000000..6e1cc3a --- /dev/null +++ b/scripts/install.sh @@ -0,0 +1,77 @@ +#!/usr/bin/env bash +# Install Napper CLI on macOS / Linux +# Usage: curl -fsSL https://raw.githubusercontent.com/MelbourneDeveloper/napper/main/scripts/install.sh | bash +# Or: ./scripts/install.sh [version] +# e.g. ./scripts/install.sh 0.2.0 +set -euo pipefail + +REPO="MelbourneDeveloper/napper" +VERSION="${1:-latest}" +INSTALL_DIR="${NAPPER_INSTALL_DIR:-$HOME/.local/bin}" +CHECKSUM_FILE="checksums-sha256.txt" + +# --- Detect platform --- +ARCH=$(uname -m) +OS=$(uname -s) +case "$OS" in + Darwin) + case "$ARCH" in + arm64) ASSET="napper-osx-arm64" ;; + x86_64) ASSET="napper-osx-x64" ;; + *) echo "ERROR: Unsupported arch: $ARCH"; exit 1 ;; + esac ;; + Linux) + ASSET="napper-linux-x64" ;; + *) + echo "ERROR: Unsupported OS: $OS (use install.ps1 for Windows)"; exit 1 ;; +esac + +# --- Resolve version --- +if [ "$VERSION" = "latest" ]; then + echo "==> Fetching latest release..." + TAG=$(curl -fsSL "https://api.github.com/repos/$REPO/releases/latest" | grep '"tag_name"' | cut -d '"' -f 4) +else + TAG="v${VERSION}" +fi + +echo "==> Installing napper $TAG ($ASSET)" + +BASE_URL="https://github.com/$REPO/releases/download/$TAG" +TMP_DIR=$(mktemp -d) +trap 'rm -rf "$TMP_DIR"' EXIT + +# --- Download binary and checksums --- +echo "==> Downloading $ASSET..." +curl -fSL -o "$TMP_DIR/$ASSET" "$BASE_URL/$ASSET" + +echo "==> Downloading checksums..." +curl -fSL -o "$TMP_DIR/$CHECKSUM_FILE" "$BASE_URL/$CHECKSUM_FILE" + +# --- Verify checksum --- +echo "==> Verifying SHA256 checksum..." 
+EXPECTED_HASH=$(grep "$ASSET" "$TMP_DIR/$CHECKSUM_FILE" | awk '{print $1}') + +if [ -z "$EXPECTED_HASH" ]; then + echo "ERROR: $ASSET not found in checksums file" + exit 1 +fi + +ACTUAL_HASH=$(shasum -a 256 "$TMP_DIR/$ASSET" | awk '{print $1}') + +if [ "$ACTUAL_HASH" != "$EXPECTED_HASH" ]; then + echo "ERROR: Checksum mismatch" + echo " Expected: $EXPECTED_HASH" + echo " Actual: $ACTUAL_HASH" + exit 1 +fi + +echo " Checksum verified: $ACTUAL_HASH" + +# --- Install --- +mkdir -p "$INSTALL_DIR" +mv "$TMP_DIR/$ASSET" "$INSTALL_DIR/napper" +chmod +x "$INSTALL_DIR/napper" + +echo "" +echo "==> napper $TAG installed to $INSTALL_DIR/napper" +echo " Run: napper --help" diff --git a/scripts/package-vsix.sh b/scripts/package-vsix.sh deleted file mode 100755 index 3e22249..0000000 --- a/scripts/package-vsix.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash -# Build extension and package a universal VSIX (no CLI binary bundled). -# The extension downloads the CLI binary on first activation. - -set -euo pipefail -cd "$(dirname "$0")/.." - -# --- Extension --- -bash scripts/build-extension.sh - -# --- VSIX --- -echo "==> Packaging universal VSIX..." -cd src/Nap.VsCode -npx @vscode/vsce package --no-dependencies --skip-license -echo "==> VSIX packaged" diff --git a/scripts/test-fsharp.sh b/scripts/test-fsharp.sh deleted file mode 100755 index b243db9..0000000 --- a/scripts/test-fsharp.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -cd "$(dirname "$0")/.." - -COVERAGE_DIR="coverage/fsharp" -rm -rf "$COVERAGE_DIR" -mkdir -p "$COVERAGE_DIR" - -echo "==> Running F# tests with coverage..." -dotnet test tests/Nap.Core.Tests --nologo \ - --settings tests/Nap.Core.Tests/coverage.runsettings \ - --results-directory "$COVERAGE_DIR/raw" - -echo "==> Generating coverage report..." 
-reportgenerator \ - -reports:"$COVERAGE_DIR/raw/*/coverage.cobertura.xml" \ - -targetdir:"$COVERAGE_DIR/report" \ - -reporttypes:"Html;TextSummary;Cobertura;lcov" - -echo "" -echo "=== F# Coverage Summary ===" -cat "$COVERAGE_DIR/report/Summary.txt" diff --git a/scripts/test.sh b/scripts/test.sh deleted file mode 100755 index d8852a3..0000000 --- a/scripts/test.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -cd "$(dirname "$0")/.." - -bash scripts/build-cli.sh - -COVERAGE_DIR="coverage/typescript" -rm -rf "$COVERAGE_DIR" -mkdir -p "$COVERAGE_DIR" - -echo "==> Compiling TypeScript..." -cd src/Nap.VsCode -npm run compile -npm run compile:tests - -echo "==> Running unit tests with coverage..." -npx c8 \ - --temp-directory "../../$COVERAGE_DIR/tmp" \ - --report-dir "../../$COVERAGE_DIR/report" \ - mocha out/test/unit/**/*.test.js --ui tdd --timeout 5000 - -echo "==> Running e2e tests..." -npx vscode-test - -echo "" -echo "=== TypeScript Coverage Summary ===" -echo "Report: $COVERAGE_DIR/report/index.html" diff --git a/specs/CLI-OPENAPI-GENERATION.md b/specs/CLI-OPENAPI-GENERATION.md new file mode 100644 index 0000000..4ee5811 --- /dev/null +++ b/specs/CLI-OPENAPI-GENERATION.md @@ -0,0 +1,269 @@ +# `openapi-generate` — OpenAPI Test Generation — CLI + +> **One click to turn an OpenAPI spec into a comprehensive, runnable test suite.** + +--- + +CRITICAL: START WITH TESTS THAT VERIFY THAT OpenAPI -> .nap is WORKING. THE OPENAPI -> .nap DETERMINISTIC PART IS F#. + +--- + +## Vision + +A user points Nap at an OpenAPI 3.x or Swagger 2.x specification and gets a complete test suite: one `.nap` file per operation, organized by tag into subdirectories, with a `.naplist` playlist, a `.napenv` environment file, and meaningful assertions derived from the spec's response schemas. + +The generated files are **starting points**. The user edits, extends, and commits them alongside the rest of the collection. 
+ +--- + +## Generation Flow + +``` +Input Parse Collect Generate +──────────────────── ────────────── ───────────── ────────────────────── +Local file (.json/.yaml) │ Group endpoints Per-tag subdirectory: + or ├─ JSON.parse() by tag - 01_operation.nap +URL (https://...) │ or YAML parse │ - 02_operation.nap + ▼ │ ... + Resolve $ref │ + │ ▼ Root: + ▼ EndpointDescriptor[] - api-tests.naplist + OpenApiSpec - .napenv + - .napenv.local (gitignored) +``` + +### `openapi-input` — Input formats + +| Format | Spec ID | Status | +|--------|---------|--------| +| OpenAPI 3.x JSON | `openapi-oas3` | Implemented | +| Swagger 2.x JSON | `openapi-swagger2` | Implemented | +| YAML (both versions) | `openapi-yaml` | Not yet — needs YAML parser | +| URL-based loading | `openapi-url` | Not yet — file picker only | + +--- + +## What Gets Generated + +### `openapi-nap-gen` — Per operation: a `.nap` file + +```nap +# Generated from GET /users/{userId} +[meta] +name = Get user by ID +description = Auto-generated from petstore.yaml - operation getUserById +tags = ["users", "generated"] +generated = true + +[vars] +userId = "REPLACE_ME" + +[request] +GET {{baseUrl}}/users/{{userId}} + +[request.headers] +Authorization = Bearer {{token}} +Accept = application/json + +[assert] +status = 200 +body.id exists +body.name exists +body.email exists +``` + +### `openapi-tag-dirs` — Per tag: a subdirectory + +Operations tagged `users` go into `users/`, operations tagged `pets` go into `pets/`, etc. Untagged operations go into the root. 
+ +``` +generated/ +├── .napenv +├── .napenv.local # gitignored, placeholder for secrets +├── api-tests.naplist +├── users/ +│ ├── 01_get-user.nap +│ ├── 02_create-user.nap +│ └── 03_delete-user.nap +└── pets/ + ├── 01_list-pets.nap + └── 02_get-pet.nap +``` + +### `openapi-naplist-gen` — Per spec: a `.naplist` playlist + +```naplist +[meta] +name = Pet Store API + +[steps] +./users/01_get-user.nap +./users/02_create-user.nap +./users/03_delete-user.nap +./pets/01_list-pets.nap +./pets/02_get-pet.nap +``` + +### `openapi-napenv-gen` — Per spec: a `.napenv` environment + +```toml +baseUrl = https://petstore.example.com/v1 +``` + +--- + +## Generation Details + +### `openapi-baseurl` — Base URL extraction + +1. OpenAPI 3.x: first entry in `servers[].url` +2. Swagger 2.x: `{schemes[0]}://{host}{basePath}` +3. Fallback: `https://api.example.com` + +### `openapi-params` — Path parameter conversion + +OpenAPI `{param}` becomes Nap `{{param}}`. Each path parameter also generates a `[vars]` entry with a placeholder value. 
+ +### `openapi-body-gen` — Request body generation + +For POST / PUT / PATCH operations: +- If the spec provides an `example`, use it verbatim +- Otherwise, recursively generate from the schema using type-appropriate defaults +- Use `format` hints for smarter defaults (email, uuid, date-time, uri) +- Use `enum` values when available (pick the first) +- Respect `minimum` / `maximum` for numeric types + +### `openapi-assert-gen` — Response assertion generation + +From the success response schema (first 2xx status code): +- `status = {code}` for the expected status +- `body.{field} exists` for each top-level required property +- `body.{field} = {value}` for fields with known constant values (enums with single value) +- `headers.Content-Type contains "json"` when response media type is `application/json` + +### `openapi-query-params` — Query parameter handling + +Query parameters from the spec are appended to the URL as `?key={{key}}` and generate corresponding `[vars]` entries. + +### `openapi-auth` — Authentication handling + +From the spec's `securitySchemes` and per-operation `security` requirements: + +| Scheme | Generated output | +|--------|-----------------| +| Bearer token (`http: bearer`) | `Authorization = Bearer {{token}}` header + `token` in `.napenv.local` | +| API key (header) | `{headerName} = {{apiKey}}` header + `apiKey` in `.napenv.local` | +| API key (query) | Appended as query param `?{name}={{apiKey}}` | +| Basic auth | `Authorization = Basic {{basicAuth}}` header | + +### `openapi-error-gen` — Error case generation + +For each documented error response (4xx, 5xx), generate an additional `.nap` file that intentionally triggers the error: + +```nap +# Generated error case: 404 for GET /users/{userId} +[meta] +name = Get user by ID - 404 +description = Verify 404 when user does not exist +tags = ["users", "generated", "error-case"] +generated = true + +[vars] +userId = "nonexistent-id" + +[request] +GET {{baseUrl}}/users/{{userId}} + +[assert] 
+status = 404
+```
+
+### `openapi-ref` — `$ref` resolution
+
+OpenAPI specs use `$ref` pointers extensively for reusable schemas, parameters, and responses. The generator must resolve all `$ref` pointers by inlining the referenced definitions before generating output. This includes:
+- `#/components/schemas/...` (OAS3) and `#/definitions/...` (Swagger 2)
+- `#/components/parameters/...`
+- `#/components/responses/...`
+- Nested `$ref` chains (a schema referencing another schema)
+
+### `openapi-meta-flag` — Generated file metadata
+
+Every generated `.nap` file includes `generated = true` in the `[meta]` block. This allows tooling to distinguish generated files from hand-written ones, enabling safe re-generation and `--diff` mode.
+
+---
+
+## CLI Commands
+
+```sh
+# Generate from a local spec
+napper generate openapi ./petstore.yaml --output-dir ./petstore/
+
+# Generate from a URL
+napper generate openapi https://api.example.com/openapi.json --output-dir ./generated/
+
+# Generate only for specific tags
+napper generate openapi ./petstore.yaml --tag users --tag pets --output-dir ./filtered/
+
+# Show what would change without overwriting (diff mode)
+napper generate openapi ./petstore.yaml --output-dir ./petstore/ --diff
+```
+
+### `openapi-diff` — Diff / regeneration mode
+
+Re-running `napper generate openapi` against an existing output directory with `--diff` compares the spec's current state against previously generated files (identified by `generated = true`). It reports:
+- New operations added to the spec
+- Operations removed from the spec
+- Changed request/response schemas
+
+Without `--diff`, re-generation overwrites files that have `generated = true` but leaves files where that flag has been removed (indicating the user has taken ownership).
+ +--- + +## Implementation Phases + +### Phase A: Core Generation Improvements + +- `$ref` resolution (inline all references before generation) +- YAML support (add YAML parser) +- Response body assertions from response schemas +- Tag-based folder organization +- `[vars]` block for path parameters +- `generated = true` metadata flag + +### Phase B: Enhanced Generation + +- Query parameter and auth header generation +- Error case test generation (4xx, 5xx) +- Smarter example values using `format`, `enum`, `minimum`/`maximum` +- URL-based spec loading +- Header assertions + +### Phase C: Diff and Regeneration + +- `--diff` mode in CLI +- `generated = true` detection for safe overwrite +- Preserve custom assertions, update generated ones + +--- + +## TODO + +### Phase A: Core Generation Improvements +- [ ] `$ref` resolution (inline all references before generation) +- [ ] YAML support +- [ ] Response body assertions from response schemas +- [ ] Tag-based folder organization +- [ ] `[vars]` block for path parameters +- [ ] `generated = true` metadata flag + +### Phase B: Enhanced Generation +- [ ] Query parameter and auth header generation +- [ ] Error case test generation (4xx, 5xx) +- [ ] Smarter example values using `format`, `enum`, `minimum`/`maximum` +- [ ] URL-based spec loading +- [ ] Header assertions + +### Phase C: Diff and Regeneration +- [ ] `--diff` mode in CLI +- [ ] `generated = true` detection for safe overwrite +- [ ] Preserve custom assertions, update generated ones diff --git a/specs/CLI-PLAN.md b/specs/CLI-PLAN.md new file mode 100644 index 0000000..c266a84 --- /dev/null +++ b/specs/CLI-PLAN.md @@ -0,0 +1,129 @@ +# Nap CLI — Implementation Plan + +--- + +## Parser Implementation + +### Recommended approach: ANTLR4 + +The `.nap` and `.naplist` formats should be parsed with **ANTLR4** (targeting the C# runtime via `Antlr4.Runtime.Standard` NuGet package, which works fine from F#). 
+ +**Rationale:** +- The format has a non-trivial grammar (multi-line string literals, section headers, assertion expressions, variable interpolation). +- ANTLR gives a formal grammar file (`.g4`) that serves as the authoritative format spec and is easy to evolve. +- The C# ANTLR runtime is mature and well-maintained. Generating a visitor/listener from F# is straightforward. +- Alternatives (FParsec, manual recursive descent) are viable but ANTLR's grammar file is more readable as documentation and easier to extend without regressions. + +**Alternative — FParsec:** +If the grammar stays simple enough, [FParsec](https://www.quanttec.com/fparsec/) (a combinator parser library for F#) is a strong alternative. It keeps everything in F#, has excellent error messages, and has no code generation step. Use FParsec if the grammar remains simple; switch to ANTLR if the grammar grows complex (e.g. full expression language for assertions, conditional blocks). + +**Grammar files location:** + +``` +nap/ +└── src/ + └── Napper.Core/ + └── Grammar/ + ├── NapFile.g4 # .nap file grammar + └── NapList.g4 # .naplist grammar +``` + +The generated parser code is committed to the repo (not regenerated on every build) to avoid toolchain dependencies in CI. 
+
+---
+
+## Project Layout
+
+```
+nap/
+├── src/
+│ ├── Napper.Core/ # F# — parser, types, runner engine
+│ ├── Napper.Scripting/ # F# — fsi host, script context injection
+│ └── Napper.Cli/ # F# — CLI entry point (System.CommandLine)
+├── tests/
+│ ├── Napper.Core.Tests/
+│ └── Napper.Scripting.Tests/
+├── examples/
+│ └── petstore/ # Sample collection against Petstore API
+└── nap.sln
+```
+
+---
+
+## Implementation Phases
+
+### Phase 1 — Core CLI (MVP)
+
+- `.nap` file parser
+- HTTP request runner (single file)
+- Built-in `[assert]` block evaluation
+- `.napenv` variable resolution
+- `--output pretty` and `--output junit`
+- `napper run <file>` command
+
+### Phase 2 — Collections & Playlists
+
+- Folder-based collection runner
+- `.naplist` file parser and runner
+- Nested playlist support
+- Variable scoping across steps (`ctx.Set`)
+
+### Phase 3 — F# Scripting
+
+- dotnet-fsi host integration
+- `NapContext` injection
+- Pre/post script execution
+- `ctx.Set` for cross-step variable passing
+
+### Phase 4 — Polish & Distribution
+
+- **NuGet package for `dotnet tool install` (PRIMARY channel)** — set `<PackAsTool>true</PackAsTool>` and `<ToolCommandName>napper</ToolCommandName>` in `Napper.Cli.fsproj`, publish to nuget.org. This is the primary distribution method — no code signing needed, no SmartScreen warnings on Windows, immediate availability. The VSIX extension auto-installs via `dotnet tool install -g napper --version X.X.X`.
+- Standalone native binary (NativeAOT or single-file publish) — secondary channel for users without .NET SDK
+- Homebrew formula
+- Winget / Chocolatey / Scoop packages (future)
+- `napper new` scaffolding commands
+- Language-extensible script runner plugin model
+
+---
+
+## Open Questions / Future Considerations
+
+- **GraphQL support** — a `[request.graphql]` block with query/variables sub-keys.
+- **WebSocket / SSE testing** — separate request type, different assertion model.
+- **Mock server mode** — `napper mock ./collection/` serves a mock based on expected responses.
+- **Script language plugins** — `.py`, `.js` runners as opt-in packages.
+- **Secret manager integration** — pull `{{token}}` from 1Password, AWS Secrets Manager, etc. at runtime.
+- **HTML report output** — `--output html` for a shareable test report.
+
+---
+
+## TODO
+
+### Phase 1 — Core CLI (MVP)
+- [ ] `.nap` file parser
+- [ ] HTTP request runner (single file)
+- [ ] Built-in `[assert]` block evaluation
+- [ ] `.napenv` variable resolution
+- [ ] `--output pretty` and `--output junit`
+- [ ] `napper run <file>` command

+### Phase 2 — Collections & Playlists
+- [ ] Folder-based collection runner
+- [ ] `.naplist` file parser and runner
+- [ ] Nested playlist support
+- [ ] Variable scoping across steps (`ctx.Set`)

+### Phase 3 — F# Scripting
+- [ ] dotnet-fsi host integration
+- [ ] `NapContext` injection
+- [ ] Pre/post script execution
+- [ ] `ctx.Set` for cross-step variable passing

+### Phase 4 — Polish & Distribution
+- [ ] `dotnet tool install` — set `PackAsTool` in fsproj, publish to nuget.org (PRIMARY)
+- [ ] VSIX auto-installs CLI via `dotnet tool install -g napper --version X.X.X`
+- [ ] Standalone native binary (NativeAOT or single-file publish) — secondary
+- [ ] Homebrew formula
+- [ ] Winget / Chocolatey / Scoop packages
+- [ ] `napper new` scaffolding commands
+- [ ] Language-extensible script runner plugin model
diff --git a/specs/CLI-SPEC.md b/specs/CLI-SPEC.md
new file mode 100644
index 0000000..cd53220
--- /dev/null
+++ b/specs/CLI-SPEC.md
@@ -0,0 +1,124 @@
+# Nap CLI Specification
+
+> **Nap** (Network API Protocol) — a CLI-first, test-oriented alternative to Postman, Bruno, `.http` files, and curl.
+
+---
+
+## Vision
+
+Nap is a developer-first HTTP testing tool. It is as simple as curl for one-off requests, but scales to full test suites with reusable components, scripted assertions, and CI integration.
It is not a GUI-first tool with a CLI bolted on — the CLI is the product. + +--- + +## Core Principles + +1. **Files are the source of truth.** All requests, tests, and playlists are plain files. Git-friendly by default. +2. **Simple things are simple.** A single HTTP call should look almost as terse as curl. +3. **Tests are reusable components.** A `.nap` file (`nap-file`) is a reusable unit. It can be composed into playlists (`naplist-file`) without modification. +4. **Scripting is opt-in and external.** F# and C# scripts live in `.fsx`/`.csx` files referenced by name (`script-fsx`, `script-csx`). Simple assertions need no scripting. +5. **No lock-in.** The format is plain text. The scripting is standard `.fsx`/`.csx`. Results emit standard formats. + +--- + +## Installation + +The Napper CLI is distributed as a **dotnet tool** via NuGet. This is the primary distribution channel — it avoids code-signing requirements (no Windows SmartScreen warnings), works cross-platform, and integrates with existing .NET toolchains. + +```sh +# Install globally +dotnet tool install -g napper + +# Install a specific version +dotnet tool install -g napper --version 0.6.0 + +# Update to latest +dotnet tool update -g napper +``` + +The VSIX extension installs the CLI automatically via `dotnet tool install` on activation, using the extension's own version to determine which CLI version to install. Users with the CLI already on PATH (or configured via `nap.cliPath`) skip the auto-install. 
+ +**Future channels** (not yet implemented): +- Homebrew formula (`brew install napper`) +- Winget / Chocolatey / Scoop packages +- Standalone native binary (NativeAOT single-file publish) + +--- + +## Usage + +### `cli-run` — Run Command + +```sh +# Run a single request (simplest case — as easy as curl) +napper run ./users/get-user.nap + +# Run a single request with inline variable override +napper run ./users/get-user.nap --var userId=99 + +# Run a collection (folder) +napper run ./users/ + +# Run a playlist +napper run ./smoke.naplist + +# Specify environment +napper run ./smoke.naplist --env staging +``` + +### `cli-check` — Validate Syntax + +```sh +# Validate syntax without running +napper check ./smoke.naplist +``` + +### `cli-generate` — Generate from OpenAPI + +```sh +# Generate .nap files from an OpenAPI spec +napper generate openapi ./petstore.json --output-dir ./petstore/ +``` + +See [CLI OpenAPI Generation](./CLI-OPENAPI-GENERATION.md) for full details. + +--- + +## CLI Flags + +| Flag | Spec ID | Description | +|------|---------|-------------| +| `--env <name>` | `cli-env` | Load environment variables from `.napenv.<name>` (`env-named`) | +| `--var <key=value>` | `cli-var` | Override a variable (repeatable). 
Highest priority in `env-resolution` | +| `--output <format>` | `cli-output` | Output format: `output-pretty` (default), `output-junit`, `output-json`, `output-ndjson` | +| `--output-dir <dir>` | `cli-output-dir` | Destination directory for `cli-generate` | +| `--verbose` | `cli-verbose` | Enable debug-level logging | + +--- + +## `cli-output` — Output Formats + +| Format | Spec ID | Description | +|--------|---------|-------------| +| `pretty` | `output-pretty` | Human-readable console output with ANSI colors (default) | +| `junit` | `output-junit` | JUnit XML for CI/CD integration | +| `json` | `output-json` | Single JSON object per result | +| `ndjson` | `output-ndjson` | Newline-delimited JSON for streaming | + +--- + +## `cli-exit-codes` — Exit Codes + +| Code | Meaning | +|------|---------| +| 0 | All assertions passed | +| 1 | One or more assertions failed | +| 2 | Runtime error (network, script error, parse error) | + +--- + +## Related Specs + +- [File Formats](./FILE-FORMATS-SPEC.md) — `.nap`, `.napenv`, `.naplist` format specifications +- [Scripting](./SCRIPTING-SPEC.md) — F# and C# scripting model, NapContext, NapRunner +- [CLI Plan](./CLI-PLAN.md) — Parser, project layout, implementation phases +- [OpenAPI Generation (CLI)](./CLI-OPENAPI-GENERATION.md) — Test suite generation from OpenAPI specs diff --git a/specs/FILE-FORMATS-SPEC.md b/specs/FILE-FORMATS-SPEC.md new file mode 100644 index 0000000..d7dd7a2 --- /dev/null +++ b/specs/FILE-FORMATS-SPEC.md @@ -0,0 +1,163 @@ +# Nap File Formats + +Specifications for `.nap`, `.napenv`, and `.naplist` file formats. These are shared between the CLI and all IDE extensions. + +--- + +## `nap-file` — `.nap` Request File + +Each `.nap` file defines one **request** plus its optional **setup**, **assertions**, and **script reference**. 
+ +### `nap-minimal` — Minimal example + +```nap +GET https://api.example.com/users +``` + +### `nap-full` — Full anatomy + +```nap +# Optional metadata block +[meta] +name = "Get user by ID" +description = "Fetches a single user and asserts shape" +tags = ["users", "smoke"] + +# Optional variables (can be overridden by environment) +[vars] +userId = "42" + +# Request block (required) +[request] +method = GET +url = https://api.example.com/users/{{userId}} + +[request.headers] +Authorization = Bearer {{token}} +Accept = application/json + +# Optional: request body (for POST/PUT/PATCH) +# [request.body] +# content-type = application/json +# """ +# { "name": "Alice" } +# """ + +# Optional: built-in assertions (no scripting required) +[assert] +status = 200 +body.id = {{userId}} +body.name exists + +# Optional: reference an external script for complex assertions or setup +[script] +pre = ./scripts/auth.fsx # runs before the request +post = ./scripts/validate-user.fsx # runs after the response +``` + +### `nap-design` — Key design decisions + +- **TOML-inspired syntax** — familiar, unambiguous, easy to parse. +- **`{{variable}}`** interpolation (`env-interpolation`) throughout — variables resolved from env files, CLI flags, or parent playlist scope. +- **`[assert]` block** — declarative assertions that cover ~80% of cases without scripting: + - `assert-status` — `status = 200` — HTTP status code + - `assert-equals` — `body.path = value` — JSONPath equality + - `assert-exists` — `body.path exists` — presence check + - `assert-matches` — `body.path matches "pattern"` — glob pattern match + - `assert-contains` — `headers.Content-Type contains "json"` — substring check + - `assert-lt` — `duration < 500ms` — less-than comparison + - `assert-gt` — `body.count > 0` — greater-than comparison +- **`[script]` block** — references external `.fsx`/`.csx` files for pre/post hooks (see `script-fsx`, `script-csx`). +- `nap-comments` — Comments with `#`. 
+ +#### `http-methods` — Supported HTTP Methods + +GET, POST, PUT, PATCH, DELETE, HEAD, OPTIONS + +--- + +## `env-file` — `.napenv` Environment File + +Environment files are TOML files that define variable sets for different deployment targets. + +```toml +# .napenv (base — checked into git, no secrets) +baseUrl = "https://api.example.com" +userId = "42" +``` + +```toml +# .napenv.local (gitignored — secrets) +token = "eyJhbGci..." +``` + +```toml +# .napenv.staging +baseUrl = "https://staging.api.example.com" +token = "staging-token" +``` + +### `env-resolution` — Variable resolution order (highest wins) + +1. CLI `--var key=value` flags (`cli-var`) +2. `env-local` — `.napenv.local` +3. `env-named` — Named environment file (e.g. `.napenv.staging`) +4. `env-base` — Base `.napenv` +5. `nap-vars` — `[vars]` block in the `.nap` file + +--- + +## `collection-folder` — Collections: Folder-Based + +A folder of `.nap` files is implicitly a **collection**. Subfolders are sub-collections. + +``` +my-api/ +├── .napenv +├── .napenv.local # gitignored +├── auth/ +│ ├── 01_login.nap +│ └── 02_refresh-token.nap +├── users/ +│ ├── 01_get-user.nap +│ ├── 02_create-user.nap +│ └── 03_delete-user.nap +└── smoke.naplist +``` + +`collection-sort` — Execution order within a folder: **filename sort** (use numeric prefixes `01_`, `02_` to control order). + +--- + +## `naplist-file` — `.naplist` Playlist File + +A `.naplist` file is an explicit ordered list of steps. 
Steps can reference: +- `naplist-nap-step` — Individual `.nap` files (by relative path) +- `naplist-folder-step` — Folders (run all `.nap` files in that folder, sorted) +- `naplist-nested` — Other `.naplist` files (nested playlists — fully recursive) +- `naplist-script-step` — `.fsx` or `.csx` scripts + +### Example `smoke.naplist` + +```naplist +[meta] +name = "Smoke Test Suite" +env = staging # default environment for this playlist + +[vars] +timeout = "5000" + +[steps] +./auth/01_login.nap +./auth/02_refresh-token.nap +./users/01_get-user.nap + +# Include another playlist +./regression/core.naplist +``` + +### `naplist-var-scope` — Variable scoping in playlists + +- A `[vars]` block (`naplist-vars`) in a `.naplist` sets variables for all steps in that playlist. +- Scripts can use `ctx.Set` (`script-context`) to pass variables **forward** to subsequent steps in the same playlist. +- Nested `.naplist` files (`naplist-nested`) inherit the parent's variable scope unless they override. diff --git a/specs/HTTP-FILES-PLAN.md b/specs/HTTP-FILES-PLAN.md new file mode 100644 index 0000000..c2314a2 --- /dev/null +++ b/specs/HTTP-FILES-PLAN.md @@ -0,0 +1,210 @@ +# .http File Compatibility — Implementation Plan + +--- + +## Architecture + +The parser lives in a **standalone project `DotHttp`** (publishable as a NuGet package) with zero dependency on Napper.Core. The converter lives in Napper.Core and maps parsed types to `.nap` file content. 
+ +``` +DotHttp/ → Napper.Core/HttpToNapConverter.fs + Types.fs (maps HttpFile → .nap content + Parser.fs (FParsec) using OpenApiTypes constants) +``` + +### Key Modules + +| Module | Location | Responsibility | +|--------|----------|---------------| +| `Types` | `DotHttp/Types.fs` | `HttpRequest`, `HttpFile`, `HttpDialect` types | +| `Parser` | `DotHttp/Parser.fs` | FParsec parser: `.http` text → `HttpFile` | +| `HttpToNapConverter` | `Napper.Core/HttpToNapConverter.fs` | Map `HttpFile` → `.nap` file content + env conversion | + +### Types + +```fsharp +type HttpDialect = Microsoft | JetBrains | Common + +type HttpRequest = + { Name: string option + Method: string + Url: string + HttpVersion: string option + Headers: (string * string) list + Body: string option + PreScript: string option + PostScript: string option + Comments: string list } + +type HttpFile = + { Requests: HttpRequest list + FileVariables: (string * string) list // @var = value (MS dialect) + Dialect: HttpDialect } + +type HttpEnv = + { Environments: Map<string, Map<string, string>> + PrivateEnvironments: Map<string, Map<string, string>> } + +type ConvertResult = + { GeneratedFiles: (string * string) list // (path, content) + Warnings: string list } +``` + +--- + +## Parser Design + +The `.http` parser uses **FParsec** (already a project dependency) with a state-tracking line-by-line approach. For files with JetBrains inline scripts (`< {% ... %}`), a streaming FParsec parser handles multiline script blocks. FParsec was chosen because no official .http file parser exists as a NuGet package — the `DotHttp` project is intended to fill this gap and be published independently. 
+ +### State Machine + +``` + ┌──────────┐ + ┌─────────│ IDLE │◄────── ### separator + │ └────┬─────┘ + │ │ METHOD line detected + │ ▼ + │ ┌──────────┐ + │ │ HEADERS │◄────── Key: Value lines + │ └────┬─────┘ + │ │ blank line + │ ▼ + │ ┌──────────┐ + │ │ BODY │◄────── non-separator lines + │ └────┬─────┘ + │ │ ### or EOF + └──────────────┘ +``` + +**Method detection:** A line starts with a known HTTP method (`GET`, `POST`, `PUT`, `PATCH`, `DELETE`, `HEAD`, `OPTIONS`) followed by a space and a URL. + +**Header detection:** A line matches `NonWhitespace: AnyText` (colon-separated with no leading whitespace). + +**Dialect detection** runs as a pre-pass over the file, looking for `@var = value` (MS) or `< {%` / `> {%` (JB) patterns before parsing. + +--- + +## Implementation Phases + +### Phase 1 — Core Converter (Common Subset) + +Parse the `http-shared` common subset and generate `.nap` files. + +**Scope:** +- Parse `###`-separated requests with method line, headers, and body +- Generate one `.nap` file per request +- Map `{{variable}}` interpolation (already identical syntax) +- Numeric prefix naming (`01_name.nap`, `02_name.nap`) +- `--output-dir` and `--dry-run` flags +- CLI entry point: `napper convert http <input> --output-dir <dir>` + +**Testing:** +- Unit tests: parser correctness on sample `.http` files +- E2e tests: `napper convert http` CLI command produces expected `.nap` files +- Edge cases: empty bodies, no headers, multiple requests, trailing newlines + +### Phase 2 — Dialect-Specific Features + +Add Microsoft and JetBrains dialect support. + +**Microsoft:** +- Parse `@variable = value` file-level variables → `[vars]` block +- Parse `# @name requestName` → `[meta] name` + +**JetBrains:** +- Parse `http-client.env.json` → `.napenv.*` files +- Parse `http-client.private.env.json` → `.napenv.local` +- Detect and warn on unsupported features (WebSocket, gRPC, GraphQL, `import`/`run`) +- Simple assertion extraction from `> {% ... 
%}` response handlers + +**Testing:** +- Unit tests: dialect detection accuracy +- Unit tests: environment file conversion +- E2e tests: convert real-world JetBrains HTTP Client files +- E2e tests: convert real-world REST Client (VS Code) files + +### Phase 3 — IDE Integration + +Add converter commands to IDE extensions. + +**VSCode:** +- `Nap: Convert .http File` command +- `Nap: Convert .http Directory` command +- CodeLens on `.http` files showing `Convert to .nap` +- Post-conversion: open generated files + +**Zed:** +- `/nap-convert-http` slash command + +**Testing:** +- VSCode e2e: command execution, file creation, editor opens +- Zed: manual testing (no automated e2e framework) + +### Phase 4 — Direct Execution (Future) + +`napper run file.http` converts in-memory and executes. + +**Scope:** +- Detect `.http` extension in `napper run` → parse → convert to in-memory `NapFile` → execute +- `--request <name>` flag to select a specific request from multi-request files +- `--env` flag reads `http-client.env.json` when running `.http` files + +**Testing:** +- E2e tests: `napper run file.http` returns expected output +- E2e tests: `--request` flag filters correctly + +--- + +## Open Questions + +1. **`.rest` extension** — JetBrains and REST Client also support `.rest` as an alias for `.http`. Should Nap treat them identically? **Recommendation: yes.** +2. **Round-trip fidelity** — Should the converter preserve original comments in the `.nap` output? **Recommendation: yes, as `#` comments above the relevant section.** +3. **Playlist generation** — When converting a directory, should a `.naplist` be generated for the converted files? **Recommendation: yes, matching the OpenAPI generator pattern.** +4. **Incremental conversion** — Should re-running the converter on an already-converted directory be safe (skip existing, only add new)? 
**Recommendation: yes, `--overwrite` opt-in for replacement.** + +--- + +## TODO + +### Phase 1 — Core Converter (Common Subset) +- [x] Define `HttpRequest` and `HttpFile` types — `DotHttp/Types.fs` +- [x] Implement FParsec `.http` parser with state-tracking line-by-line approach — `DotHttp/Parser.fs` +- [x] Implement `HttpToNapConverter` mapping — `Napper.Core/HttpToNapConverter.fs` +- [x] Wire up `napper convert http` CLI command — `Napper.Cli/Program.fs` +- [x] `--output-dir` flag +- [x] `--dry-run` flag +- [x] Numeric prefix naming for output files +- [x] Parser unit tests (32 tests: single request, multi-request, edge cases) — `DotHttp.Tests/ParserTests.fs` +- [x] CLI e2e tests (12 tests: single file, multi-request, directory, dry-run, env, JSON output) — `Napper.Core.Tests/HttpConvertE2eTests.fs` + +### Phase 2 — Dialect-Specific Features +- [x] Dialect detection pre-pass — auto-detected from `@var` (MS) or `< {%` / `> {%` (JB) +- [ ] `--dialect` flag (ms / jb / auto) +- [x] Microsoft `@variable = value` parsing +- [x] Microsoft `# @name` parsing +- [x] JetBrains `http-client.env.json` → `.napenv.*` conversion +- [x] JetBrains `http-client.private.env.json` → `.napenv.local` — auto-detected next to input +- [x] Simple assertion extraction from JB response handlers — `response.status`, `hasOwnProperty` +- [x] Unsupported feature warnings (WebSocket, gRPC, GraphQL, etc.) +- [x] Dialect detection unit tests +- [x] Environment conversion e2e tests +- [ ] Real-world file e2e tests (download and convert actual public .http collections) + +### Phase 2.5 — AI Script Porting (placeholder) +- [ ] AI-assisted porting of JetBrains `> {% ... %}` scripts to `[script]`/`[assert]` blocks +- [ ] AI-assisted porting of JetBrains `< {% ... 
%}` pre-request scripts +- [ ] AI-assisted porting of external JS script file references + +### Phase 3 — IDE Integration +- [ ] VSCode `Nap: Convert .http File` command +- [ ] VSCode `Nap: Convert .http Directory` command +- [ ] VSCode CodeLens on `.http` files +- [ ] Zed `/nap-convert-http` slash command +- [ ] VSCode e2e tests + +### Phase 4 — Direct Execution (Future) +- [ ] `.http` extension detection in `napper run` +- [ ] In-memory conversion pipeline +- [ ] `--request <name>` flag +- [ ] `--env` reads `http-client.env.json` for `.http` files +- [ ] Direct execution e2e tests diff --git a/specs/HTTP-FILES-SPEC.md b/specs/HTTP-FILES-SPEC.md new file mode 100644 index 0000000..44b63f7 --- /dev/null +++ b/specs/HTTP-FILES-SPEC.md @@ -0,0 +1,317 @@ +# `http-compat` — .http File Compatibility + +> **Let users bring their existing `.http` files to Nap — convert them to `.nap` format, or run them directly.** + +--- + +## Problem + +The `.http` file format is the most widely adopted plain-text HTTP request format. It is supported natively by Visual Studio, JetBrains IDEs (IntelliJ, Rider, WebStorm), and VS Code via the REST Client extension. Many teams already have `.http` file collections in their repos. + +Nap's `.nap` format is superior for testing (declarative assertions, playlists, scripting), but asking users to abandon existing `.http` files is a migration barrier. Nap should meet users where they are. + +--- + +## The `.http` Format Landscape + +There is **no single `.http` standard**. Two major dialects exist: + +### `http-ms` — Microsoft Dialect + +Used by Visual Studio and the VS Code REST Client extension. Defined informally by [RFC 9110](https://www.rfc-editor.org/rfc/rfc9110) alignment and Microsoft's tooling docs. 
+ +| Feature | Syntax | +|---------|--------| +| Request separator | `###` | +| Comments | `#` or `//` | +| Variables | `@variableName = value` (file-level) or `{{variableName}}` (interpolation) | +| Environments | VS Code settings or JSON files | +| Named requests | `# @name requestName` above request line | +| Response scripting | Not supported natively (extension-dependent) | + +### `http-jb` — JetBrains Dialect + +Used by IntelliJ IDEA, Rider, WebStorm, and the JetBrains HTTP Client CLI. + +| Feature | Syntax | +|---------|--------| +| Request separator | `###` | +| Comments | `#` or `//` | +| Variables | `{{variableName}}` (interpolation from env files) | +| Environments | `http-client.env.json` / `http-client.private.env.json` | +| Named requests | `### Request Name` (text after separator) | +| Pre-request scripts | `< {% ... %}` or `< file.js` | +| Response handlers | `> {% ... %}` or `> file.js` | +| Output redirection | `>>` (new file) / `>>!` (overwrite) | +| WebSocket | `WEBSOCKET ws://...` with `===` message separators | +| GraphQL | `GRAPHQL http://...` with inline query | +| gRPC | `GRPC host/service/method` | + +### `http-shared` — Common Subset + +Both dialects share this core syntax: + +```http +### Optional comment or name +METHOD URL [HTTP/version] +Header-Name: Header-Value +Header-Name: Header-Value + +Request body here +``` + +Key shared elements: +- `http-separator` — `###` separates requests within a single file +- `http-method-line` — `METHOD URL` as the first line of a request +- `http-headers` — colon-separated `Key: Value` pairs +- `http-body` — blank line followed by body content +- `http-comments` — `#` and `//` for comments +- `http-vars` — `{{variable}}` interpolation syntax (same as Nap) + +--- + +## Approach: Converter (Primary) + Direct Run (Future) + +### Decision: Converter First + +After evaluating three options, the **converter** approach is the primary strategy: + +| Option | Pros | Cons | +|--------|------|------| +| **A. 
Converter (`napper convert`)** | Simple, deterministic, testable; users get full `.nap` features after conversion; no runtime complexity | One-time migration step; users must re-convert if `.http` files change | +| B. LSP dual-format support | Seamless — `.http` files just work in the IDE | Massive LSP complexity; two grammars to maintain; assertion gap | +| C. Runtime interpreter | `napper run file.http` just works | Must replicate JetBrains/MS scripting models; assertion mapping is lossy | + +**Rationale:** The converter is the highest-value, lowest-risk path. It gives users a clear migration story, produces first-class `.nap` files that benefit from all Nap features, and keeps the core simple. Direct `napper run file.http` support can be added later as a convenience that internally converts on-the-fly. + +--- + +## `http-convert` — Conversion Specification + +### CLI Command + +```sh +# Convert a single .http file +napper convert http ./requests.http --output-dir ./nap-requests/ + +# Convert a directory of .http files +napper convert http ./http-collection/ --output-dir ./nap-collection/ + +# Convert with JetBrains environment file +napper convert http ./requests.http --env-file ./http-client.env.json --output-dir ./output/ + +# Dry run — show what would be generated +napper convert http ./requests.http --dry-run +``` + +### `http-convert-flags` — CLI Flags + +| Flag | Spec ID | Description | +|------|---------|-------------| +| `--output-dir <dir>` | `http-convert-outdir` | Destination directory for generated `.nap` files | +| `--env-file <path>` | `http-convert-envfile` | Path to `http-client.env.json` or similar env file | +| `--dialect <ms\|jb\|auto>` | `http-convert-dialect` | Force a dialect; `auto` (default) detects from syntax | +| `--dry-run` | `http-convert-dryrun` | Preview generated files without writing | +| `--overwrite` | `http-convert-overwrite` | Overwrite existing `.nap` files (default: skip) | + +### `http-convert-parse` — Parsing Strategy + 
+The converter parses `.http` files using a **line-oriented state machine** (not regex on structured data). The parser operates on the `http-shared` common subset, with dialect-specific extensions: + +**Parser states:** +1. `IDLE` — between requests, consuming `###` separators and comments +2. `METHOD_LINE` — expecting `METHOD URL [HTTP/version]` +3. `HEADERS` — consuming `Key: Value` lines until blank line +4. `BODY` — consuming body lines until next `###` or EOF + +**Dialect detection (`http-convert-detect`):** +- `@variable = value` at file level → Microsoft dialect +- `< {%` or `> {%` script blocks → JetBrains dialect +- `http-client.env.json` present in same directory → JetBrains dialect +- Neither → treat as common subset + +### `http-convert-mapping` — Format Mapping + +#### Request mapping + +| `.http` element | `.nap` output | Notes | +|-----------------|---------------|-------| +| `### Name` or `# @name Name` | `[meta] name = "Name"` | Request name | +| `METHOD URL` | `[request] METHOD URL` | Direct mapping | +| `Header: Value` | `[request.headers] Header = Value` | Direct mapping | +| Body content | `[request.body]` | Direct mapping | +| `{{variable}}` | `{{variable}}` | Identical syntax — no change needed | +| `HTTP/1.1` or `HTTP/2` | Dropped | Nap does not specify HTTP version | + +#### Variable mapping + +| Source | `.nap` output | +|--------|---------------| +| `@var = value` (MS file-level) | `[vars] var = "value"` | +| `http-client.env.json` environments | `.napenv` + `.napenv.<envname>` files | +| `http-client.private.env.json` | `.napenv.local` (gitignored) | + +#### `http-convert-env` — Environment file conversion + +JetBrains `http-client.env.json`: +```json +{ + "dev": { "host": "localhost:8080", "token": "abc" }, + "prod": { "host": "api.example.com", "token": "xyz" } +} +``` + +Converts to: +```toml +# .napenv (common variables — empty if all are env-specific) +``` +```toml +# .napenv.dev +host = "localhost:8080" +token = "abc" +``` +```toml 
+# .napenv.prod +host = "api.example.com" +token = "xyz" +``` + +Private env file → `.napenv.local` with a comment noting it should be gitignored. + +#### `http-convert-scripts` — Script conversion + +JetBrains pre-request and response handler scripts are **not converted**. Instead, the converter emits a warning and a `TODO` comment in the generated `.nap` file: + +```nap +# TODO: This request had a JetBrains response handler script. +# Original: > {% client.test("status", function() { client.assert(response.status === 200) }) %} +# Convert to a [script] post reference or [assert] block. +[assert] +status = 200 +``` + +**Simple assertion extraction (`http-convert-assert`):** When a JetBrains response handler contains recognizable patterns, the converter extracts them into `[assert]` blocks: + +| JetBrains pattern | Nap assertion | +|--------------------|---------------| +| `response.status === 200` | `status = 200` | +| `response.body.hasOwnProperty("id")` | `body.id exists` | +| `response.headers.valueOf("Content-Type")` contains check | `headers.Content-Type contains "..."` | + +Complex scripts that cannot be pattern-matched are left as TODO comments only. 
+ +#### `http-convert-unsupported` — Unsupported features + +These JetBrains-specific features have no `.nap` equivalent and are **dropped with warnings**: + +| Feature | Handling | +|---------|----------| +| WebSocket requests (`WEBSOCKET`) | Warning: "WebSocket not supported, skipping" | +| gRPC requests (`GRPC`) | Warning: "gRPC not supported, skipping" | +| GraphQL requests (`GRAPHQL`) | Warning: "GraphQL not supported, skipping" | +| Output redirection (`>>`, `>>!`) | Warning: "Output redirection not supported" | +| `@no-log`, `@no-cookie-jar` tags | Warning: "Tag not supported" | +| `import` / `run` directives | Warning: "Import directives not supported" | +| SSL configuration | Warning: "SSL configuration not converted" | + +### `http-convert-output` — Output Structure + +A single `.http` file with multiple requests: + +``` +input.http → output-dir/ + ├── .napenv + ├── 01_get-users.nap + ├── 02_create-user.nap + └── 03_delete-user.nap +``` + +A directory of `.http` files: + +``` +http-collection/ → nap-collection/ +├── auth.http ├── .napenv +├── users.http ├── auth/ +└── http-client.env.json │ ├── 01_login.nap + │ └── 02_refresh.nap + └── users/ + ├── 01_get-user.nap + └── 02_create-user.nap +``` + +**Naming rules (`http-convert-naming`):** +- Request name from `### Name` or `# @name Name` → slugified filename +- No name → `{method}-{url-path-slug}` (e.g. `get-users-userid`) +- Numeric prefix for ordering: `01_`, `02_`, etc. +- Multiple requests per `.http` file → one `.nap` file each, grouped in a subdirectory named after the `.http` file + +--- + +## `http-run` — Direct `.http` File Execution (Future) + +A future convenience feature: `napper run file.http` internally converts on-the-fly and executes. 
+ +```sh +# Run a .http file directly (converts in memory, does not write .nap files) +napper run ./requests.http + +# Run a specific request by name within a multi-request .http file +napper run ./requests.http --request "Get Users" + +# Run with a specific environment from http-client.env.json +napper run ./requests.http --env dev +``` + +**Implementation:** Parse → convert to in-memory `.nap` representation → execute through the existing runner. No files written to disk. + +**New flag:** +| Flag | Spec ID | Description | +|------|---------|-------------| +| `--request <name>` | `http-run-request` | Run a specific named request from a multi-request `.http` file | + +--- + +## `http-ide` — IDE Extension Integration + +### VSCode + +- **`Nap: Convert .http File`** command — converts the active `.http` file or prompts for a file picker +- **`Nap: Convert .http Directory`** command — converts all `.http` files in a selected directory +- CodeLens on `.http` files: `Convert to .nap` above each `###` separator +- After conversion, opens the generated `.nap` file(s) in the editor + +### Zed + +- Slash command: `/nap-convert-http <file>` — converts and returns summary in the Assistant + +--- + +## Dependencies + +This feature depends on **Microsoft's `.http` format specification**. While there is no formal RFC, Microsoft's Visual Studio and VS Code REST Client define the de facto standard. The converter targets the `http-shared` common subset plus explicit dialect handling for Microsoft and JetBrains extensions. + +**No dependency on JetBrains' proprietary runtime or API.** The converter reads the file format only — it does not invoke the JetBrains HTTP Client engine. + +### `http-parser-project` — Standalone Parser Package + +The `.http` file parser lives in a standalone project **`DotHttp`** with no dependency on Napper.Core. It uses **FParsec** (parser combinator library, already a project dependency) because no official `.http` file parser exists as a NuGet package. 
The project is designed to be published independently as `DotHttp` on NuGet for any .NET project that needs `.http` file parsing. It is a generic, reusable library — not Nap-specific. + +**No new dependencies** — FParsec 1.1.1 is already used for the `.nap` file parser. + +--- + +## Design Principles + +1. **Lossless where possible.** Every piece of information in the `.http` file should appear in the `.nap` output — either as a direct mapping or as a comment. +2. **Warnings over errors.** Unsupported features produce warnings, not failures. The converter should always produce output. +3. **Idempotent.** Running the converter twice on the same input produces identical output. +4. **No invented assertions.** The converter only generates `[assert]` blocks from explicit JetBrains response handlers. It does not guess assertions. + +--- + +## Related Specs + +- [File Formats](./FILE-FORMATS-SPEC.md) — `.nap`, `.napenv`, `.naplist` format specs (target format) +- [CLI Spec](./CLI-SPEC.md) — CLI commands and flags +- [IDE Extension Spec](./IDE-EXTENSION-SPEC.md) — IDE integration surface +- [HTTP Files Plan](./HTTP-FILES-PLAN.md) — Implementation phases and TODO diff --git a/specs/IDE-EXTENSION-OPENAPI-GENERATION-SPEC.md b/specs/IDE-EXTENSION-OPENAPI-GENERATION-SPEC.md new file mode 100644 index 0000000..8869bf7 --- /dev/null +++ b/specs/IDE-EXTENSION-OPENAPI-GENERATION-SPEC.md @@ -0,0 +1,152 @@ +# `vscode-openapi` — OpenAPI Test Generation — IDE Extension + +> Extension-side integration for OpenAPI import and AI-assisted enrichment. + +--- + +## `vscode-openapi-import` — Import Command + +The `Nap: Import from OpenAPI` command (`nap.importOpenApi`): + +1. User picks a spec file (JSON / YAML) or pastes a URL +2. User picks an output folder +3. Generator runs, writes files +4. Opens the generated `.naplist` in the editor +5.
Shows success notification with file count + +### Menu placement + +The import command appears in: +- The Nap explorer panel title bar (cloud-download icon) +- The Command Palette + +--- + +## `vscode-openapi-ai` — AI-Assisted Enrichment (Copilot) + +> AI enrichment is an **optional layer** on top of the deterministic generator. The generator always works without Copilot. When Copilot is available and the user opts in, the output is enriched. + +### How it works + +1. The deterministic generator produces the base `GenerationResult` +2. If the user chooses "Generate with AI enhancement" and Copilot is available: + - The enricher sends batched prompts to the VS Code Language Model API (`vscode.lm`) + - Each prompt covers a batch of operations (grouped by tag) to stay within rate limits + - The LLM responses are parsed and merged into the generation result +3. The enriched files are written to disk + +### What AI enriches + +| Area | Without AI | With AI | +|------|-----------|---------| +| Assertions | `status = 200`, `body.field exists` for required fields | Semantic assertions: format checks, value range checks, relationship assertions between fields | +| Request body examples | Schema-derived defaults (`"example"`, `0`, `true`) | Contextually realistic values: real-looking emails, names, dates, UUIDs | +| Error case tests | One per documented error status code with placeholder input | Targeted invalid inputs that would actually trigger each error | +| Playlist ordering | File-sort order | Logical flow: auth first, create before read, CRUD lifecycle | +| Validation scripts | None | `.fsx` scripts for complex nested object / array validation | + +### Architecture + +The AI enrichment is split into two modules: + +**`openApiAiEnhancer.ts`** — pure functions, no VS Code SDK dependency: +- Input: `GenerationResult` + parsed `OpenApiSpec` + LLM response strings +- Output: enriched `GenerationResult` +- Fully testable without VS Code + +**Extension integration layer** 
(in `extension.ts`): +- Checks `vscode.lm.selectChatModels()` for Copilot availability +- Presents choice: "Generate" vs "Generate with AI" +- Sends prompts, collects responses, passes to enhancer +- Shows progress notification during AI processing + +### Prompt design + +Prompts return parseable JSON. Each covers one enrichment aspect for a batch of operations: + +- **Assertion enrichment**: Given response schemas, return assertion lines per operation +- **Test data enrichment**: Given request body schemas, return realistic example bodies +- **Error case enrichment**: Given operations with error responses, return test inputs per error code + +### Future AI integration + +The VS Code Language Model API integration is the first step. Future paid features may include: +- A standalone Nap agent that generates and maintains test suites outside VS Code +- Continuous test generation that watches spec changes and updates tests +- AI-driven test prioritization based on API change impact analysis + +--- + +## Current Implementation State + +### What exists today + +**`src/Napper.VsCode/src/openApiGenerator.ts`** (380 lines) — pure TypeScript, no VS Code SDK: +- `generateFromOpenApi(jsonText: string): Result<GenerationResult, string>` +- Supports OpenAPI 3.x and Swagger 2.x (JSON only) +- Extracts base URL from `servers[]` or `host`/`basePath`/`schemes` +- Converts path params `{param}` to `{{param}}` +- Generates example request bodies from schemas (recursive) +- Creates `[assert]` with success status code only +- Adds Content-Type/Accept headers for POST/PUT/PATCH +- Outputs numbered `.nap` files, one `.naplist`, one `.napenv` +- All string literals defined as constants in `constants.ts` + +**`src/Napper.VsCode/src/extension.ts`** (lines 412-472) — VS Code integration: +- File picker for spec file +- Output folder picker +- Writes generated files to disk + +**`src/Napper.VsCode/src/constants.ts`** (lines 201-241) — all OpenAPI constants + +### What is missing + +| Gap | 
Priority | Notes | +|-----|----------|-------| +| Unit tests for openApiGenerator.ts | Critical | 380 lines of pure functions with zero tests | +| `$ref` resolution | High | Most real-world specs use `$ref` extensively | +| YAML support | High | YAML is the dominant format for OpenAPI specs | +| Response body assertions | High | Only generates `status = code` today | +| Tag-based folder organization | High | Currently flat-numbered, should group by tag | +| Query parameter handling | Medium | Not added to URL or `[vars]` | +| Auth scheme handling | Medium | No security scheme detection | +| `[vars]` block for path params | Medium | Params are in URL but no `[vars]` section | +| `generated = true` meta flag | Medium | Spec calls for it, not implemented | +| Error case generation | Medium | Only happy-path tests generated | +| Smarter example values (format/enum) | Medium | Everything is `"example"` or `0` | +| URL-based spec loading | Low | File picker only today | +| `--diff` mode | Low | No re-generation support | +| AI enrichment (Copilot) | Low | Foundation first, then AI layer | + +--- + +## Implementation Phases + +### Phase A: Testing Foundation + +Write comprehensive unit tests for `openApiGenerator.ts`. Test fixtures for valid OAS3, valid Swagger 2, edge cases, error cases. All pure functions, no VS Code dependency needed. 
+ +### Phase B: AI-Assisted Enrichment + +- `openApiAiEnhancer.ts` module (pure functions) +- VS Code Language Model API integration +- Batch prompt design and response parsing +- UI toggle: "Generate" vs "Generate with AI" +- Enhanced assertions, test data, playlist ordering + +--- + +## TODO + +### Phase A: Testing Foundation +- [ ] Unit tests for `openApiGenerator.ts` +- [ ] Test fixtures for valid OAS3 +- [ ] Test fixtures for valid Swagger 2 +- [ ] Edge case and error case tests + +### Phase B: AI-Assisted Enrichment +- [ ] `openApiAiEnhancer.ts` module (pure functions) +- [ ] VS Code Language Model API integration +- [ ] Batch prompt design and response parsing +- [ ] UI toggle: "Generate" vs "Generate with AI" +- [ ] Enhanced assertions, test data, playlist ordering diff --git a/specs/IDE-EXTENSION-PLAN.md b/specs/IDE-EXTENSION-PLAN.md new file mode 100644 index 0000000..5c61ad1 --- /dev/null +++ b/specs/IDE-EXTENSION-PLAN.md @@ -0,0 +1,89 @@ +# Nap VSCode Extension — Implementation Plan + +--- + +## Implementation Phases + +### Phase 1 — Core Extension + +- Syntax highlighting for `.nap` and `.naplist` files +- Explorer tab with collection tree +- CodeLens run actions +- Basic response viewer panel + +### Phase 2 — Test Explorer & Playlists + +- Test Explorer integration (`vscode.TestController`) +- Playlists tab with step tree +- Run results mapped to test items + +### Phase 3 — LSP Cutover + +Connect the VSCode extension to `napper-lsp` via `vscode-languageclient`. The LSP itself is a separate project — see **[LSP Plan](./LSP-PLAN.md)**. + +This phase **deletes duplicated TypeScript parsing code** and replaces it with LSP calls. After this phase, the VSIX is a thin UI shell — it renders data from the LSP, it does NOT parse `.nap` files itself. 
+ +**Delete and replace:** +- `extractHttpMethod` (TS) → use `textDocument/documentSymbol` from LSP +- `parseMethodAndUrl` (TS) → use `napper/requestInfo` from LSP +- `parsePlaylistStepPaths` (TS) → use `textDocument/documentSymbol` from LSP +- `detectEnvironments` (TS) → use `napper/environments` from LSP +- CodeLens section detection (TS) → use `textDocument/documentSymbol` from LSP +- Curl generation (TS) → use `napper/curlCommand` from LSP + +**Wire up:** +- `vscode-languageclient` to launch `napper-lsp` over stdio +- Environment switcher (status bar + quick-pick — data from LSP `napper/environments`) +- Hover, completions, diagnostics (provided by LSP) + +### Phase 4 — Polish & Distribution + +- **CLI installation via `dotnet tool install`** — replace raw binary download with `dotnet tool install -g napper --version X.X.X`. Version is read from the extension's own `package.json`. Eliminates Windows SmartScreen warnings and custom HTTP download code. +- Split editor layout (request panel webview) +- New request guided flow +- OpenAPI generation command +- Publish to VS Code Marketplace and Open VSX Registry + +--- + +## TODO + +### Phase 1 — Core Extension +- [ ] Syntax highlighting for `.nap` and `.naplist` files +- [ ] Explorer tab with collection tree +- [ ] CodeLens run actions +- [ ] Basic response viewer panel + +### Phase 2 — Test Explorer & Playlists +- [ ] Test Explorer integration (`vscode.TestController`) +- [ ] Playlists tab with step tree +- [ ] Run results mapped to test items + +### Phase 3 — LSP Cutover +- [ ] Add `vscode-languageclient` dependency +- [ ] Wire up to launch `napper-lsp` over stdio on activation +- [ ] Delete `extractHttpMethod` — use documentSymbol +- [ ] Delete `parseMethodAndUrl` — use `napper/requestInfo` +- [ ] Delete `parsePlaylistStepPaths` — use documentSymbol +- [ ] Delete `detectEnvironments` — use `napper/environments` +- [ ] Replace curl generation — use `napper/curlCommand` +- [ ] Replace CodeLens section detection — 
use documentSymbol +- [ ] Environment switcher data from LSP +- [ ] Verify hover, completions, diagnostics from LSP +- [ ] Run ALL existing VSIX e2e tests — must pass + +### Phase 4 — Polish & Distribution +- [ ] Replace raw binary download with `dotnet tool install -g napper --version X.X.X` +- [ ] Delete custom HTTP download code (`cliInstaller.ts` download/redirect logic) +- [ ] Split editor layout (request panel webview) +- [ ] New request guided flow +- [ ] OpenAPI generation command +- [ ] Publish to VS Code Marketplace and Open VSX Registry + +--- + +## Related Specs + +- [LSP Specification](./LSP-SPEC.md) — Language server capabilities +- [LSP Plan](./LSP-PLAN.md) — LSP implementation phases and TODO +- [IDE Extension Spec](./IDE-EXTENSION-SPEC.md) — Feature matrix and shared behaviour diff --git a/specs/IDE-EXTENSION-SPEC.md b/specs/IDE-EXTENSION-SPEC.md new file mode 100644 index 0000000..a3d238d --- /dev/null +++ b/specs/IDE-EXTENSION-SPEC.md @@ -0,0 +1,384 @@ +# `ide-extension` — Napper IDE Extension Specification + +> The extension is the **primary entry point** for most users. It must be as approachable as Postman on first open, but backed by plain files that work perfectly from the CLI and in CI. + +--- + +## Target IDEs + +| IDE | Language | Grammar System | Status | +|-----|----------|---------------|--------| +| **VSCode** (+ Cursor, Windsurf, VSCodium) | TypeScript | TextMate | Primary | +| **Zed** | Rust → WASM | Tree-sitter | Primary | +| **Neovim** | Lua | Tree-sitter | Future | + +All extensions shell out to the **Nap CLI** for execution. No IDE extension re-implements the HTTP runner. This keeps every IDE in sync with the CLI. 
+ +--- + +## System Architecture + +```mermaid +graph TB + subgraph "User's IDE" + VS[VSCode Extension<br/>TypeScript] + ZD[Zed Extension<br/>Rust/WASM] + NV[Neovim Plugin<br/>Lua] + end + + subgraph "Nap Toolchain" + LSP[nap-lsp<br/>F# binary] + CLI[nap CLI<br/>F# binary] + end + + subgraph "Napper.Core (shared F# library)" + PARSER[Parser.fs] + TYPES[Types.fs] + ENV[Environment.fs] + RUNNER[Runner.fs] + OPENAPI[OpenApiGenerator.fs] + end + + VS -->|stdio / LSP| LSP + ZD -->|stdio / LSP| LSP + NV -->|stdio / LSP| LSP + + VS -->|shell out| CLI + ZD -->|shell out| CLI + NV -->|shell out| CLI + + LSP --> PARSER + LSP --> TYPES + LSP --> ENV + + CLI --> PARSER + CLI --> TYPES + CLI --> ENV + CLI --> RUNNER + CLI --> OPENAPI +``` + +```mermaid +graph LR + subgraph "IDE ↔ LSP (language intelligence)" + direction LR + IDE1[IDE] -->|completions, diagnostics,<br/>hover, symbols| LSP1[nap-lsp] + end + + subgraph "IDE ↔ CLI (execution)" + direction LR + IDE2[IDE] -->|nap run, nap generate| CLI1[nap CLI] + end +``` + +--- + +## `vscode-philosophy` — Design Philosophy + +- **No separate app.** Everything lives inside the IDE. No webview-based fake browser. +- **Files are always the truth.** The UI is a lens over `.nap` and `.naplist` files. Edits in the UI update the file directly; edits in the file are immediately reflected in the UI. There is no sync step. +- **Progressive disclosure.** A new user can send their first request within 30 seconds of installing. Advanced features (scripting, playlists, environments) reveal themselves naturally as the user explores. +- **Looks good, works fast.** The UI should feel polished — not a dev tool hacked together from tree views and JSON editors. +- **Parity where possible.** Features should be as close as possible across IDEs. Where an IDE lacks a capability, degrade gracefully rather than omit the feature entirely. 
+ +--- + +## `ide-lsp` — Portable Core: Nap Language Server (LSP) + +The foundation for cross-IDE feature parity is a **Nap Language Server** (`napper-lsp`) — an F# binary that speaks LSP 3.17 over stdio. It reuses `Napper.Core` directly (parser, types, environment) with zero duplicated logic. + +**The LSP replaces duplicated logic in IDE extensions.** The VSIX currently re-parses `.nap` files in TypeScript to extract HTTP methods, URLs, playlist steps, and environment names. This logic already exists in `Napper.Core` F#. After the LSP cutover, all IDEs ask the LSP for this data instead of reimplementing parsing in their own language. **Less TypeScript, less Rust, MORE F#.** + +IDE extensions become **thin UI shells** — they render data from the LSP and handle IDE-specific UI (CodeLens, tree views, status bars). They do NOT parse `.nap` files themselves. + +See **[LSP Specification](./LSP-SPEC.md)** for the full capability spec and **[LSP Plan](./LSP-PLAN.md)** for implementation phases. 
+ +--- + +## Feature Matrix: What Ships Where + +| Feature | VSCode | Zed | Source | +|---------|--------|-----|--------| +| Syntax highlighting | TextMate grammar | Tree-sitter grammar | IDE-specific grammars, same visual result | +| Document symbols (outline) | LSP | LSP | **LSP** — `textDocument/documentSymbol` via `Napper.Core.Parser` | +| Request info (method + URL) | LSP | LSP | **LSP** — `napper/requestInfo` via `Napper.Core.Parser` | +| Copy as curl | LSP | LSP | **LSP** — `napper/curlCommand` via `Napper.Core.CurlGenerator` | +| Environment listing | LSP | LSP | **LSP** — `napper/environments` via `Napper.Core.Environment` | +| Completions | LSP | LSP | **LSP** — `textDocument/completion` | +| Diagnostics | LSP | LSP | **LSP** — `textDocument/publishDiagnostics` | +| Hover | LSP | LSP | **LSP** — `textDocument/hover` | +| Run request | CodeLens `▶ Run` | Runnables via `runnables.scm` | IDE-specific UI, both shell out to CLI | +| Sidebar panel | Tree view in Activity Bar | Not available | VSCode-only | +| Response viewer | Webview panel | Not available | VSCode-only; Zed uses terminal | +| Test Explorer | `vscode.TestController` | Not available | VSCode-only | +| Environment switcher UI | Status bar + quick-pick | CLI `--env` flag | IDE-specific UI; **data from LSP** | +| New request flow | Quick-input wizard | Not available | VSCode-only | +| Commands | Command Palette | Slash commands | IDE-specific entry points | +| AI enrichment (OpenAPI) | VS Code LM API | Zed Assistant | IDE-specific AI integration | + +--- + +## Shared Behaviour (All IDEs) + +### `vscode-syntax` — Syntax Highlighting + +Full grammar-aware highlighting for `.nap` and `.naplist` files. Both grammars must produce visually identical results. + +**VSCode:** TextMate grammar (`.tmLanguage.json`). +**Zed:** Tree-sitter grammar with `highlights.scm`, `brackets.scm`, `outline.scm`, `indents.scm` query files. 
+ +Both grammars highlight: +- Section headers (`[meta]`, `[request]`, `[assert]`, `[script]`, `[vars]`, `[steps]`) +- Keys and values +- `{{variable}}` interpolation +- HTTP methods (`GET`, `POST`, etc.) +- Comments (`#`) +- String literals +- Assertion operators (`exists`, `matches`, `contains`, `<`, `=`) + +### `vscode-codelens` — Run Actions + +Every IDE must support running a `.nap` file or `.naplist` file from within the editor. + +**VSCode:** CodeLens actions appear above relevant lines: +- `▶ Run` above `[request]` +- `▶ Run Playlist` above `[meta]` in `.naplist` files +- `⧉ Copy as curl` above `[request]` + +**Zed:** Runnables via `runnables.scm` detect `[request]` blocks and offer "Run" in the editor gutter. The runnable executes `nap run <file>` and streams output to the terminal. + +### Language Intelligence (via LSP) + +All IDEs connect to the Nap Language Server (`nap-lsp`) for completions, diagnostics, hover, and document symbols. See **[LSP Specification](./LSP-SPEC.md)** for the full details. + +--- + +## VSCode-Only Features + +These features rely on VSCode APIs that have no equivalent in Zed or Neovim. + +### `vscode-layout` — Layout Overview + +The extension contributes a dedicated **Nap Activity Bar icon** (sidebar panel). The panel has two tabs: + +``` +┌─────────────────────────────┐ +│ Nap [+ v] │ <- panel header: new request button, env picker +├──────────┬──────────────────┤ +│ Explorer │ Playlists │ <- two tabs +├──────────┴──────────────────┤ +│ │ +│ my-api/ │ <- folder = collection +│ auth/ │ +│ 01_login │ <- .nap file +│ 02_refresh-token │ +│ users/ │ +│ 01_get-user pass │ <- pass indicator +│ 02_create-user fail │ <- fail indicator +│ 03_delete-user │ +│ │ +│ smoke [> Run] │ <- .naplist file +└─────────────────────────────┘ +``` + +### `vscode-explorer` — Explorer Tab + +The Explorer tab mirrors the folder structure on disk, filtered to `.nap`, `.naplist`, and `.napenv` files. 
+ +**Each `.nap` file node shows:** +- File name (without extension, prettified) +- HTTP method badge (`GET`, `POST`, etc.) in a colour-coded pill +- Last run result icon: pass / fail / pending / skipped +- Hover: URL, last run time, last status code + +**Context menu on a `.nap` file:** +- Run +- Copy as curl +- Open in editor +- Add to playlist +- Duplicate +- Delete + +**Folder (collection) context menu:** +- Run all +- New request here +- New playlist here + +### `vscode-playlists` — Playlists Tab + +Lists all `.naplist` files found in the workspace, with a tree showing their step structure (including nested playlists). Each playlist node has a Run button. Individual steps can be run in isolation. + +### `vscode-editor` — Request Editor (split view) + +Clicking a `.nap` file opens it in a **split editor**: the raw `.nap` file on the left (editable), and a structured **Request Panel** on the right as a webview. + +``` +┌─────────────────────────┬──────────────────────────────────────┐ +│ get-user.nap │ Get user by ID [> Run] │ +│─────────────────────────│──────────────────────────────────────│ +│ [meta] │ -- Request ---------------------- │ +│ name = "Get user by ID" │ GET https://api.ex.../users/42 │ +│ │ │ +│ [request] │ Headers [+] │ +│ method = GET │ Authorization Bearer ****** │ +│ url = {{baseUrl}}/... │ Accept application/... │ +│ │ ----------------------------------- │ +│ [assert] │ │ +│ status = 200 │ -- Response --------------------- │ +│ body.id exists │ 200 OK 47ms 1.2 KB │ +│ │ │ +│ │ Headers Body Preview │ +│ │ { │ +│ │ "id": "42", │ +│ │ "name": "Alice" │ +│ │ } │ +│ │ │ +│ │ Assertions │ +│ │ pass status = 200 │ +│ │ pass body.id exists │ +└─────────────────────────┴──────────────────────────────────────┘ +``` + +**The right panel is read-only** — a live preview of the request and (after running) the response. All editing is done in the `.nap` file on the left. The two sides stay in sync automatically. 
+ +**Response sub-tabs:** +- **Body** — raw or pretty-printed JSON/XML/text with syntax highlighting and search +- **Headers** — response headers as a clean key/value table +- **Preview** — rendered HTML (for HTML responses) or image (for image responses) + +**Assertions section** (below the response): each assertion from the `[assert]` block is listed with its pass/fail state and the actual vs. expected value on failure. + +### `vscode-env-switcher` — Environment Switcher + +A **status bar item** (bottom-left) shows the active environment: + +``` +[ Nap: staging v ] +``` + +Clicking opens a quick-pick dropdown listing all detected environments (from `.napenv.*` files). Switching environment immediately re-resolves all variable previews in open editors. + +A per-workspace setting `nap.defaultEnvironment` can be committed to the repo to set the team default. + +### `vscode-new-request` — New Request Flow + +Clicking **[+]** in the panel header (or running the `Nap: New Request` command) opens a guided quick-input flow: + +1. **Pick HTTP method** — GET / POST / PUT / PATCH / DELETE / HEAD / OPTIONS +2. **Enter URL** — with autocomplete for `{{baseUrl}}` and other known variables +3. **Pick destination folder** — from the workspace collection tree +4. **Name the request** — defaults to `{method} {path}` (e.g. `GET users-userId`) + +The file is created immediately and opened in the split editor, ready to run. + +### `vscode-test-explorer` — Test Explorer Integration + +The extension registers a `vscode.TestController` so all `.nap` files appear in the standard VSCode **Test Explorer** panel (the flask icon in the activity bar). + +- Collections map to **test suites** +- `.nap` files map to **test items** +- `.naplist` files map to a **test suite** with each step as a child item +- Nested playlists are nested suites + +Run/debug actions in the Test Explorer invoke the Nap CLI under the hood (`nap run <file> --output junit`) and map results back to the test items. 
+
+Results are shown in the **Test Results** output panel with:
+- Full request (method, URL, headers, body)
+- Full response (status, headers, body)
+- Each assertion result with actual vs. expected values on failure
+- Script output (`ctx.Log` messages) shown as test output
+
+---
+
+## Zed-Only Features
+
+### Runnables
+
+Zed's `runnables.scm` detects `[request]` blocks in `.nap` files and offers a gutter "Run" action. The runnable executes `nap run <file>` and streams output to the Zed terminal panel. Capture groups from `runnables.scm` (exposed as environment variables prefixed with `ZED_CUSTOM_`) can pass additional context to the command.
+
+### Slash Commands (Assistant Integration)
+
+Zed extensions can register slash commands for the Zed Assistant:
+
+- `/nap-run <file>` — run a `.nap` file and return the result in the Assistant context
+- `/nap-import-openapi <file>` — generate `.nap` files from an OpenAPI spec
+
+### Text Redactions
+
+Zed supports `redactions.scm` to mask sensitive values during screen sharing. The Nap grammar should redact `{{variable}}` values sourced from `.napenv.local`.
+
+---
+
+## `vscode-settings` — Extension Settings
+
+These settings apply across all IDEs where the extension supports configuration.
+ +| Setting | Default | Description | IDEs | +|---------|---------|-------------|------| +| `nap.defaultEnvironment` | `""` | Active environment name | All (VSCode via settings, Zed/Neovim via CLI flag) | +| `nap.autoRunOnSave` | `false` | Re-run the request when the file is saved | VSCode | +| `nap.splitEditorLayout` | `"beside"` | `"beside"` or `"below"` for the response panel | VSCode | +| `nap.maskSecretsInPreview` | `true` | Mask variables sourced from `.napenv.local` in hover tooltips | All (via LSP) | +| `nap.cliPath` | `"nap"` | Path to the Nap CLI binary (auto-detected if on PATH) | All | + +--- + +## `vscode-commands` — Extension Commands + +### VSCode (Command Palette) + +| Command | Description | +|---------|-------------| +| `Nap: New Request` | Create a new `.nap` file via guided flow | +| `Nap: New Playlist` | Create a new `.naplist` file | +| `Nap: Run File` | Run the currently open `.nap` or `.naplist` | +| `Nap: Run All` | Run all `.nap` files in the workspace | +| `Nap: Switch Environment` | Open environment picker | +| `Nap: Copy as curl` | Copy the current request as a curl command | +| `Nap: Generate from OpenAPI` | Run `nap generate openapi` against a spec file | +| `Nap: Reveal in Explorer` | Jump from the Nap panel to the file in the native Explorer | + +### Zed (Slash Commands) + +| Command | Description | +|---------|-------------| +| `/nap-run` | Run a `.nap` file and return results in Assistant | +| `/nap-import-openapi` | Generate `.nap` files from an OpenAPI spec | + +--- + +## `vscode-impl` — Implementation Notes + +### VSCode + +- Built in **TypeScript** using the VSCode Extension API. +- The response panel webview uses a minimal framework (Lit or vanilla TS + CSS) — no heavy UI library. +- The extension shells out to the **Nap CLI** (`nap run --output json`) for all HTTP execution. 
+- **CLI acquisition:** The VSIX installs the CLI via `dotnet tool install -g napper --version X.X.X` on activation, where `X.X.X` is the extension's own `package.json` version. This avoids raw binary downloads (which trigger Windows SmartScreen warnings on unsigned binaries) and leverages NuGet as a trusted distribution channel. If the CLI is already on PATH at the correct version, installation is skipped. +- File watching via `vscode.workspace.createFileSystemWatcher` keeps the panel tree up to date without polling. +- The `.nap` language grammar (TextMate `.tmLanguage.json`) is generated from the ANTLR grammar to avoid drift. +- Published to the **VS Code Marketplace** and the **Open VSX Registry** (for VSCodium / Cursor / Windsurf users). + +### Zed + +- Built in **Rust**, compiled to **WebAssembly** via `zed_extension_api` crate. +- Tree-sitter grammar for `.nap` and `.naplist` (separate from the TextMate grammar — tree-sitter is more expressive for structural queries). +- LSP integration: the extension declares the Nap Language Server in `extension.toml` and implements `language_server_command` to launch it. +- Published via the **zed-industries/extensions** GitHub repository. +- No webviews, no sidebar panels, no test explorer — these are VSCode-only. Zed users get syntax highlighting, LSP intelligence, and runnables. + +### Shared + +- All extensions shell out to `nap run` for execution. No IDE re-implements HTTP logic. +- All extensions connect to `nap-lsp` for language intelligence. See **[LSP Specification](./LSP-SPEC.md)**. +- Grammar definitions (TextMate and Tree-sitter) are both derived from the same ANTLR `.g4` grammar to prevent drift. 
+ +--- + +## Related Specs + +- [LSP Specification](./LSP-SPEC.md) — Language server capabilities, architecture, and protocol details +- [LSP Plan](./LSP-PLAN.md) — LSP implementation phases and TODO +- [IDE Extension Plan (VSCode)](./IDE-EXTENSION-PLAN.md) — VSCode implementation phases and TODO +- [IDE Extension Plan (Zed)](./ZED-EXTENSION-PLAN.md) — Zed implementation phases and TODO +- [OpenAPI Generation (Extension)](./IDE-EXTENION-OPENAPI-GENERATION-SPEC.md) — Import command and AI enrichment diff --git a/specs/LSP-PLAN.md b/specs/LSP-PLAN.md new file mode 100644 index 0000000..f639b06 --- /dev/null +++ b/specs/LSP-PLAN.md @@ -0,0 +1,281 @@ +# Nap Language Server — Implementation Plan + +The LSP is a **thin F# project** (`Napper.Lsp`) that references `Napper.Core` directly. It contains ONLY LSP protocol adapters — all parsing, types, environment resolution, and logging come from `Napper.Core`, the same shared library used by `Napper.Cli`. **Zero duplicated domain logic. Period.** + +--- + +## ⛔️ DO NOT BREAK EXISTING FUNCTIONALITY + +**The LSP is a PARALLEL project.** It does NOT touch the existing VSIX, CLI, or tests until the cutover phase. + +- **DO NOT modify any existing TypeScript files in `src/Napper.VsCode/`** +- **DO NOT modify any existing F# files in `src/Napper.Core/` or `src/Napper.Cli/`** (unless adding new public functions for LSP consumption — and those MUST NOT change existing signatures or behaviour) +- **DO NOT modify or delete any existing tests** +- **ALL existing tests MUST continue to pass at all times** +- **The cutover happens ONLY after the LSP is stable and its own tests pass** + +If you need to add a function to `Napper.Core` for the LSP, that's fine — but it's an ADDITION, not a modification. Existing code stays untouched. + +--- + +## Strategy: Build Parallel, Cut Across Clean + +The goal is to **move logic OUT of TypeScript/Rust and INTO F#**. The VSIX currently reimplements parsing logic that already exists in `Napper.Core`. 
After cutover, the VSIX becomes a thin UI shell — it asks the LSP for data and renders it. Same for Zed. Same for Neovim. **Less TypeScript, less Rust, MORE F#.** + +```mermaid +graph LR + subgraph "Phase 1-2: Build LSP (parallel)" + LSP[Napper.Lsp project] -->|references| CORE[Napper.Core] + LSPT[Napper.Lsp.Tests] -->|tests| LSP + end + + subgraph "Existing (UNTOUCHED)" + CLI[Napper.Cli] -->|references| CORE + VSIX[Napper.VsCode VSIX] + TESTS[All existing tests] + end + + subgraph "Phase 3: Cutover" + VSIX2[VSIX wires up<br/>vscode-languageclient] -->|stdio| LSP2[napper-lsp binary] + ZED[Zed extension] -->|stdio| LSP2 + end +``` + +--- + +## What the VSIX Does TODAY That Belongs in the LSP + +The VSIX currently **reimplements parsing logic in TypeScript** that already exists in `Napper.Core` F#. This is duplicated code that MUST move to the LSP so all IDEs share it. + +| VSIX Logic (TypeScript) | What it does | Where it should live | Napper.Core function | +|------------------------|-------------|---------------------|---------------------| +| `explorerProvider.ts:54-68` `extractHttpMethod` | Parses `.nap` file to find HTTP method | **LSP** — document symbols / custom request | `Parser.parseNapFile` (already exists) | +| `curlCopy.ts:59-68` `parseMethodAndUrl` | Parses `.nap` file to extract method + URL | **LSP** — custom request `napper/requestInfo` | `Parser.parseNapFile` (already exists) | +| `explorerProvider.ts:120-136` `parsePlaylistStepPaths` | Parses `.naplist` to extract step file paths | **LSP** — document symbols / custom request | `Parser.parseNapList` (already exists) | +| `environmentSwitcher.ts:8-39` `detectEnvironments` | Scans `.napenv.*` files to list environment names | **LSP** — custom request `napper/environments` | `Environment.fs` (needs new function) | +| `curlCopy.ts:70-82` curl generation | Builds `curl -X METHOD 'URL'` string | **Napper.Core** — new `CurlGenerator` module | Does not exist yet — add to Core | +| 
`codeLensProvider.ts:44-68` section detection | Finds `[request]` and shorthand lines for CodeLens | **LSP** — document symbols gives this for free | `Parser.parseNapFile` (already exists) | + +After cutover, the VSIX TypeScript code for all of the above gets **deleted** and replaced with LSP calls. The Zed extension and Neovim get the same data without writing a single line of TypeScript or Rust parsing code. + +```mermaid +graph TB + subgraph "BEFORE: Duplicated parsing in each IDE" + VS_TS[VSCode TypeScript<br/>extractHttpMethod<br/>parseMethodAndUrl<br/>parsePlaylistStepPaths<br/>detectEnvironments] --> FILES[.nap / .naplist / .napenv files] + ZED_RS[Zed Rust<br/>would need same logic] --> FILES + end + + subgraph "AFTER: Single source of truth in LSP" + VS2[VSCode — thin UI shell] -->|LSP requests| LSP[napper-lsp F#] + ZED2[Zed — thin UI shell] -->|LSP requests| LSP + NV2[Neovim — thin UI shell] -->|LSP requests| LSP + LSP -->|calls| CORE[Napper.Core<br/>Parser.fs / Environment.fs] + CORE --> FILES2[.nap / .naplist / .napenv files] + end +``` + +--- + +## Project Structure + +``` +src/Napper.Lsp/ +├── Napper.Lsp.fsproj # References Napper.Core, depends on Ionide.LanguageServerProtocol +├── Client.fs # LSP client wrapper for notifications back to IDE +├── Server.fs # LSP server — lifecycle, document sync, symbols, custom requests +├── Workspace.fs # Workspace state: open documents, loaded environments +└── Program.fs # Entry point: stdio transport, server init +``` + +```mermaid +graph TD + PROGRAM[Program.fs<br/>Entry point + stdio] --> SERVER[Server.fs<br/>Lifecycle + handlers] + SERVER --> WS[Workspace.fs<br/>Docs + env state] + + WS --> CORE_P[Napper.Core.Parser] + WS --> CORE_E[Napper.Core.Environment] + WS --> CORE_T[Napper.Core.Types] + WS --> CORE_L[Napper.Core.Logger] +``` + +--- + +## ⚠️ Code Sharing with Napper.Core — MANDATORY + +**`Napper.Lsp` contains ONLY LSP protocol glue.** All domain logic lives in `Napper.Core` and is shared with 
`Napper.Cli`. If the LSP needs a capability that doesn't exist in `Napper.Core` yet, ADD IT TO `Napper.Core` — do NOT put it in `Napper.Lsp`. This is non-negotiable. + +The rule is simple: **if it's not LSP protocol code, it goes in `Napper.Core`.** + +Examples of what belongs where: +- Parsing a `.nap` file → `Napper.Core.Parser` (already exists) +- Extracting variable names from a parsed file → `Napper.Core` (add if missing) +- Mapping a parse error to an LSP Diagnostic → `Napper.Lsp` (protocol glue) +- Scanning for `{{variables}}` in a string → `Napper.Core` (already exists in Environment.fs) +- Generating a curl command → `Napper.Core` (add new module) +- Listing environment names → `Napper.Core.Environment` (add new function) +- Formatting an LSP CompletionItem → `Napper.Lsp` (protocol glue) + +| Napper.Core Module | LSP Usage | +|-------------------|-----------| +| `Parser.parseNapFile` | Document symbols, request info, CodeLens data, diagnostics | +| `Parser.parseNapList` | Document symbols, step listing, diagnostics | +| `Environment.parseEnvFile` | Variable completions, hover values | +| `Environment.resolveVars` | Hover display | +| `Environment.loadEnvironment` | Variable diagnostics | +| `Environment.detectEnvironments` | **NEW** — list available env names for IDE switcher | +| `CurlGenerator.toCurl` | **NEW** — generate curl command from parsed request | +| `Types.*` | All handlers | +| `Logger.*` | All handlers | + +--- + +## Implementation Phases + +### Phase 1 — Project Scaffold + Document Sync + +Set up the F# project, wire up JSON-RPC over stdio, and implement document synchronization. 
**No existing code is modified.** + +- Create `Napper.Lsp.fsproj` referencing `Napper.Core` and `Ionide.LanguageServerProtocol` +- Add project to `Napper.slnx` +- Implement `Program.fs` — stdio transport, server lifecycle +- Implement `Server.fs` — `initialize`/`initialized`/`shutdown` handlers, capability advertisement +- Implement `Workspace.fs` — in-memory document store (`didOpen`, `didChange`, `didClose`) +- Verify the server starts, handshakes, and tracks open documents + +### Phase 2 — Shared Features + Tests + +Build the LSP features that REPLACE duplicated TypeScript/Rust logic. These are not new features — they are existing VSIX capabilities moved to F# so all IDEs share them. Also: thorough integration tests over JSON-RPC stdio. + +**Document Symbols** — replaces `extractHttpMethod`, `parsePlaylistStepPaths`, and CodeLens section detection in TypeScript: +- `textDocument/documentSymbol` for `.nap` files — sections with line ranges, HTTP method + URL +- `textDocument/documentSymbol` for `.naplist` files — sections with step listing + +**Custom LSP Requests** — replaces `parseMethodAndUrl`, `detectEnvironments` in TypeScript: +- `napper/requestInfo` — given a `.nap` file URI, return `{ method, url, headers }` (parsed by `Napper.Core.Parser`) +- `napper/environments` — scan workspace for `.napenv.*` files, return list of environment names +- `napper/curlCommand` — given a `.nap` file URI, return the curl command string + +**Napper.Core additions** (shared with CLI): +- `Environment.detectEnvironmentNames` — scan a directory for `.napenv.*` files and return env names +- `CurlGenerator.toCurl` — generate curl string from a `NapRequest` + +**Tests** — every test launches the real `napper-lsp` binary and talks JSON-RPC over stdio: +- All Phase 1 lifecycle tests (already done) +- Test: `textDocument/documentSymbol` returns sections for valid `.nap` file +- Test: `textDocument/documentSymbol` returns sections for valid `.naplist` file +- Test: 
`napper/requestInfo` returns method + URL from parsed `.nap` file +- Test: `napper/environments` returns env names from workspace +- Test: `napper/curlCommand` returns correct curl string +- **ALL existing F# tests still pass** +- **ALL existing VSIX e2e tests still pass** + +### Phase 3 — Cutover (VSIX + Zed Wire Up) + +**Only after Phase 2 is complete and all tests pass.** + +- Add `vscode-languageclient` dependency to VSIX +- Wire up VSIX to launch `napper-lsp` over stdio on activation +- Zed extension: implement `language_server_command` in `lib.rs` to launch `napper-lsp` +- **DELETE** duplicated TypeScript parsing code (`extractHttpMethod`, `parseMethodAndUrl`, `parsePlaylistStepPaths`, `detectEnvironments`) — replace with LSP calls +- Verify: existing VSIX features work exactly as before (now powered by LSP) +- **Run ALL existing VSIX e2e tests — every single one must pass** +- **Run ALL existing F# tests — must pass** + +### Phase 4 — Post-Cutover: New LSP Features + +These are genuinely NEW capabilities that don't exist in any IDE today. + +- Diagnostics (`Diagnostics.fs`) — parse errors, unknown variables, missing blocks +- Completions (`Completions.fs`) — HTTP methods, headers, variables, status codes, operators +- Hover (`Hover.fs`) — variable resolution, section descriptions, secret masking +- Configuration — `workspace/didChangeConfiguration` for environment name and mask settings +- File watching — `.napenv` changes trigger revalidation + +Each feature gets its own LSP integration tests (same approach: real binary, real JSON-RPC, real assertions). + +--- + +## Testing Strategy + +**No unit tests. No mocks. LSP integration tests ONLY.** + +Every test: +1. Launches the `napper-lsp` binary as a subprocess +2. Sends LSP JSON-RPC messages over stdin (the exact same protocol VSCode/Zed use) +3. Reads LSP JSON-RPC responses from stdout +4. Asserts on the responses + +This is the same communication path the real IDEs use. If the tests pass, the IDEs work. 
+ +- **Napper.Core tests already cover** parsing, environment resolution, and types — do NOT re-test those. +- **Existing VSIX e2e tests**: Must pass before AND after cutover. These are the acceptance criteria. +- **Existing F# tests**: Must pass at all times. Run them before every change. + +--- + +## Dependencies + +| Package | Purpose | +|---------|---------| +| `Ionide.LanguageServerProtocol` | LSP types and JSON-RPC server framework | +| `Napper.Core` (project ref) | Parser, types, environment, logger | + +No other dependencies. The LSP is lightweight by design. + +--- + +## TODO + +### Phase 1 — Project Scaffold + Document Sync +- [x] Create `Napper.Lsp.fsproj` with `Napper.Core` project reference +- [x] Add `Ionide.LanguageServerProtocol` package reference +- [x] Add `Napper.Lsp` to `Napper.slnx` +- [x] Implement `Program.fs` — stdio transport and server lifecycle +- [x] Implement `Server.fs` — initialize/shutdown, capability registration +- [x] Implement `Workspace.fs` — document store (didOpen/didChange/didClose) + +### Phase 2 — Shared Features + Tests +- [x] Create `Napper.Lsp.Tests` project +- [x] Test: initialize handshake (JSON-RPC over stdio) +- [x] Test: initialized notification +- [x] Test: textDocument/didOpen +- [x] Test: textDocument/didChange +- [x] Test: textDocument/didClose +- [x] Test: shutdown + exit lifecycle +- [x] Test: malformed JSON-RPC handled gracefully +- [x] Test: unknown method returns LSP error +- [x] Verify all existing projects build (zero warnings, zero errors) +- [x] Add `SectionScanner` to `Napper.Core` (section positions for document symbols) +- [x] Add `Environment.detectEnvironmentNames` to `Napper.Core` +- [x] Add `CurlGenerator.toCurl` to `Napper.Core` +- [x] Implement `textDocument/documentSymbol` for `.nap` files (sections + method + URL) +- [x] Implement `textDocument/documentSymbol` for `.naplist` files (sections + steps) +- [x] Implement `textDocument/codeLens` for `.nap` files (request section detection) +- [x] 
Implement `workspace/executeCommand` `napper.requestInfo` (method, URL, headers) +- [x] Implement `workspace/executeCommand` `napper.copyCurl` (curl string) +- [x] Implement `workspace/executeCommand` `napper.listEnvironments` (env names) +- [x] Test: documentSymbol returns sections for `.nap` file +- [x] Test: documentSymbol returns sections for `.naplist` file +- [x] Test: codeLens returns lenses for `.nap` file +- [x] Test: `napper.requestInfo` returns parsed method + URL +- [x] Test: `napper.copyCurl` returns curl string +- [x] Test: `napper.listEnvironments` returns env names +- [ ] Verify ALL existing F# tests pass +- [ ] Verify ALL existing VSIX e2e tests pass + +### Phase 3 — Cutover +- [ ] Add `vscode-languageclient` to VSIX +- [ ] Wire VSIX to launch `napper-lsp` on activation +- [ ] Wire Zed `language_server_command` to launch `napper-lsp` +- [ ] Delete duplicated TS parsing code, replace with LSP calls +- [ ] Verify existing VSIX features unchanged +- [ ] Run ALL existing VSIX e2e tests — must pass +- [ ] Run ALL existing F# tests — must pass + +### Phase 4 — Post-Cutover: New LSP Features +- [ ] Diagnostics (parse errors, unknown variables, missing blocks) +- [ ] Completions (methods, headers, variables, status codes, operators) +- [ ] Hover (variable resolution, secret masking, descriptions) +- [ ] Configuration (environment name, mask settings) +- [ ] File watching (.napenv changes) +- [ ] Integration tests for each new feature (JSON-RPC over stdio) diff --git a/specs/LSP-SPEC.md b/specs/LSP-SPEC.md new file mode 100644 index 0000000..ec47eca --- /dev/null +++ b/specs/LSP-SPEC.md @@ -0,0 +1,240 @@ +# Nap Language Server — Specification + +> A standalone LSP binary that provides language intelligence for `.nap`, `.naplist`, and `.napenv` files across all IDEs. Built in F#, reusing **Napper.Core** modules directly. 
+ +--- + +## Architecture + +```mermaid +graph TB + subgraph IDEs + VS[VSCode Extension<br/>TypeScript] + ZD[Zed Extension<br/>Rust/WASM] + NV[Neovim Plugin<br/>Lua] + end + + subgraph "nap-lsp (F# binary)" + JSONRPC[JSON-RPC over stdio] + HANDLERS[LSP Handlers] + subgraph "Napper.Core (shared library)" + PARSER[Parser.fs<br/>FParsec] + ENV[Environment.fs<br/>Variable Resolution] + TYPES[Types.fs<br/>Domain Model] + end + end + + VS -->|stdio| JSONRPC + ZD -->|stdio| JSONRPC + NV -->|stdio| JSONRPC + JSONRPC --> HANDLERS + HANDLERS --> PARSER + HANDLERS --> ENV + HANDLERS --> TYPES +``` + +```mermaid +graph LR + subgraph "Napper.Core (shared)" + T[Types.fs] + P[Parser.fs] + E[Environment.fs] + L[Logger.fs] + end + + subgraph "Consumers" + CLI[Napper.Cli] + LSP[Napper.Lsp] + end + + CLI --> T + CLI --> P + CLI --> E + CLI --> L + LSP --> T + LSP --> P + LSP --> E + LSP --> L +``` + +--- + +## Design Principles + +- **⚠️ ZERO duplicated logic — this is the #1 rule.** `Napper.Lsp` MUST NOT contain any parsing, type definitions, environment resolution, or domain logic. ALL of that lives in `Napper.Core`. The LSP is a thin protocol adapter that calls `Napper.Core` functions and translates results to LSP responses. If you find yourself writing domain logic in `Napper.Lsp`, STOP — it belongs in `Napper.Core` where the CLI can use it too. +- **Napper.Core is the single source of truth.** `Napper.Cli` and `Napper.Lsp` are both thin consumers of `Napper.Core`. They share the exact same parser, types, environment resolution, and logger. Any new capability needed by the LSP that could be useful to the CLI MUST be added to `Napper.Core`, not to `Napper.Lsp`. +- **Standalone binary.** Published as a self-contained `nap-lsp` executable via `dotnet publish`. No .NET runtime required on the user's machine. +- **Protocol-only coupling.** IDE extensions communicate exclusively via LSP over stdio. No IDE-specific code in the LSP binary. 
+- **Incremental.** Each LSP capability ships independently. The server advertises only what it supports. + +--- + +## Transport + +| Property | Value | +|----------|-------| +| Transport | stdio (stdin/stdout) | +| Protocol | JSON-RPC 2.0 (LSP 3.17) | +| Encoding | UTF-8 | +| Binary name | `nap-lsp` | + +IDE extensions launch `nap-lsp` as a child process and communicate over stdin/stdout. No TCP, no WebSocket, no HTTP. + +--- + +## ⚠️ The LSP Replaces Duplicated IDE Logic + +The VSIX currently reimplements `.nap` file parsing in TypeScript — extracting HTTP methods, URLs, playlist steps, and environment names. This is **duplicated logic** that already exists in `Napper.Core` F#. The LSP eliminates this duplication: all IDEs ask the LSP, the LSP calls `Napper.Core`, done. **Less TypeScript, less Rust, MORE F#.** + +| Duplicated VSIX Logic | Replaced By | +|-----------------------|-------------| +| `extractHttpMethod` (TS) — re-parses `.nap` to find method | `textDocument/documentSymbol` — LSP parses once via `Napper.Core.Parser` | +| `parseMethodAndUrl` (TS) — re-parses `.nap` for curl copy | `napper/requestInfo` — custom LSP request | +| `parsePlaylistStepPaths` (TS) — re-parses `.naplist` for steps | `textDocument/documentSymbol` — LSP parses via `Napper.Core.Parser` | +| `detectEnvironments` (TS) — scans `.napenv.*` files | `napper/environments` — custom LSP request | +| CodeLens section detection (TS) — finds `[request]` lines | `textDocument/documentSymbol` — sections with line ranges | + +--- + +## Capabilities + +### `lsp-custom` — Custom Requests (Napper-specific) + +These are non-standard LSP requests that provide structured data to all IDEs. They replace duplicated parsing logic in TypeScript/Rust. 
+ +| Method | Params | Returns | Replaces | +|--------|--------|---------|----------| +| `napper/requestInfo` | `{ uri: string }` | `{ method: string, url: string, headers: Record<string, string> }` | `parseMethodAndUrl` in TS | +| `napper/environments` | `{ rootUri: string }` | `{ environments: string[] }` | `detectEnvironments` in TS | +| `napper/curlCommand` | `{ uri: string }` | `{ curl: string }` | curl generation in TS | + +**Implementation:** All three call `Napper.Core` functions — `Parser.parseNapFile`, `Environment.detectEnvironmentNames` (new), `CurlGenerator.toCurl` (new). + +### `lsp-completions` — Completions + +Triggered on typing within `.nap` and `.naplist` files. + +| Context | Completion Items | +|---------|-----------------| +| After `method =` | `GET`, `POST`, `PUT`, `PATCH`, `DELETE`, `HEAD`, `OPTIONS` | +| After `[request.headers]` key position | Common HTTP headers: `Content-Type`, `Authorization`, `Accept`, `Cache-Control`, `User-Agent`, ... | +| Inside `{{` | Variable names from `.napenv` files in the workspace | +| After `status` in `[assert]` | Common HTTP status codes: `200`, `201`, `400`, `401`, `404`, `500`, ... | +| After assertion target | Assertion operators: `=`, `exists`, `contains`, `matches`, `<`, `>` | +| `[steps]` block in `.naplist` | `.nap` and `.naplist` file paths from the workspace | + +**Implementation:** Parse the document up to the cursor position using `Napper.Core.Parser`. Determine the current section (`[meta]`, `[request]`, `[assert]`, etc.) and offer context-appropriate items. + +### `lsp-diagnostics` — Diagnostics + +Published on `textDocument/didOpen` and `textDocument/didChange`. 
+
+| Diagnostic | Severity | Condition |
+|-----------|----------|-----------|
+| Parse error | Error | `Napper.Core.Parser.parseNapFile` returns `Error` |
+| Unknown variable | Warning | `{{name}}` referenced but not defined in any `.napenv` file or `[vars]` block |
+| Missing `[request]` block | Error | Full `.nap` file has no `[request]` section |
+| Invalid assertion syntax | Error | Assertion line doesn't match any known operator pattern |
+| Missing script path | Warning | `[script]` `pre` or `post` path does not exist on disk |
+| Missing step file | Warning | `.naplist` step references a file that doesn't exist |
+
+**Implementation:** Run `Napper.Core.Parser.parseNapFile` or `parseNapList`. For variable diagnostics, scan for `{{...}}` patterns and check against `Napper.Core.Environment.loadEnvironment`. Report diagnostics with line/column positions from FParsec error info.
+
+### `lsp-hover` — Hover
+
+| Hover Target | Display |
+|-------------|---------|
+| `{{variable}}` | Resolved value from the active environment. If sourced from `.napenv.local`, show `******` (masked). |
+| Section header (`[request]`, `[assert]`, etc.) | Brief description of the section's purpose |
+| HTTP method keyword | Method description (e.g., "GET — Safe, idempotent retrieval") |
+| Assertion operator | Operator description (e.g., "contains — checks if the value includes the substring") |
+
+**Implementation:** Parse the document, locate the token under the cursor, resolve variables using `Napper.Core.Environment`.
+
+### `lsp-symbols` — Document Symbols
+
+Expose file structure for outline navigation (Ctrl+Shift+O in VSCode, symbol search in Zed).
+ +| Symbol | Kind | Scope | +|--------|------|-------| +| `[meta]` | `Namespace` | `.nap`, `.naplist` | +| `[request]` | `Function` | `.nap` | +| `[request.headers]` | `Struct` | `.nap` | +| `[request.body]` | `Struct` | `.nap` | +| `[assert]` | `Function` | `.nap` | +| `[script]` | `Function` | `.nap` | +| `[vars]` | `Variable` | `.nap`, `.naplist` | +| `[steps]` | `Array` | `.naplist` | + +**Implementation:** Walk the parsed AST from `Napper.Core.Parser` and emit `DocumentSymbol` entries with line ranges. + +--- + +## File Watching + +The LSP watches the workspace for changes to `.napenv`, `.napenv.*`, and `.napenv.local` files. When these change, the server: + +1. Reloads the environment using `Napper.Core.Environment.loadEnvironment` +2. Re-publishes diagnostics for all open `.nap` files (unknown variable warnings may appear or disappear) +3. Updates hover resolution for `{{variable}}` tokens + +The server registers `workspace/didChangeWatchedFiles` for these glob patterns: +- `**/.napenv` +- `**/.napenv.*` + +--- + +## Configuration + +The LSP accepts configuration via `workspace/didChangeConfiguration` and `initializationOptions`: + +| Setting | Type | Default | Description | +|---------|------|---------|-------------| +| `nap.environment` | `string` | `""` | Active environment name (selects `.napenv.{name}`) | +| `nap.maskSecrets` | `bool` | `true` | Mask values from `.napenv.local` in hover | + +--- + +## Supported File Types + +| Extension | Language ID | Features | +|-----------|------------|----------| +| `.nap` | `nap` | All capabilities | +| `.naplist` | `naplist` | Completions (steps), diagnostics, symbols | +| `.napenv` | `napenv` | Hover (show which files reference each variable) | + +--- + +## Error Handling + +- Parse errors from FParsec are mapped to LSP `Diagnostic` objects with precise line/column positions. +- The server never crashes on malformed input. All handlers catch exceptions and log via `Napper.Core.Logger`. 
+- If the workspace has no `.napenv` files, variable-related features degrade gracefully (no completions, no hover values, but no errors either). + +--- + +## Distribution + +| Platform | Binary | Notes | +|----------|--------|-------| +| macOS (arm64) | `nap-lsp` | Self-contained, single file | +| macOS (x64) | `nap-lsp` | Self-contained, single file | +| Linux (x64) | `nap-lsp` | Self-contained, single file | +| Windows (x64) | `nap-lsp.exe` | Self-contained, single file | + +Built with `dotnet publish -c Release -r <rid> --self-contained -p:PublishSingleFile=true`. + +IDE extensions discover the binary by: +1. Checking `nap.cliPath` setting (if configured) +2. Looking for `nap-lsp` on `PATH` +3. Downloading from GitHub releases (future) + +--- + +## Related Specs + +- [IDE Extension Spec](./IDE-EXTENSION-SPEC.md) — Feature matrix and IDE-specific behaviour +- [IDE Extension Plan (VSCode)](./IDE-EXTENSION-PLAN.md) — VSCode implementation phases +- [Zed Extension Plan](./ZED-EXTENSION-PLAN.md) — Zed implementation phases +- [File Formats Spec](./FILE-FORMATS-SPEC.md) — `.nap`, `.naplist`, `.napenv` format definitions +- [LSP Implementation Plan](./LSP-PLAN.md) — Implementation phases and TODO diff --git a/specs/Napper.md b/specs/Napper.md deleted file mode 100644 index 9b99baf..0000000 --- a/specs/Napper.md +++ /dev/null @@ -1,672 +0,0 @@ -# Nap — API Testing Tool Specification - -> **Nap** (Network API Protocol) — a CLI-first, test-oriented alternative to Postman, Bruno, `.http` files, and curl. - ---- - -## Vision - -Nap is a developer-first HTTP testing tool. It is as simple as curl for one-off requests, but scales to full test suites with reusable components, scripted assertions, and CI integration. It is not a GUI-first tool with a CLI bolted on — the CLI is the product; the VSCode extension is a first-class citizen that operates on the same files. - ---- - -## Core Principles - -1. 
**Files are the source of truth.** All requests, tests, and playlists are plain files. Git-friendly by default. -2. **Simple things are simple.** A single HTTP call should look almost as terse as curl. -3. **Tests are reusable components.** A `.nap` file is a reusable unit. It can be composed into playlists without modification. -4. **Scripting is opt-in and external.** F# (and potentially other languages) scripts live in `.fsx` files referenced by name. Simple assertions need no scripting. -5. **No lock-in.** The format is plain text. The scripting is standard `.fsx`. Results emit standard formats. - ---- - -## File Format: `.nap` - -Each `.nap` file defines one **request** plus its optional **setup**, **assertions**, and **script reference**. - -### Minimal example — just a request - -```nap -GET https://api.example.com/users -``` - -### Full anatomy - -```nap -# Optional metadata block -[meta] -name = "Get user by ID" -description = "Fetches a single user and asserts shape" -tags = ["users", "smoke"] - -# Optional variables (can be overridden by environment) -[vars] -userId = "42" - -# Request block (required) -[request] -method = GET -url = https://api.example.com/users/{{userId}} - -[request.headers] -Authorization = Bearer {{token}} -Accept = application/json - -# Optional: request body (for POST/PUT/PATCH) -# [request.body] -# content-type = application/json -# """ -# { "name": "Alice" } -# """ - -# Optional: built-in assertions (no scripting required) -[assert] -status = 200 -body.id = {{userId}} -body.name exists - -# Optional: reference an external script for complex assertions or setup -[script] -pre = ./scripts/auth.fsx # runs before the request -post = ./scripts/validate-user.fsx # runs after the response -``` - -### Key design decisions - -- **TOML-inspired syntax** — familiar, unambiguous, easy to parse. -- **`{{variable}}`** interpolation throughout — variables resolved from env files, CLI flags, or parent playlist scope. 
-- **`[assert]` block** — declarative assertions that cover ~80% of cases without scripting. - - `status = 200` — HTTP status code - - `body.path = value` — JSONPath equality - - `body.path exists` — presence check - - `body.path matches "regex"` — regex match - - `headers.Content-Type contains "json"` — header check - - `duration < 500ms` — performance assertion -- **`[script]` block** — references external `.fsx` files for pre/post hooks. -- Comments with `#`. - ---- - -## Scripting Model - -Scripts are **external `.fsx` files** referenced by relative path. This keeps `.nap` files clean and makes scripts independently testable and reusable across many `.nap` files. - -### Script context object - -The runtime injects a `NapContext` object into every script. The interface (F# record): - -```fsharp -type NapResponse = { - StatusCode : int - Headers : Map<string, string> - Body : string // raw body - Json : JsonElement // parsed if Content-Type is JSON - Duration : TimeSpan -} - -type NapContext = { - Vars : Map<string, string> // mutable — scripts can set vars for downstream steps - Request : HttpRequestMessage // pre-script only - Response : NapResponse // post-script only (None in pre-script) - Env : string // current environment name - Fail : string -> unit // call to fail the test with a message - Set : string -> string -> unit // set a variable for downstream steps - Log : string -> unit // write to test output -} -``` - -### Example post-script (`validate-user.fsx`) - -```fsharp -// ctx : NapContext is injected automatically -let user = ctx.Response.Json - -if user.GetProperty("id").GetString() <> ctx.Vars["userId"] then - ctx.Fail "User ID mismatch" - -// Extract a token from response and pass it to the next step -let token = user.GetProperty("sessionToken").GetString() -ctx.Set "token" token -``` - -### Script-driven execution (inverse model) - -The relationship between `.nap` files and scripts works **both ways**: - -**`.nap` file drives scripts** — a 
request file references one or more pre/post scripts. - -**Script drives `.nap` files** — an `.fsx` file can itself act as the entry point, orchestrating as many requests as needed: - -```fsharp -// orchestrate.fsx — F# script as the top-level runner -// ctx : NapContext injected; nap : NapRunner also injected - -let loginResult = nap.Run "./auth/01_login.nap" -ctx.Set "token" (loginResult.Response.Json.GetProperty("token").GetString()) - -for userId in [1; 2; 3] do - ctx.Set "userId" (string userId) - let result = nap.Run "./users/get-user.nap" - if result.Response.StatusCode <> 200 then - ctx.Fail $"User {userId} not found" -``` - -The `NapRunner` object injected into orchestration scripts: - -```fsharp -type NapRunner = { - Run : string -> NapResult // run a .nap file, returns result - RunList : string -> NapResult list // run a .naplist file - Vars : Map<string, string> // shared variable bag -} -``` - -This enables arbitrarily complex test flows — loops, branching, data-driven runs — without any special playlist syntax. - -A `.naplist` can reference an `.fsx` orchestration script as a step, the same as any `.nap` file: - -```naplist -[steps] -./auth/01_login.nap -./scripts/parametrized-user-tests.fsx # script drives multiple .nap files -./teardown/cleanup.nap -``` - -### Language extensibility - -The `[script]` block specifies a file path. The runtime dispatches based on file extension: -- `.fsx` → F# interactive (dotnet-fsi) -- Future: `.py`, `.js`, etc. — the architecture allows pluggable runners - ---- - -## Environment Files: `.napenv` - -Environment files are TOML files that define variable sets for different deployment targets. - -```toml -# .napenv (base — checked into git, no secrets) -baseUrl = "https://api.example.com" -userId = "42" -``` - -```toml -# .napenv.local (gitignored — secrets) -token = "eyJhbGci..." 
-``` - -```toml -# .napenv.staging -baseUrl = "https://staging.api.example.com" -token = "staging-token" -``` - -Variable resolution order (highest wins): -1. CLI `--var key=value` flags -2. `.napenv.local` -3. Named environment file (e.g. `.napenv.staging`) -4. Base `.napenv` -5. `[vars]` block in the `.nap` file - ---- - -## Collections: Folder-Based - -A folder of `.nap` files is implicitly a **collection**. Subfolders are sub-collections. - -``` -my-api/ -├── .napenv -├── .napenv.local # gitignored -├── auth/ -│ ├── 01_login.nap -│ └── 02_refresh-token.nap -├── users/ -│ ├── 01_get-user.nap -│ ├── 02_create-user.nap -│ └── 03_delete-user.nap -└── smoke.naplist -``` - -Execution order within a folder: **filename sort** (use numeric prefixes `01_`, `02_` to control order). - ---- - -## Playlists: `.naplist` - -A `.naplist` file is an explicit ordered list of steps. Steps can reference: -- Individual `.nap` files (by relative path) -- Folders (run all `.nap` files in that folder, sorted) -- Other `.naplist` files (nested playlists — fully recursive) - -### Example `smoke.naplist` - -```naplist -[meta] -name = "Smoke Test Suite" -env = staging # default environment for this playlist - -[vars] -timeout = "5000" - -[steps] -./auth/01_login.nap -./auth/02_refresh-token.nap -./users/01_get-user.nap - -# Include another playlist -./regression/core.naplist -``` - -### Variable scoping in playlists - -- A `[vars]` block in a `.naplist` sets variables for all steps in that playlist. -- Scripts can use `ctx.Set` to pass variables **forward** to subsequent steps in the same playlist. -- Nested `.naplist` files inherit the parent's variable scope unless they override. 
- ---- - -## CLI - -### Installation - -```sh -# .NET global tool -dotnet tool install -g nap - -# Standalone binary (Homebrew, GitHub Releases) -brew install nap -``` - -### Usage - -```sh -# Run a single request (simplest case — as easy as curl) -nap run ./users/get-user.nap - -# Run a single request with inline variable override -nap run ./users/get-user.nap --var userId=99 --var env=dev - -# Run a collection (folder) -nap run ./users/ - -# Run a playlist -nap run ./smoke.naplist - -# Specify environment -nap run ./smoke.naplist --env staging - -# Watch mode — re-run on file save -nap run ./smoke.naplist --watch - -# Output formats -nap run ./smoke.naplist --output junit # JUnit XML (CI) -nap run ./smoke.naplist --output tap # TAP format -nap run ./smoke.naplist --output pretty # default human-readable - -# Scaffold a new .nap file -nap new request ./users/get-user.nap -nap new playlist ./smoke.naplist -nap new env staging - -# Validate syntax without running -nap check ./smoke.naplist - -# List all requests in a collection/playlist -nap list ./ -``` - -### Exit codes - -| Code | Meaning | -|------|---------| -| 0 | All assertions passed | -| 1 | One or more assertions failed | -| 2 | Runtime error (network, script error, parse error) | - ---- - -## VSCode Extension - -> The extension is the **primary entry point** for most users. It must be as approachable as Postman on first open, but backed by plain files that work perfectly from the CLI and in CI. - -### Design philosophy - -- **No separate app.** Everything lives inside VSCode. No webview-based fake browser. No Electron shell inside Electron. -- **Files are always the truth.** The UI is a lens over `.nap` and `.naplist` files. Edits in the UI update the file directly; edits in the file are immediately reflected in the UI. There is no sync step. -- **Progressive disclosure.** A new user can send their first request within 30 seconds of installing. 
Advanced features (scripting, playlists, environments) reveal themselves naturally as the user explores. -- **Looks good, works fast.** The UI should feel polished — not a dev tool hacked together from tree views and JSON editors. - ---- - -### Layout overview - -The extension contributes a dedicated **Nap Activity Bar icon** (sidebar panel). The panel has three tabs: - -``` -┌─────────────────────────────┐ -│ 🟢 Nap [+ ▾] │ ← panel header: new request button, env picker -├──────────┬──────────────────┤ -│ Explorer │ Playlists │ ← two tabs -├──────────┴──────────────────┤ -│ │ -│ 📁 my-api/ │ ← folder = collection -│ 📁 auth/ │ -│ 📄 01_login │ ← .nap file -│ 📄 02_refresh-token │ -│ 📁 users/ │ -│ 📄 01_get-user ✓ │ ← pass indicator -│ 📄 02_create-user ✗ │ ← fail indicator -│ 📄 03_delete-user │ -│ │ -│ 📋 smoke [▶ Run] │ ← .naplist file -└─────────────────────────────┘ -``` - ---- - -### Explorer tab - -The Explorer tab mirrors the folder structure on disk. It is not a custom tree — it wraps the workspace file tree filtered to `.nap`, `.naplist`, and `.napenv` files. - -**Each `.nap` file node shows:** -- File name (without extension, prettified) -- HTTP method badge (`GET`, `POST`, etc.) in a colour-coded pill -- Last run result icon: ✓ pass / ✗ fail / ● pending / ⊘ skipped -- Hover: URL, last run time, last status code - -**Context menu on a `.nap` file:** -- ▶ Run -- ⧉ Copy as curl -- ✎ Open in editor -- + Add to playlist… -- ⊕ Duplicate -- 🗑 Delete - -**Folder (collection) context menu:** -- ▶ Run all -- + New request here -- + New playlist here - ---- - -### Playlists tab - -Lists all `.naplist` files found in the workspace, with a tree showing their step structure (including nested playlists). - -``` -📋 smoke - 📄 01_login - 📄 02_refresh-token - 📄 01_get-user - 📋 regression/core ← nested playlist, expandable - 📄 ... - -📋 regression/core - ... -``` - -Each playlist node has a ▶ Run button. Individual steps can be run in isolation from the tree. 
- ---- - -### Request editor (the main view) - -Clicking a `.nap` file opens it in a **split editor**: the raw `.nap` file on the left (editable), and a structured **Request Panel** on the right as a webview. - -``` -┌─────────────────────────┬──────────────────────────────────────┐ -│ get-user.nap │ Get user by ID [▶ Run] │ -│─────────────────────────│──────────────────────────────────────│ -│ [meta] │ ┌─ Request ──────────────────────┐ │ -│ name = "Get user by ID" │ │ GET https://api.ex…/users/42 │ │ -│ │ │ │ │ -│ [request] │ │ Headers [+] │ │ -│ method = GET │ │ Authorization Bearer •••••• │ │ -│ url = {{baseUrl}}/… │ │ Accept application/… │ │ -│ │ └────────────────────────────────┘ │ -│ [assert] │ │ -│ status = 200 │ ┌─ Response ─────────────────────┐ │ -│ body.id exists │ │ 200 OK 47ms 1.2 KB │ │ -│ │ │ │ │ -│ │ │ Headers Body Preview │ │ -│ │ │ ┌────────────────────────────┐ │ │ -│ │ │ │ { │ │ │ -│ │ │ │ "id": "42", │ │ │ -│ │ │ │ "name": "Alice" │ │ │ -│ │ │ │ } │ │ │ -│ │ │ └────────────────────────────┘ │ │ -│ │ │ │ │ -│ │ │ Assertions │ │ -│ │ │ ✓ status = 200 │ │ -│ │ │ ✓ body.id exists │ │ -│ │ └────────────────────────────────┘ │ -└─────────────────────────┴──────────────────────────────────────┘ -``` - -**The right panel is read-only** — it is a live preview of the request and (after running) the response. All editing is done in the `.nap` file on the left. The two sides stay in sync automatically. - -**The right panel has three response sub-tabs:** -- **Body** — raw or pretty-printed JSON/XML/text with syntax highlighting and search -- **Headers** — response headers as a clean key/value table -- **Preview** — rendered HTML (for HTML responses) or image (for image responses) - -**Assertions section** (below the response): each assertion from the `[assert]` block is listed with its pass/fail state and the actual vs. expected value on failure. 
- ---- - -### Inline editing features - -**Syntax highlighting** — full grammar-aware highlighting for `.nap` and `.naplist` files. - -**Variable resolution on hover** — hovering over `{{token}}` shows a tooltip with the resolved value from the active environment (masked if the key is in `.napenv.local`). - -**CodeLens actions** (appear above relevant lines in the raw file): -- `▶ Run` above `[request]` -- `▶ Run Playlist` above `[meta]` in `.naplist` files -- `⧉ Copy as curl` above `[request]` - -**Autocomplete:** -- Standard HTTP method names -- Common header names (`Content-Type`, `Authorization`, `Accept`, …) -- Known variable names from `.napenv` files in the workspace -- Status codes in `[assert]` blocks - -**Inline diagnostics** — squiggly underlines for: -- Unknown variables (referenced in `{{…}}` but not defined in any env file) -- Invalid assertion syntax -- Missing required `[request]` block -- Unreachable script paths in `[script]` - ---- - -### Environment switcher - -A **status bar item** (bottom-left) shows the active environment: - -``` -[ Nap: staging ▾ ] -``` - -Clicking opens a quick-pick dropdown listing all detected environments (from `.napenv.*` files). Switching environment immediately re-resolves all variable previews in open editors. - -A per-workspace setting `nap.defaultEnvironment` can be committed to the repo to set the team default. - ---- - -### New request flow - -Clicking **[+]** in the panel header (or running the `Nap: New Request` command) opens a guided quick-input flow: - -1. **Pick HTTP method** — GET / POST / PUT / PATCH / DELETE / HEAD / OPTIONS -2. **Enter URL** — with autocomplete for `{{baseUrl}}` and other known variables -3. **Pick destination folder** — from the workspace collection tree -4. **Name the request** — defaults to `{method} {path}` (e.g. `GET users-userId`) - -The file is created immediately and opened in the split editor, ready to run. 
- ---- - -### Test Explorer integration - -The extension registers a `vscode.TestController` so all `.nap` files appear in the standard VSCode **Test Explorer** panel (the flask icon in the activity bar). - -- Collections map to **test suites** -- `.nap` files map to **test items** -- `.naplist` files map to a **test suite** with each step as a child item -- Nested playlists are nested suites - -Run/debug actions in the Test Explorer invoke the Nap CLI under the hood (`nap run <file> --output junit`) and map results back to the test items. - -Results are shown in the **Test Results** output panel with: -- Full request (method, URL, headers, body) -- Full response (status, headers, body) -- Each assertion result with actual vs. expected values on failure -- Script output (`ctx.Log` messages) shown as test output - ---- - -### Extension settings - -| Setting | Default | Description | -|---------|---------|-------------| -| `nap.defaultEnvironment` | `""` | Active environment name | -| `nap.autoRunOnSave` | `false` | Re-run the request when the file is saved | -| `nap.splitEditorLayout` | `"beside"` | `"beside"` or `"below"` for the response panel | -| `nap.maskSecretsInPreview` | `true` | Mask variables sourced from `.napenv.local` in hover tooltips | -| `nap.cliPath` | `"nap"` | Path to the Nap CLI binary (auto-detected if on PATH) | - ---- - -### Extension commands (Command Palette) - -| Command | Description | -|---------|-------------| -| `Nap: New Request` | Create a new `.nap` file via guided flow | -| `Nap: New Playlist` | Create a new `.naplist` file | -| `Nap: Run File` | Run the currently open `.nap` or `.naplist` | -| `Nap: Run All` | Run all `.nap` files in the workspace | -| `Nap: Switch Environment` | Open environment picker | -| `Nap: Copy as curl` | Copy the current request as a curl command | -| `Nap: Generate from OpenAPI` | Run `nap generate openapi` against a spec file | -| `Nap: Reveal in Explorer` | Jump from the Nap panel to the file in the 
native Explorer | - ---- - -### Extension implementation notes - -- Built in **TypeScript** using the VSCode Extension API. -- The response panel webview uses a minimal framework (Lit or vanilla TS + CSS) — no heavy UI library. -- The extension shells out to the **Nap CLI** (`nap run --output json`) for all HTTP execution. It does not re-implement the HTTP runner in TypeScript. This keeps the CLI and extension always in sync. -- File watching via `vscode.workspace.createFileSystemWatcher` keeps the panel tree up to date without polling. -- The `.nap` language grammar (TextMate `.tmLanguage.json`) is generated from the ANTLR grammar to avoid drift. -- The extension is published to the **VS Code Marketplace** and the **Open VSX Registry** (for VSCodium / Cursor / Windsurf users). - ---- - -## Parser Implementation - -### Recommended approach: ANTLR4 - -The `.nap` and `.naplist` formats should be parsed with **ANTLR4** (targeting the C# runtime via `Antlr4.Runtime.Standard` NuGet package, which works fine from F#). - -**Rationale:** -- The format has a non-trivial grammar (multi-line string literals, section headers, assertion expressions, variable interpolation). -- ANTLR gives a formal grammar file (`.g4`) that serves as the authoritative format spec and is easy to evolve. -- The C# ANTLR runtime is mature and well-maintained. Generating a visitor/listener from F# is straightforward. -- Alternatives (FParsec, manual recursive descent) are viable but ANTLR's grammar file is more readable as documentation and easier to extend without regressions. - -**Alternative — FParsec:** -If the grammar stays simple enough, [FParsec](https://www.quanttec.com/fparsec/) (a combinator parser library for F#) is a strong alternative. It keeps everything in F#, has excellent error messages, and has no code generation step. Use FParsec if the grammar remains simple; switch to ANTLR if the grammar grows complex (e.g. full expression language for assertions, conditional blocks). 
- -**Grammar files location:** - -``` -nap/ -└── src/ - └── Nap.Core/ - └── Grammar/ - ├── NapFile.g4 # .nap file grammar - └── NapList.g4 # .naplist grammar -``` - -The generated parser code is committed to the repo (not regenerated on every build) to avoid toolchain dependencies in CI. - ---- - -## Project Layout (Implementation) - -``` -nap/ -├── src/ -│ ├── Nap.Core/ # F# — parser, types, runner engine -│ ├── Nap.Scripting/ # F# — fsi host, script context injection -│ ├── Nap.Cli/ # F# — CLI entry point (System.CommandLine) -│ └── Nap.VsCode/ # TypeScript — VSCode extension -├── tests/ -│ ├── Nap.Core.Tests/ -│ └── Nap.Scripting.Tests/ -├── examples/ -│ └── petstore/ # Sample collection against Petstore API -└── nap.sln -``` - ---- - -## Implementation Phases - -### Phase 1 — Core CLI (MVP) - -- `.nap` file parser -- HTTP request runner (single file) -- Built-in `[assert]` block evaluation -- `.napenv` variable resolution -- `--output pretty` and `--output junit` -- `nap run <file>` command - -### Phase 2 — Collections & Playlists - -- Folder-based collection runner -- `.naplist` file parser and runner -- Nested playlist support -- Variable scoping across steps (`ctx.Set`) - -### Phase 3 — F# Scripting - -- dotnet-fsi host integration -- `NapContext` injection -- Pre/post script execution -- `ctx.Set` for cross-step variable passing - -### Phase 4 — VSCode Extension - -- Syntax highlighting -- Test Explorer integration -- CodeLens run actions -- Environment switcher -- Response viewer panel - -### Phase 5 — Polish & Distribution - -- Standalone native binary (NativeAOT or single-file publish) -- NuGet package for `dotnet tool install` -- Homebrew formula -- `nap new` scaffolding commands -- Language-extensible script runner plugin model - ---- - -## Open Questions / Future Considerations - -- **GraphQL support** — a `[request.graphql]` block with query/variables sub-keys. -- **WebSocket / SSE testing** — separate request type, different assertion model. 
-- **Mock server mode** — `nap mock ./collection/` serves a mock based on expected responses. -- **Script language plugins** — `.py`, `.js` runners as opt-in packages. -- **Secret manager integration** — pull `{{token}}` from 1Password, AWS Secrets Manager, etc. at runtime. -- **HTML report output** — `--output html` for a shareable test report. - ---- - -## OpenAPI / Swagger Test Generation - -See [OpenAPI Generation Specification](./OpenApiGeneration.md) for the full specification covering one-click test suite generation from OpenAPI specs, including AI-assisted enrichment via GitHub Copilot. diff --git a/specs/OpenApiGeneration.md b/specs/OpenApiGeneration.md deleted file mode 100644 index 74bb9aa..0000000 --- a/specs/OpenApiGeneration.md +++ /dev/null @@ -1,377 +0,0 @@ -# OpenAPI Test Generation Specification - -> **One click to turn an OpenAPI spec into a comprehensive, runnable test suite.** - ---- - -CRITICAL: START WITH TESTS THAT VERIFY THAT OpenAPI -> .nap is WORKING. THE OPENAPI -> .nap DETERMINISTIC PART IS F#. ENRICHMENT IS COPILOT ONLY. - ---- - -## Vision - -A user points Napper at an OpenAPI 3.x or Swagger 2.x specification and gets a complete test suite: one `.nap` file per operation, organized by tag into subdirectories, with a `.naplist` playlist, a `.napenv` environment file, and meaningful assertions derived from the spec's response schemas. - -Without AI, the generator produces deterministic output from the spec alone. When GitHub Copilot is available inside VS Code, the user can opt into AI-assisted enrichment that adds smarter assertions, realistic test data, error-case tests, and intelligent playlist ordering. - -The generated files are **starting points**. The user edits, extends, and commits them alongside the rest of the collection. 
- ---- - -## Generation Flow - -``` -Input Parse Collect Generate -──────────────────── ────────────── ───────────── ────────────────────── -Local file (.json/.yaml) │ Group endpoints Per-tag subdirectory: - or ├─ JSON.parse() by tag - 01_operation.nap -URL (https://...) │ or YAML parse │ - 02_operation.nap - ▼ │ ... - Resolve $ref │ - │ ▼ Root: - ▼ EndpointDescriptor[] - api-tests.naplist - OpenApiSpec - .napenv - - .napenv.local (gitignored) -``` - -### Input formats - -| Format | Status | -|--------|--------| -| OpenAPI 3.x JSON | Implemented | -| Swagger 2.x JSON | Implemented | -| YAML (both versions) | Not yet — needs YAML parser | -| URL-based loading | Not yet — file picker only | - ---- - -## What Gets Generated - -### Per operation: a `.nap` file - -```nap -# Generated from GET /users/{userId} -[meta] -name = Get user by ID -description = Auto-generated from petstore.yaml - operation getUserById -tags = ["users", "generated"] -generated = true - -[vars] -userId = "REPLACE_ME" - -[request] -GET {{baseUrl}}/users/{{userId}} - -[request.headers] -Authorization = Bearer {{token}} -Accept = application/json - -[assert] -status = 200 -body.id exists -body.name exists -body.email exists -``` - -### Per tag: a subdirectory - -Operations tagged `users` go into `users/`, operations tagged `pets` go into `pets/`, etc. Untagged operations go into the root. 
- -``` -generated/ -├── .napenv -├── .napenv.local # gitignored, placeholder for secrets -├── api-tests.naplist -├── users/ -│ ├── 01_get-user.nap -│ ├── 02_create-user.nap -│ └── 03_delete-user.nap -└── pets/ - ├── 01_list-pets.nap - └── 02_get-pet.nap -``` - -### Per spec: a `.naplist` playlist - -```naplist -[meta] -name = Pet Store API - -[steps] -./users/01_get-user.nap -./users/02_create-user.nap -./users/03_delete-user.nap -./pets/01_list-pets.nap -./pets/02_get-pet.nap -``` - -### Per spec: a `.napenv` environment - -```toml -baseUrl = https://petstore.example.com/v1 -``` - ---- - -## Generation Details - -### Base URL extraction - -1. OpenAPI 3.x: first entry in `servers[].url` -2. Swagger 2.x: `{schemes[0]}://{host}{basePath}` -3. Fallback: `https://api.example.com` - -### Path parameter conversion - -OpenAPI `{param}` becomes Napper `{{param}}`. Each path parameter also generates a `[vars]` entry with a placeholder value. - -### Request body generation - -For POST / PUT / PATCH operations: -- If the spec provides an `example`, use it verbatim -- Otherwise, recursively generate from the schema using type-appropriate defaults -- Use `format` hints for smarter defaults (email, uuid, date-time, uri) -- Use `enum` values when available (pick the first) -- Respect `minimum` / `maximum` for numeric types - -### Response assertion generation - -From the success response schema (first 2xx status code): -- `status = {code}` for the expected status -- `body.{field} exists` for each top-level required property -- `body.{field} = {value}` for fields with known constant values (enums with single value) -- `headers.Content-Type contains "json"` when response media type is `application/json` - -### Query parameter handling - -Query parameters from the spec are appended to the URL as `?key={{key}}` and generate corresponding `[vars]` entries. 
- -### Authentication handling - -From the spec's `securitySchemes` and per-operation `security` requirements: - -| Scheme | Generated output | -|--------|-----------------| -| Bearer token (`http: bearer`) | `Authorization = Bearer {{token}}` header + `token` in `.napenv.local` | -| API key (header) | `{headerName} = {{apiKey}}` header + `apiKey` in `.napenv.local` | -| API key (query) | Appended as query param `?{name}={{apiKey}}` | -| Basic auth | `Authorization = Basic {{basicAuth}}` header | - -### Error case generation - -For each documented error response (4xx, 5xx), generate an additional `.nap` file that intentionally triggers the error: - -```nap -# Generated error case: 404 for GET /users/{userId} -[meta] -name = Get user by ID - 404 -description = Verify 404 when user does not exist -tags = ["users", "generated", "error-case"] -generated = true - -[vars] -userId = "nonexistent-id" - -[request] -GET {{baseUrl}}/users/{{userId}} - -[assert] -status = 404 -``` - -### `$ref` resolution - -OpenAPI specs use `$ref` pointers extensively for reusable schemas, parameters, and responses. The generator must resolve all `$ref` pointers by inlining the referenced definitions before generating output. This includes: -- `#/components/schemas/...` (OAS3) and `#/definitions/...` (Swagger 2) -- `#/components/parameters/...` -- `#/components/responses/...` -- Nested `$ref` chains (a schema referencing another schema) - -### Generated file metadata - -Every generated `.nap` file includes `generated = true` in the `[meta]` block. This allows tooling to distinguish generated files from hand-written ones, enabling safe re-generation and `--diff` mode. 
- ---- - -## CLI Commands - -```sh -# Generate from a local spec -napper generate openapi ./petstore.yaml --output ./petstore/ - -# Generate from a URL -napper generate openapi https://api.example.com/openapi.json --output ./generated/ - -# Generate only for specific tags -napper generate openapi ./petstore.yaml --tag users --tag pets --output ./filtered/ - -# Show what would change without overwriting (diff mode) -napper generate openapi ./petstore.yaml --output ./petstore/ --diff -``` - -### Diff / regeneration mode - -Re-running `napper generate openapi` against an existing output directory with `--diff` compares the spec's current state against previously generated files (identified by `generated = true`). It reports: -- New operations added to the spec -- Operations removed from the spec -- Changed request/response schemas - -Without `--diff`, re-generation overwrites files that have `generated = true` but leaves files where that flag has been removed (indicating the user has taken ownership). - ---- - -## VS Code Extension Integration - -### Import command - -The `Napper: Import from OpenAPI` command (`napper.importOpenApi`): - -1. User picks a spec file (JSON / YAML) or pastes a URL -2. User picks an output folder -3. Generator runs, writes files -4. Opens the generated `.naplist` in the editor -5. Shows success notification with file count - -### Menu placement - -The import command appears in: -- The Napper explorer panel title bar (cloud-download icon) -- The Command Palette - ---- - -## AI-Assisted Enrichment (Copilot) - -> AI enrichment is an **optional layer** on top of the deterministic generator. The generator always works without Copilot. When Copilot is available and the user opts in, the output is enriched. - -### How it works - -1. The deterministic generator produces the base `GenerationResult` -2. 
If the user chooses "Generate with AI enhancement" and Copilot is available: - - The enricher sends batched prompts to the VS Code Language Model API (`vscode.lm`) - - Each prompt covers a batch of operations (grouped by tag) to stay within rate limits - - The LLM responses are parsed and merged into the generation result -3. The enriched files are written to disk - -### What AI enriches - -| Area | Without AI | With AI | -|------|-----------|---------| -| Assertions | `status = 200`, `body.field exists` for required fields | Semantic assertions: format checks, value range checks, relationship assertions between fields | -| Request body examples | Schema-derived defaults (`"example"`, `0`, `true`) | Contextually realistic values: real-looking emails, names, dates, UUIDs | -| Error case tests | One per documented error status code with placeholder input | Targeted invalid inputs that would actually trigger each error | -| Playlist ordering | File-sort order | Logical flow: auth first, create before read, CRUD lifecycle | -| Validation scripts | None | `.fsx` scripts for complex nested object / array validation | - -### Architecture - -The AI enrichment is split into two modules: - -**`openApiAiEnhancer.ts`** — pure functions, no VS Code SDK dependency: -- Input: `GenerationResult` + parsed `OpenApiSpec` + LLM response strings -- Output: enriched `GenerationResult` -- Fully testable without VS Code - -**Extension integration layer** (in `extension.ts`): -- Checks `vscode.lm.selectChatModels()` for Copilot availability -- Presents choice: "Generate" vs "Generate with AI" -- Sends prompts, collects responses, passes to enhancer -- Shows progress notification during AI processing - -### Prompt design - -Prompts return parseable JSON. 
Each covers one enrichment aspect for a batch of operations: - -- **Assertion enrichment**: Given response schemas, return assertion lines per operation -- **Test data enrichment**: Given request body schemas, return realistic example bodies -- **Error case enrichment**: Given operations with error responses, return test inputs per error code - -### Future AI integration - -The VS Code Language Model API integration is the first step. Future paid features may include: -- A standalone Napper agent that generates and maintains test suites outside VS Code -- Continuous test generation that watches spec changes and updates tests -- AI-driven test prioritization based on API change impact analysis - ---- - -## Current Implementation State - -### What exists today - -**`src/Nap.VsCode/src/openApiGenerator.ts`** (380 lines) — pure TypeScript, no VS Code SDK: -- `generateFromOpenApi(jsonText: string): Result<GenerationResult, string>` -- Supports OpenAPI 3.x and Swagger 2.x (JSON only) -- Extracts base URL from `servers[]` or `host`/`basePath`/`schemes` -- Converts path params `{param}` to `{{param}}` -- Generates example request bodies from schemas (recursive) -- Creates `[assert]` with success status code only -- Adds Content-Type/Accept headers for POST/PUT/PATCH -- Outputs numbered `.nap` files, one `.naplist`, one `.napenv` -- All string literals defined as constants in `constants.ts` - -**`src/Nap.VsCode/src/extension.ts`** (lines 412-472) — VS Code integration: -- File picker for spec file -- Output folder picker -- Writes generated files to disk - -**`src/Nap.VsCode/src/constants.ts`** (lines 201-241) — all OpenAPI constants - -### What is missing - -| Gap | Priority | Notes | -|-----|----------|-------| -| Unit tests for openApiGenerator.ts | Critical | 380 lines of pure functions with zero tests | -| `$ref` resolution | High | Most real-world specs use `$ref` extensively | -| YAML support | High | YAML is the dominant format for OpenAPI specs | -| Response body 
assertions | High | Only generates `status = code` today | -| Tag-based folder organization | High | Currently flat-numbered, should group by tag | -| Query parameter handling | Medium | Not added to URL or `[vars]` | -| Auth scheme handling | Medium | No security scheme detection | -| `[vars]` block for path params | Medium | Params are in URL but no `[vars]` section | -| `generated = true` meta flag | Medium | Spec calls for it, not implemented | -| Error case generation | Medium | Only happy-path tests generated | -| Smarter example values (format/enum) | Medium | Everything is `"example"` or `0` | -| URL-based spec loading | Low | File picker only today | -| `--diff` mode | Low | No re-generation support | -| AI enrichment (Copilot) | Low | Foundation first, then AI layer | - ---- - -## Implementation Phases - -### Phase A: Testing Foundation - -Write comprehensive unit tests for `openApiGenerator.ts`. Test fixtures for valid OAS3, valid Swagger 2, edge cases, error cases. All pure functions, no VS Code dependency needed. 
- -### Phase B: Core Generation Improvements - -- `$ref` resolution (inline all references before generation) -- YAML support (add `js-yaml` dependency) -- Response body assertions from response schemas -- Tag-based folder organization -- `[vars]` block for path parameters -- `generated = true` metadata flag - -### Phase C: Enhanced Generation - -- Query parameter and auth header generation -- Error case test generation (4xx, 5xx) -- Smarter example values using `format`, `enum`, `minimum`/`maximum` -- URL-based spec loading -- Header assertions - -### Phase D: AI-Assisted Enrichment - -- `openApiAiEnhancer.ts` module (pure functions) -- VS Code Language Model API integration -- Batch prompt design and response parsing -- UI toggle: "Generate" vs "Generate with AI" -- Enhanced assertions, test data, playlist ordering - -### Phase E: Diff and Regeneration - -- `--diff` mode in CLI -- `generated = true` detection for safe overwrite -- Preserve custom assertions, update generated ones diff --git a/specs/SCRIPTING-SPEC.md b/specs/SCRIPTING-SPEC.md new file mode 100644 index 0000000..0e9d2eb --- /dev/null +++ b/specs/SCRIPTING-SPEC.md @@ -0,0 +1,104 @@ +# Nap Scripting Model + +Scripts are external files referenced by relative path from the `nap-script` section. This keeps `.nap` files clean and makes scripts independently testable and reusable across many `.nap` files. + +- `script-fsx` — F# scripts (`.fsx`) executed via `dotnet fsi` +- `script-csx` — C# scripts (`.csx`) executed via `dotnet script` + +--- + +## `script-context` — Script Context Object + +The runtime injects a `NapContext` object into every script. 
The interface (F# record):
+
+```fsharp
+type NapResponse = {
+    StatusCode : int
+    Headers : Map<string, string>
+    Body : string // raw body
+    Json : JsonElement // parsed if Content-Type is JSON
+    Duration : TimeSpan
+}
+
+type NapContext = {
+    Vars : Map<string, string> // variable bag — use Set to publish values for downstream steps
+    Request : HttpRequestMessage // pre-script only
+    Response : NapResponse // post-script only; not populated in pre-script
+    Env : string // current environment name
+    Fail : string -> unit // call to fail the test with a message
+    Set : string -> string -> unit // set a variable for downstream steps
+    Log : string -> unit // write to test output
+}
+```
+
+---
+
+## `script-post` — Example Post-Script (`validate-user.fsx`)
+
+```fsharp
+// ctx : NapContext is injected automatically
+let user = ctx.Response.Json
+
+if user.GetProperty("id").GetString() <> ctx.Vars["userId"] then
+    ctx.Fail "User ID mismatch"
+
+// Extract a token from response and pass it to the next step
+let token = user.GetProperty("sessionToken").GetString()
+ctx.Set "token" token
+```
+
+---
+
+## `script-orchestration` — Script-Driven Execution (Inverse Model)
+
+The relationship between `.nap` files and scripts works **both ways**:
+
+**`.nap` file drives scripts** — a request file references one or more pre/post scripts.
+ +**Script drives `.nap` files** — an `.fsx` file can itself act as the entry point, orchestrating as many requests as needed: + +```fsharp +// orchestrate.fsx — F# script as the top-level runner +// ctx : NapContext injected; nap : NapRunner also injected + +let loginResult = nap.Run "./auth/01_login.nap" +ctx.Set "token" (loginResult.Response.Json.GetProperty("token").GetString()) + +for userId in [1; 2; 3] do + ctx.Set "userId" (string userId) + let result = nap.Run "./users/get-user.nap" + if result.Response.StatusCode <> 200 then + ctx.Fail $"User {userId} not found" +``` + +### `script-runner` — NapRunner + +The `NapRunner` object injected into orchestration scripts: + +```fsharp +type NapRunner = { + Run : string -> NapResult // run a .nap file, returns result + RunList : string -> NapResult list // run a .naplist file + Vars : Map<string, string> // shared variable bag +} +``` + +This enables arbitrarily complex test flows — loops, branching, data-driven runs — without any special playlist syntax. + +A `.naplist` can reference an `.fsx` orchestration script as a step, the same as any `.nap` file: + +```naplist +[steps] +./auth/01_login.nap +./scripts/parametrized-user-tests.fsx # script drives multiple .nap files +./teardown/cleanup.nap +``` + +--- + +## `script-dispatch` — Language Extensibility + +The `nap-script` section specifies a file path. The runtime dispatches based on file extension: +- `.fsx` → F# interactive via `dotnet fsi` (`script-fsx`) +- `.csx` → C# scripting via `dotnet script` (`script-csx`) +- Future: `.py`, `.js`, etc. — the architecture allows pluggable runners diff --git a/specs/ZED-EXTENSION-PLAN.md b/specs/ZED-EXTENSION-PLAN.md new file mode 100644 index 0000000..80fe21a --- /dev/null +++ b/specs/ZED-EXTENSION-PLAN.md @@ -0,0 +1,124 @@ +# Nap Zed Extension — Implementation Plan + +Zed extensions are written in Rust, compiled to WebAssembly. 
The extension provides syntax highlighting via Tree-sitter, language intelligence via LSP, and run actions via runnables. Zed does not support webviews, sidebar panels, test explorers, or status bar items — those are VSCode-only. + +--- + +## Architecture + +``` +nap-zed-extension/ +├── Cargo.toml # crate-type = ["cdylib"], depends on zed_extension_api +├── extension.toml # Extension manifest: languages, grammars, language servers +├── src/ +│ └── lib.rs # Extension trait impl: language_server_command, slash commands +├── languages/ +│ └── nap/ +│ ├── config.toml # Language metadata: name, path_suffixes, comments, tabs +│ ├── highlights.scm # Syntax highlighting queries +│ ├── brackets.scm # Bracket matching pairs +│ ├── outline.scm # Code outline (sections as items) +│ ├── indents.scm # Auto-indentation rules +│ ├── injections.scm # Language injection (if needed for body blocks) +│ ├── runnables.scm # Detect [request] blocks as runnable +│ └── redactions.scm # Mask {{variable}} values for screen sharing +├── grammars/ +│ └── tree-sitter-nap/ # Tree-sitter grammar (C, compiled to WASM) +│ ├── grammar.js # Tree-sitter grammar definition +│ └── src/ +│ └── parser.c # Generated parser +└── LICENSE +``` + +--- + +## Implementation Phases + +### Phase 1 — Tree-sitter Grammar + Syntax Highlighting + +Build the Tree-sitter grammar for `.nap` and `.naplist` files. Write all query files. 
+ +- `grammar.js` — Tree-sitter grammar definition covering all `.nap` syntax: section headers, key-value pairs, `{{variable}}` interpolation, HTTP methods, comments, string literals, assertion operators +- `highlights.scm` — Map grammar nodes to theme captures (`@keyword`, `@string`, `@variable`, `@function`, `@comment`, `@operator`, `@punctuation`) +- `brackets.scm` — Pair `[` and `]` for section headers +- `outline.scm` — Expose `[meta]`, `[request]`, `[assert]`, `[script]`, `[vars]`, `[steps]` as outline items +- `indents.scm` — Auto-indent after section headers +- `config.toml` — Register `.nap` and `.naplist` file extensions, set `#` as line comment, configure tab size + +### Phase 2 — Runnables (Run from Editor) + +- `runnables.scm` — Detect `[request]` blocks and mark them as runnable +- The runnable label shows the HTTP method and URL +- Execution runs `nap run <file>` in the Zed terminal +- Capture `ZED_CUSTOM_` environment variables for method and URL context + +### Phase 3 — LSP Integration + +The Zed extension launches `nap-lsp` (the shared F# LSP binary) via `language_server_command`. The LSP itself is a separate project — see **[LSP Spec](./LSP-SPEC.md)** and **[LSP Plan](./LSP-PLAN.md)** for details. 
+ +- Implement `language_server_command` in `lib.rs` to launch `nap-lsp` binary +- Register the language server in `extension.toml` for `.nap` and `.naplist` languages +- The LSP provides completions, diagnostics, hover, symbols — no Zed-specific code needed +- Handle LSP binary discovery (check PATH, fallback to download) + +### Phase 4 — Slash Commands + Redactions + +- `/nap-run <file>` slash command — run a `.nap` file and return formatted results in the Assistant +- `/nap-import-openapi <file>` slash command — generate `.nap` files from an OpenAPI spec +- `redactions.scm` — Mask `{{variable}}` interpolation values during screen sharing +- Implement `complete_slash_command_argument` to suggest `.nap` and `.naplist` files from the worktree + +### Phase 5 — Polish & Publishing + +- Test on macOS and Linux +- Write extension description and README +- Add MIT license +- Submit PR to `zed-industries/extensions` repository +- Ensure Tree-sitter grammar produces visually identical highlighting to the VSCode TextMate grammar + +--- + +## TODO + +### Phase 1 — Tree-sitter Grammar + Syntax Highlighting +- [ ] Write `grammar.js` for `.nap` file format +- [ ] Write `grammar.js` for `.naplist` file format (or combined grammar) +- [ ] Write `highlights.scm` +- [ ] Write `brackets.scm` +- [ ] Write `outline.scm` +- [ ] Write `indents.scm` +- [ ] Write `config.toml` with language metadata +- [ ] Register grammar in `extension.toml` +- [ ] Test highlighting matches VSCode TextMate grammar visually + +### Phase 2 — Runnables +- [ ] Write `runnables.scm` to detect `[request]` blocks +- [ ] Verify `nap run <file>` executes in Zed terminal +- [ ] Add runnable label showing HTTP method + URL + +### Phase 3 — LSP Integration +- [ ] Implement `language_server_command` in `lib.rs` +- [ ] Register language server in `extension.toml` +- [ ] Test completions, diagnostics, hover via LSP +- [ ] Handle LSP binary discovery (PATH lookup) + +### Phase 4 — Slash Commands + Redactions +- [ ] 
Implement `/nap-run` slash command +- [ ] Implement `/nap-import-openapi` slash command +- [ ] Implement argument completion for slash commands +- [ ] Write `redactions.scm` for `{{variable}}` masking + +### Phase 5 — Polish & Publishing +- [ ] Test on macOS and Linux +- [ ] Write extension README +- [ ] Add license +- [ ] Submit to zed-industries/extensions +- [ ] Visual parity check against VSCode highlighting + +--- + +## Related Specs + +- [LSP Specification](./LSP-SPEC.md) — Language server capabilities, architecture, and protocol details +- [LSP Plan](./LSP-PLAN.md) — LSP implementation phases and TODO +- [IDE Extension Spec](./IDE-EXTENSION-SPEC.md) — Feature matrix and shared/IDE-specific behaviour diff --git a/src/DotHttp.Tests/.gitignore b/src/DotHttp.Tests/.gitignore new file mode 100644 index 0000000..0559642 --- /dev/null +++ b/src/DotHttp.Tests/.gitignore @@ -0,0 +1 @@ +.http-cache/ diff --git a/src/DotHttp.Tests/.http-cache/deepns-restclient.http b/src/DotHttp.Tests/.http-cache/deepns-restclient.http new file mode 100644 index 0000000..4fb9e4c --- /dev/null +++ b/src/DotHttp.Tests/.http-cache/deepns-restclient.http @@ -0,0 +1,51 @@ +# Sample HTTP requests using vscode rest-client extension +# Lines with ### serves as a marker for the extension to insert +# links to send requests + +### A simple GET Request +# Get list of sites supported by stackexchange APIs +GET https://api.stackexchange.com/2.2/sites + +### Get list of tags by site +GET https://api.stackexchange.com/2.2/tags?site=stackoverflow + +### Get details of a particular tag +# Query parameters specified one per line. 
+GET https://api.stackexchange.com/2.2/tags/vscode-extensions/info + ?site=stackoverflow + +### Name the request (using @name) and refer to it in a different request +# @name tagsearch +GET https://api.stackexchange.com/2.2/tags?site=askubuntu + +### Access values from a request or response in another request +# The general syntax is +# <request-name>.<request|response>.<body|headers>.<path> +# For JSON response, JSONPath syntax (https://goessner.net/articles/JsonPath/) +# is used. +GET https://api.stackexchange.com/2.2/tags/{{tagsearch.response.body.$.items[0].name}}/info?site=askubuntu +#Authorization: Basic base64-user-password + +### Supports different authentication options +# Body can be specified separately from the request +POST https://example.com/posts +Authorization: Basic username:password + +{ + "id": 1, + "title": "My awesome post", + "timestamp": 1504932105 +} + + +### Another example showing POST request, this +# time with a file level variable +@test_server = dummy.restapiexample.com +POST http://{{test_server}}/api/v1/create +Content-Type: application/json + +{ + "name":"Joe", + "salary":"123456789", + "age":"23", +} \ No newline at end of file diff --git a/src/DotHttp.Tests/.http-cache/ijhttp-echo.http b/src/DotHttp.Tests/.http-cache/ijhttp-echo.http new file mode 100644 index 0000000..1cc90ce --- /dev/null +++ b/src/DotHttp.Tests/.http-cache/ijhttp-echo.http @@ -0,0 +1,12 @@ +### Echo test +GET /echo HTTP/1.1 +Accept: application/json +Public-Variable: {{public-variable}} +Another-Variable: {{another-variable}} +Third-Variable: {{third-variable}} +Hidden-Variable: {{hidden-variable}} +Hidden-Variable2: {{hidden-variable2}} +Last-Variable: {{last-variable}} +Host: localhost:{{localport}} + +### diff --git a/src/DotHttp.Tests/.http-cache/reggieray-todos.http b/src/DotHttp.Tests/.http-cache/reggieray-todos.http new file mode 100644 index 0000000..0a4c809 --- /dev/null +++ b/src/DotHttp.Tests/.http-cache/reggieray-todos.http @@ -0,0 +1,44 @@ 
+@base_address = http://localhost:5295 + + +GET {{base_address}}/todos/ +Accept: application/json + +### + + +GET {{base_address}}/todos/{{$guid}} +Authorization: Basic {{$dotenv Authorization}} +Accept: application/json + +### + +POST {{base_address}}/todos/ +Authorization: Basic {{$dotenv Authorization}} +Content-Type: application/json + +{ + "id": "{{$guid}}", + "title": "Todo Title ({{$timestamp}})", + "isComplete": false +} + +### + + +@todo_id = {{$guid}} +PUT {{base_address}}/todos/{{todo_id}} +Authorization: Basic {{$dotenv Authorization}} +Content-Type: application/json + +{ + "id": "{{todo_id}}", + "title": "Todo Title ({{$timestamp}})", + "isComplete": false +} + +### + +DELETE {{base_address}}/todos/{{$guid}} +Authorization: Basic {{$dotenv Authorization}} +Accept: application/json \ No newline at end of file diff --git a/src/DotHttp.Tests/.http-cache/waldyrious-example.http b/src/DotHttp.Tests/.http-cache/waldyrious-example.http new file mode 100644 index 0000000..5d06865 --- /dev/null +++ b/src/DotHttp.Tests/.http-cache/waldyrious-example.http @@ -0,0 +1,28 @@ +# To use with the [vscode-restclient](https://github.com/Huachao/vscode-restclient) extension + +#------------------------------------------------------------------------------- +# Example with JSON data +POST https://api.example.com/address +Content-Type: application/json + +{ + "foo": "bar", + "baz": "qux" +} +### + +#------------------------------------------------------------------------------- +# Example with query parameters +GET https://example.com/comments + ?page=2 + &pageSize=10 +### + +#------------------------------------------------------------------------------- +# Example with form-urlencoded data +POST https://api.example.com/login +Content-Type: application/x-www-form-urlencoded + +name=foo +&password=bar +### \ No newline at end of file diff --git a/src/DotHttp.Tests/DotHttp.Tests.fsproj b/src/DotHttp.Tests/DotHttp.Tests.fsproj new file mode 100644 index 0000000..6c872e6 --- 
/dev/null +++ b/src/DotHttp.Tests/DotHttp.Tests.fsproj @@ -0,0 +1,23 @@ +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <IsPackable>false</IsPackable> + </PropertyGroup> + + <ItemGroup> + <Compile Include="ParserTests.fs" /> + <Compile Include="RealWorldTests.fs" /> + </ItemGroup> + + <ItemGroup> + <PackageReference Include="coverlet.collector" Version="8.0.0" /> + <PackageReference Include="Microsoft.NET.Test.Sdk" Version="18.3.0" /> + <PackageReference Include="xunit" Version="2.9.3" /> + <PackageReference Include="xunit.runner.visualstudio" Version="3.1.5" /> + </ItemGroup> + + <ItemGroup> + <ProjectReference Include="..\DotHttp\DotHttp.fsproj" /> + </ItemGroup> + +</Project> diff --git a/src/DotHttp.Tests/ParserTests.fs b/src/DotHttp.Tests/ParserTests.fs new file mode 100644 index 0000000..322eed2 --- /dev/null +++ b/src/DotHttp.Tests/ParserTests.fs @@ -0,0 +1,1316 @@ +module DotHttp.Tests.ParserTests +// Specs: http-shared, http-separator, http-method-line, http-headers, http-body, +// http-comments, http-vars, http-ms, http-jb, http-convert-detect, +// http-convert-parse, http-parser-project + +open Xunit +open DotHttp +open DotHttp.Parser + +// ─── Helpers ─────────────────────────────────────────────────── + +let private unwrap (input: string) : HttpFile = + match parse input with + | Ok f -> f + | Error e -> failwith $"Expected parse to succeed but got: {e}" + +let private firstRequest (f: HttpFile) : HttpRequest = + match f.Requests with + | first :: _ -> first + | [] -> failwith "Expected at least one request" + +let private requestAt (f: HttpFile) (index: int) : HttpRequest = + if index < f.Requests.Length then + f.Requests[index] + else + failwith $"Expected request at index {index}" + +// ─── Single request ──────────────────────────────────────────── + +[<Fact>] +let ``parse minimal GET request`` () = + let f = unwrap "GET https://api.example.com/users\n" + let req = firstRequest f + Assert.Equal("GET", req.Method) + 
Assert.Equal("https://api.example.com/users", req.Url) + Assert.Equal(1, f.Requests.Length) + Assert.True(req.Headers.IsEmpty) + Assert.True(req.Body.IsNone) + Assert.True(req.Name.IsNone) + Assert.Equal(Common, f.Dialect) + +[<Fact>] +let ``parse POST with headers and body`` () = + let input = + """POST https://api.example.com/users +Content-Type: application/json +Accept: application/json + +{ + "name": "Alice", + "email": "alice@example.com" +} +""" + + let f = unwrap input + let req = firstRequest f + Assert.Equal("POST", req.Method) + Assert.Equal("https://api.example.com/users", req.Url) + Assert.Equal(2, req.Headers.Length) + Assert.Equal("Content-Type", fst req.Headers[0]) + Assert.Equal("application/json", snd req.Headers[0]) + Assert.Equal("Accept", fst req.Headers[1]) + Assert.Equal("application/json", snd req.Headers[1]) + Assert.True(req.Body.IsSome) + Assert.Contains("Alice", req.Body.Value) + Assert.Contains("alice@example.com", req.Body.Value) + +[<Fact>] +let ``parse request with HTTP version`` () = + let f = unwrap "GET https://example.com HTTP/1.1\n" + let req = firstRequest f + Assert.Equal("GET", req.Method) + Assert.Equal("https://example.com", req.Url) + Assert.Equal(Some "1.1", req.HttpVersion) + +[<Fact>] +let ``parse request with HTTP/2`` () = + let f = unwrap "GET https://example.com HTTP/2\n" + let req = firstRequest f + Assert.Equal(Some "2", req.HttpVersion) + +// ─── Multiple requests with ### separator ────────────────────── + +[<Fact>] +let ``parse multiple requests separated by ###`` () = + let input = + """GET https://api.example.com/users + +### + +POST https://api.example.com/users +Content-Type: application/json + +{"name": "Bob"} + +### Delete user + +DELETE https://api.example.com/users/1 +""" + + let f = unwrap input + Assert.Equal(3, f.Requests.Length) + + let get = requestAt f 0 + Assert.Equal("GET", get.Method) + Assert.Equal("https://api.example.com/users", get.Url) + Assert.True(get.Body.IsNone) + + let post = requestAt 
f 1 + Assert.Equal("POST", post.Method) + Assert.True(post.Body.IsSome) + Assert.Contains("Bob", post.Body.Value) + + let delete = requestAt f 2 + Assert.Equal("DELETE", delete.Method) + Assert.Equal("https://api.example.com/users/1", delete.Url) + Assert.Equal(Some "Delete user", delete.Name) + +// ─── Named requests (### name) ───────────────────────────────── + +[<Fact>] +let ``parse separator name becomes request name`` () = + let input = + """### Get all users +GET https://api.example.com/users +""" + + let f = unwrap input + let req = firstRequest f + Assert.Equal(Some "Get all users", req.Name) + Assert.Equal("GET", req.Method) + +// ─── Comments ────────────────────────────────────────────────── + +[<Fact>] +let ``parse hash comments`` () = + let input = + """# This is a comment +GET https://api.example.com/users +""" + + let f = unwrap input + let req = firstRequest f + Assert.Equal("GET", req.Method) + Assert.Contains("This is a comment", req.Comments) + +[<Fact>] +let ``parse double-slash comments`` () = + let input = + """// Another comment +GET https://api.example.com/users +""" + + let f = unwrap input + let req = firstRequest f + Assert.Contains("Another comment", req.Comments) + +// ─── Microsoft dialect ───────────────────────────────────────── + +[<Fact>] +let ``parse Microsoft file-level variable declarations`` () = + let input = + """@host = api.example.com +@token = abc123 + +GET https://{{host}}/users +Authorization: Bearer {{token}} +""" + + let f = unwrap input + Assert.Equal(Microsoft, f.Dialect) + Assert.Equal(2, f.FileVariables.Length) + Assert.Equal("host", fst f.FileVariables[0]) + Assert.Equal("api.example.com", snd f.FileVariables[0]) + Assert.Equal("token", fst f.FileVariables[1]) + Assert.Equal("abc123", snd f.FileVariables[1]) + + let req = firstRequest f + Assert.Equal("GET", req.Method) + Assert.Contains("{{host}}", req.Url) + Assert.Equal(1, req.Headers.Length) + Assert.Equal("Authorization", fst req.Headers[0]) + 
Assert.Contains("{{token}}", snd req.Headers[0]) + +[<Fact>] +let ``parse Microsoft name directive`` () = + let input = + """# @name GetUsers +GET https://api.example.com/users +""" + + let f = unwrap input + let req = firstRequest f + Assert.Equal(Microsoft, f.Dialect) + Assert.Equal(Some "GetUsers", req.Name) + +// ─── JetBrains dialect ───────────────────────────────────────── + +[<Fact>] +let ``parse JetBrains inline post-response script`` () = + let input = + """GET https://api.example.com/users + +> {% + client.test("status", function() { + client.assert(response.status === 200); + }); +%} +""" + + let f = unwrap input + Assert.Equal(JetBrains, f.Dialect) + let req = firstRequest f + Assert.True(req.PostScript.IsSome) + Assert.Contains("response.status", req.PostScript.Value) + +[<Fact>] +let ``parse JetBrains inline pre-request script`` () = + let input = + """< {% request.variables.set("ts", Date.now()) %} +POST https://api.example.com/data +Content-Type: application/json + +{"timestamp": "{{ts}}"} +""" + + let f = unwrap input + Assert.Equal(JetBrains, f.Dialect) + let req = firstRequest f + Assert.True(req.PreScript.IsSome) + Assert.Contains("request.variables.set", req.PreScript.Value) + +[<Fact>] +let ``parse JetBrains file script references`` () = + let input = + """< scripts/setup.js +GET https://api.example.com/users +> scripts/validate.js +""" + + let f = unwrap input + let req = firstRequest f + Assert.True(req.PreScript.IsSome) + Assert.Equal("file:scripts/setup.js", req.PreScript.Value) + Assert.True(req.PostScript.IsSome) + Assert.Equal("file:scripts/validate.js", req.PostScript.Value) + +// ─── Variable interpolation (passthrough) ────────────────────── + +[<Fact>] +let ``variable interpolation syntax preserved in URL`` () = + let f = unwrap "GET https://{{host}}/api/{{version}}/users\n" + let req = firstRequest f + Assert.Contains("{{host}}", req.Url) + Assert.Contains("{{version}}", req.Url) + +[<Fact>] +let ``variable interpolation preserved 
in headers`` () = + let input = + """GET https://api.example.com +Authorization: Bearer {{token}} +X-Request-Id: {{requestId}} +""" + + let f = unwrap input + let req = firstRequest f + Assert.Equal(2, req.Headers.Length) + Assert.Contains("{{token}}", snd req.Headers[0]) + Assert.Contains("{{requestId}}", snd req.Headers[1]) + +// ─── Case-insensitive methods ────────────────────────────────── + +[<Theory>] +[<InlineData("get")>] +[<InlineData("Get")>] +[<InlineData("GET")>] +let ``parse method case-insensitively`` (method: string) = + let f = unwrap $"{method} https://example.com\n" + let req = firstRequest f + Assert.Equal("GET", req.Method) + +[<Theory>] +[<InlineData("post")>] +[<InlineData("put")>] +[<InlineData("patch")>] +[<InlineData("delete")>] +[<InlineData("head")>] +[<InlineData("options")>] +[<InlineData("trace")>] +[<InlineData("connect")>] +let ``parse all HTTP methods`` (method: string) = + let f = unwrap $"{method} https://example.com\n" + let req = firstRequest f + Assert.Equal(method.ToUpperInvariant(), req.Method) + +// ─── Edge cases ──────────────────────────────────────────────── + +[<Fact>] +let ``empty input returns error`` () = + match parse "" with + | Error msg -> Assert.Contains("No HTTP requests", msg) + | Ok _ -> failwith "Expected error for empty input" + +[<Fact>] +let ``only comments returns error`` () = + match parse "# just a comment\n// another comment\n" with + | Error msg -> Assert.Contains("No HTTP requests", msg) + | Ok _ -> failwith "Expected error for comments-only input" + +[<Fact>] +let ``request without body has None body`` () = + let f = unwrap "DELETE https://api.example.com/users/42\n" + let req = firstRequest f + Assert.True(req.Body.IsNone) + +[<Fact>] +let ``multiple blank lines between requests handled`` () = + let input = + """GET https://example.com/a + + + +### + + + +GET https://example.com/b +""" + + let f = unwrap input + Assert.Equal(2, f.Requests.Length) + Assert.Equal("https://example.com/a", (requestAt 
f 0).Url) + Assert.Equal("https://example.com/b", (requestAt f 1).Url) + +// ─── Mixed dialect detection ─────────────────────────────────── + +[<Fact>] +let ``file with only standard features detected as Common`` () = + let input = + """### Request 1 +GET https://example.com + +### Request 2 +POST https://example.com +Content-Type: application/json + +{"key": "value"} +""" + + let f = unwrap input + Assert.Equal(Common, f.Dialect) + +[<Fact>] +let ``file with file-level variables detected as Microsoft`` () = + let f = unwrap "@host = example.com\nGET https://{{host}}\n" + Assert.Equal(Microsoft, f.Dialect) + +[<Fact>] +let ``file with script blocks detected as JetBrains`` () = + let input = + """GET https://example.com +> {% client.log("done") %} +""" + + let f = unwrap input + Assert.Equal(JetBrains, f.Dialect) + +// ─── Realistic multi-request file ────────────────────────────── + +[<Fact>] +let ``parse realistic REST API file`` () = + let input = + """### List all users +GET https://api.example.com/v1/users +Accept: application/json +Authorization: Bearer {{token}} + +### Create a user +POST https://api.example.com/v1/users +Content-Type: application/json +Authorization: Bearer {{token}} + +{ + "name": "Charlie", + "email": "charlie@test.com", + "role": "admin" +} + +### Get user by ID +GET https://api.example.com/v1/users/{{userId}} +Accept: application/json +Authorization: Bearer {{token}} + +### Delete user +DELETE https://api.example.com/v1/users/{{userId}} +Authorization: Bearer {{token}} +""" + + let f = unwrap input + Assert.Equal(4, f.Requests.Length) + + let list = requestAt f 0 + Assert.Equal("GET", list.Method) + Assert.Equal(Some "List all users", list.Name) + Assert.Equal(2, list.Headers.Length) + Assert.True(list.Body.IsNone) + + let create = requestAt f 1 + Assert.Equal("POST", create.Method) + Assert.Equal(Some "Create a user", create.Name) + Assert.Equal(2, create.Headers.Length) + Assert.True(create.Body.IsSome) + Assert.Contains("Charlie", 
create.Body.Value) + Assert.Contains("charlie@test.com", create.Body.Value) + Assert.Contains("admin", create.Body.Value) + + let get = requestAt f 2 + Assert.Equal("GET", get.Method) + Assert.Contains("{{userId}}", get.Url) + Assert.Equal(Some "Get user by ID", get.Name) + + let delete = requestAt f 3 + Assert.Equal("DELETE", delete.Method) + Assert.Equal(Some "Delete user", delete.Name) + Assert.Equal(1, delete.Headers.Length) + +// ─── Body blank line preservation ───────────────────────────── + +[<Fact>] +let ``body preserves internal blank lines`` () = + let input = + "POST https://example.com\nContent-Type: text/plain\n\nline 1\n\nline 2\n\nline 3\n" + + let f = unwrap input + let req = firstRequest f + Assert.True(req.Body.IsSome) + Assert.Contains("line 1", req.Body.Value) + Assert.Contains("line 2", req.Body.Value) + Assert.Contains("line 3", req.Body.Value) + // Blank lines between body lines must be preserved + Assert.Contains("line 1\n\nline 2", req.Body.Value) + Assert.Contains("line 2\n\nline 3", req.Body.Value) + +// ─── CRLF line endings ──────────────────────────────────────── + +[<Fact>] +let ``CRLF line endings parsed correctly`` () = + let input = + "GET https://example.com/crlf HTTP/1.1\r\nAccept: text/html\r\nHost: example.com\r\n\r\n" + + let f = unwrap input + let req = firstRequest f + Assert.Equal("GET", req.Method) + Assert.Equal("https://example.com/crlf", req.Url) + Assert.Equal(Some "1.1", req.HttpVersion) + Assert.Equal(2, req.Headers.Length) + Assert.Equal("Accept", fst req.Headers[0]) + Assert.Equal("text/html", snd req.Headers[0]) + Assert.Equal("Host", fst req.Headers[1]) + Assert.Equal("example.com", snd req.Headers[1]) + +// ─── Headers with colons in values ──────────────────────────── + +[<Fact>] +let ``header values may contain colons`` () = + let input = + "GET https://example.com\nX-Forwarded-For: http://proxy.internal:8080\nAuthorization: Basic dXNlcjpwYXNz\n" + + let f = unwrap input + let req = firstRequest f + 
Assert.Equal(2, req.Headers.Length)
+    Assert.Equal("X-Forwarded-For", fst req.Headers[0])
+    Assert.Equal("http://proxy.internal:8080", snd req.Headers[0])
+    Assert.Equal("Authorization", fst req.Headers[1])
+    Assert.Equal("Basic dXNlcjpwYXNz", snd req.Headers[1])
+
+// ─── URL with query parameters ────────────────────────────────
+
+[<Fact>]
+let ``URL with complex query parameters preserved`` () =
+    let input =
+        "GET https://api.example.com/search?q=hello+world&page=2&filter=status%3Aactive&sort=name:asc\n"
+
+    let f = unwrap input
+    let req = firstRequest f
+    Assert.Equal("GET", req.Method)
+    Assert.Contains("q=hello+world", req.Url)
+    Assert.Contains("page=2", req.Url)
+    Assert.Contains("filter=status%3Aactive", req.Url)
+    Assert.Contains("sort=name:asc", req.Url)
+
+// ═══════════════════════════════════════════════════════════════
+// Real-world .http file scenarios
+// ═══════════════════════════════════════════════════════════════
+
+// ─── 1. Stripe-style payment API ──────────────────────────────
+
+[<Fact>]
+let ``real-world: Stripe-style payment API`` () =
+    let input =
+        """@baseUrl = https://api.stripe.com/v1
+@secretKey = sk_test_abc123
+
+### Create a customer
+# @name CreateCustomer
+POST https://{{baseUrl}}/customers
+Authorization: Bearer {{secretKey}}
+Content-Type: application/x-www-form-urlencoded
+
+email=customer@example.com&name=Jane%20Doe&description=Test%20customer
+
+### Create a payment intent
+# @name CreatePaymentIntent
+POST https://{{baseUrl}}/payment_intents
+Authorization: Bearer {{secretKey}}
+Content-Type: application/x-www-form-urlencoded
+
+amount=2000&currency=usd&customer={{CreateCustomer.response.body.id}}&payment_method_types[]=card
+
+### List charges with pagination
+GET https://{{baseUrl}}/charges?limit=10&starting_after={{lastChargeId}}
+Authorization: Bearer {{secretKey}}
+"""
+
+    let f = unwrap input
+    Assert.Equal(Microsoft, f.Dialect)
+    Assert.Equal(3, f.Requests.Length)
+    Assert.Equal(2, f.FileVariables.Length)
+    
Assert.Equal("baseUrl", fst f.FileVariables[0]) + Assert.Equal("https://api.stripe.com/v1", snd f.FileVariables[0]) + Assert.Equal("secretKey", fst f.FileVariables[1]) + Assert.Equal("sk_test_abc123", snd f.FileVariables[1]) + + let createCust = requestAt f 0 + Assert.Equal("POST", createCust.Method) + Assert.Equal(Some "CreateCustomer", createCust.Name) + Assert.Contains("{{baseUrl}}", createCust.Url) + Assert.Equal(2, createCust.Headers.Length) + Assert.Equal("Authorization", fst createCust.Headers[0]) + Assert.Contains("{{secretKey}}", snd createCust.Headers[0]) + Assert.Equal("Content-Type", fst createCust.Headers[1]) + Assert.Equal("application/x-www-form-urlencoded", snd createCust.Headers[1]) + Assert.True(createCust.Body.IsSome) + Assert.Contains("email=customer@example.com", createCust.Body.Value) + Assert.Contains("name=Jane%20Doe", createCust.Body.Value) + + let pi = requestAt f 1 + Assert.Equal("POST", pi.Method) + Assert.Equal(Some "CreatePaymentIntent", pi.Name) + Assert.True(pi.Body.IsSome) + Assert.Contains("amount=2000", pi.Body.Value) + Assert.Contains("currency=usd", pi.Body.Value) + Assert.Contains("payment_method_types[]=card", pi.Body.Value) + + let charges = requestAt f 2 + Assert.Equal("GET", charges.Method) + Assert.Contains("limit=10", charges.Url) + Assert.Contains("starting_after={{lastChargeId}}", charges.Url) + Assert.True(charges.Body.IsNone) + +// ─── 2. 
OAuth2 token flow ───────────────────────────────────── + +[<Fact>] +let ``real-world: OAuth2 authorization code flow`` () = + let input = + """### Exchange authorization code for tokens +# @name TokenExchange +POST https://auth.example.com/oauth/token +Content-Type: application/x-www-form-urlencoded +Accept: application/json + +grant_type=authorization_code&code={{authCode}}&redirect_uri=https://app.example.com/callback&client_id={{clientId}}&client_secret={{clientSecret}} + +### Refresh access token +# @name RefreshToken +POST https://auth.example.com/oauth/token +Content-Type: application/x-www-form-urlencoded + +grant_type=refresh_token&refresh_token={{TokenExchange.response.body.refresh_token}}&client_id={{clientId}} + +### Call protected resource +GET https://api.example.com/me +Authorization: Bearer {{TokenExchange.response.body.access_token}} +Accept: application/json +""" + + let f = unwrap input + Assert.Equal(Microsoft, f.Dialect) + Assert.Equal(3, f.Requests.Length) + + let tokenEx = requestAt f 0 + Assert.Equal("POST", tokenEx.Method) + Assert.Equal("https://auth.example.com/oauth/token", tokenEx.Url) + Assert.Equal(Some "TokenExchange", tokenEx.Name) + Assert.Equal(2, tokenEx.Headers.Length) + Assert.True(tokenEx.Body.IsSome) + Assert.Contains("grant_type=authorization_code", tokenEx.Body.Value) + Assert.Contains("redirect_uri=https://app.example.com/callback", tokenEx.Body.Value) + Assert.Contains("client_secret={{clientSecret}}", tokenEx.Body.Value) + + let refresh = requestAt f 1 + Assert.Equal(Some "RefreshToken", refresh.Name) + Assert.True(refresh.Body.IsSome) + Assert.Contains("grant_type=refresh_token", refresh.Body.Value) + Assert.Contains("{{TokenExchange.response.body.refresh_token}}", refresh.Body.Value) + + let protected' = requestAt f 2 + Assert.Equal("GET", protected'.Method) + Assert.Equal("https://api.example.com/me", protected'.Url) + Assert.Equal(2, protected'.Headers.Length) + 
Assert.Contains("{{TokenExchange.response.body.access_token}}", snd protected'.Headers[0]) + +// ─── 3. GraphQL over HTTP ───────────────────────────────────── + +[<Fact>] +let ``real-world: GraphQL query and mutation over HTTP`` () = + let input = + """### GraphQL query - list repositories +POST https://api.github.com/graphql +Authorization: Bearer {{githubToken}} +Content-Type: application/json +User-Agent: MyApp/1.0 + +{ + "query": "query { viewer { repositories(first: 10) { nodes { name, stargazerCount } } } }" +} + +### GraphQL mutation - create issue +POST https://api.github.com/graphql +Authorization: Bearer {{githubToken}} +Content-Type: application/json +User-Agent: MyApp/1.0 + +{ + "query": "mutation($input: CreateIssueInput!) { createIssue(input: $input) { issue { id number title } } }", + "variables": { + "input": { + "repositoryId": "MDEwOlJlcG9zaXRvcnkxMjM0NTY=", + "title": "Bug: Login page broken", + "body": "Steps to reproduce:\n1. Go to /login\n2. Enter credentials\n3. Page crashes" + } + } +} +""" + + let f = unwrap input + Assert.Equal(Common, f.Dialect) + Assert.Equal(2, f.Requests.Length) + + let query = requestAt f 0 + Assert.Equal("POST", query.Method) + Assert.Equal("https://api.github.com/graphql", query.Url) + Assert.Equal(3, query.Headers.Length) + Assert.Equal("User-Agent", fst query.Headers[2]) + Assert.Equal("MyApp/1.0", snd query.Headers[2]) + Assert.True(query.Body.IsSome) + Assert.Contains("viewer", query.Body.Value) + Assert.Contains("repositories", query.Body.Value) + Assert.Contains("stargazerCount", query.Body.Value) + + let mutation = requestAt f 1 + Assert.Equal("POST", mutation.Method) + Assert.True(mutation.Body.IsSome) + Assert.Contains("createIssue", mutation.Body.Value) + Assert.Contains("MDEwOlJlcG9zaXRvcnkxMjM0NTY=", mutation.Body.Value) + Assert.Contains("Bug: Login page broken", mutation.Body.Value) + // Body with nested JSON must preserve structure including blank lines between keys + Assert.Contains("variables", 
mutation.Body.Value) + +// ─── 4. XML SOAP request ────────────────────────────────────── + +[<Fact>] +let ``real-world: SOAP XML web service`` () = + let input = + """### GetWeather SOAP call +POST https://www.w3schools.com/xml/tempconvert.asmx HTTP/1.1 +Content-Type: text/xml; charset=utf-8 +SOAPAction: "https://www.w3schools.com/xml/CelsiusToFahrenheit" + +<?xml version="1.0" encoding="utf-8"?> +<soap:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xmlns:xsd="http://www.w3.org/2001/XMLSchema" + xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/"> + <soap:Body> + <CelsiusToFahrenheit xmlns="https://www.w3schools.com/xml/"> + <Celsius>100</Celsius> + </CelsiusToFahrenheit> + </soap:Body> +</soap:Envelope> +""" + + let f = unwrap input + Assert.Equal(Common, f.Dialect) + Assert.Equal(1, f.Requests.Length) + + let req = firstRequest f + Assert.Equal("POST", req.Method) + Assert.Equal("https://www.w3schools.com/xml/tempconvert.asmx", req.Url) + Assert.Equal(Some "1.1", req.HttpVersion) + Assert.Equal(Some "GetWeather SOAP call", req.Name) + Assert.Equal(2, req.Headers.Length) + Assert.Equal("Content-Type", fst req.Headers[0]) + Assert.Equal("text/xml; charset=utf-8", snd req.Headers[0]) + Assert.Equal("SOAPAction", fst req.Headers[1]) + Assert.Contains("CelsiusToFahrenheit", snd req.Headers[1]) + Assert.True(req.Body.IsSome) + Assert.Contains("<?xml version=", req.Body.Value) + Assert.Contains("soap:Envelope", req.Body.Value) + Assert.Contains("soap:Body", req.Body.Value) + Assert.Contains("<Celsius>100</Celsius>", req.Body.Value) + Assert.Contains("xmlns:xsi=", req.Body.Value) + +// ─── 5. 
JetBrains multi-request with response handlers ──────── + +[<Fact>] +let ``real-world: JetBrains test suite with response handlers`` () = + let input = + """### Login and capture token +POST https://api.example.com/auth/login +Content-Type: application/json + +{"username": "admin", "password": "secret"} + +> {% + client.test("Login successful", function() { + client.assert(response.status === 200, "Expected 200"); + client.assert(response.body.token !== undefined, "Token missing"); + client.global.set("authToken", response.body.token); + }); +%} + +### Get user profile +GET https://api.example.com/users/me +Authorization: Bearer {{authToken}} +Accept: application/json + +> {% + client.test("Profile loaded", function() { + client.assert(response.status === 200); + client.assert(response.body.email !== undefined); + }); +%} + +### Update profile +PATCH https://api.example.com/users/me +Authorization: Bearer {{authToken}} +Content-Type: application/json + +{"displayName": "Admin User", "timezone": "UTC"} + +> {% + client.test("Profile updated", function() { + client.assert(response.status === 200); + client.assert(response.body.displayName === "Admin User"); + }); +%} +""" + + let f = unwrap input + Assert.Equal(JetBrains, f.Dialect) + Assert.Equal(3, f.Requests.Length) + + let login = requestAt f 0 + Assert.Equal("POST", login.Method) + Assert.Equal("https://api.example.com/auth/login", login.Url) + Assert.Equal(Some "Login and capture token", login.Name) + Assert.True(login.Body.IsSome) + Assert.Contains("admin", login.Body.Value) + Assert.True(login.PostScript.IsSome) + Assert.Contains("response.status === 200", login.PostScript.Value) + Assert.Contains("client.global.set", login.PostScript.Value) + Assert.Contains("authToken", login.PostScript.Value) + Assert.True(login.PreScript.IsNone) + + let profile = requestAt f 1 + Assert.Equal("GET", profile.Method) + Assert.Equal(Some "Get user profile", profile.Name) + Assert.Equal(2, profile.Headers.Length) + 
Assert.Contains("{{authToken}}", snd profile.Headers[0]) + Assert.True(profile.PostScript.IsSome) + Assert.Contains("response.body.email", profile.PostScript.Value) + Assert.True(profile.Body.IsNone) + + let update = requestAt f 2 + Assert.Equal("PATCH", update.Method) + Assert.Equal(Some "Update profile", update.Name) + Assert.True(update.Body.IsSome) + Assert.Contains("Admin User", update.Body.Value) + Assert.Contains("UTC", update.Body.Value) + Assert.True(update.PostScript.IsSome) + Assert.Contains("Admin User", update.PostScript.Value) + +// ─── 6. Microsoft REST Client with environments ────────────── + +[<Fact>] +let ``real-world: Microsoft REST Client full featured`` () = + let input = + """@hostname = localhost +@port = 3000 +@host = {{hostname}}:{{port}} +@contentType = application/json +@createdAt = 2024-01-15T10:30:00Z + +// This file tests the full CRUD API + +### Create a new todo item +# @name CreateTodo +POST https://{{host}}/api/todos +Content-Type: {{contentType}} + +{ + "title": "Buy groceries", + "completed": false, + "dueDate": "{{createdAt}}", + "tags": ["shopping", "personal"], + "priority": 1 +} + +### Get the created item using response reference +GET https://{{host}}/api/todos/{{CreateTodo.response.body.id}} +Accept: {{contentType}} + +### List all todos with filtering +GET https://{{host}}/api/todos?completed=false&sort=priority&order=asc&limit=25 +Accept: {{contentType}} + +### Update a todo with PUT (full replace) +PUT https://{{host}}/api/todos/{{CreateTodo.response.body.id}} +Content-Type: {{contentType}} + +{ + "title": "Buy groceries and cook dinner", + "completed": true, + "dueDate": "{{createdAt}}", + "tags": ["shopping", "personal", "cooking"], + "priority": 2 +} +""" + + let f = unwrap input + Assert.Equal(Microsoft, f.Dialect) + Assert.Equal(4, f.Requests.Length) + Assert.Equal(5, f.FileVariables.Length) + Assert.Equal("hostname", fst f.FileVariables[0]) + Assert.Equal("localhost", snd f.FileVariables[0]) + Assert.Equal("port", 
fst f.FileVariables[1]) + Assert.Equal("3000", snd f.FileVariables[1]) + Assert.Equal("host", fst f.FileVariables[2]) + Assert.Equal("{{hostname}}:{{port}}", snd f.FileVariables[2]) + Assert.Equal("contentType", fst f.FileVariables[3]) + Assert.Equal("application/json", snd f.FileVariables[3]) + Assert.Equal("createdAt", fst f.FileVariables[4]) + + let create = requestAt f 0 + Assert.Equal("POST", create.Method) + Assert.Equal(Some "CreateTodo", create.Name) + Assert.Contains("{{host}}", create.Url) + Assert.True(create.Body.IsSome) + Assert.Contains("Buy groceries", create.Body.Value) + Assert.Contains("\"tags\":", create.Body.Value) + Assert.Contains("\"shopping\"", create.Body.Value) + Assert.Contains("\"priority\": 1", create.Body.Value) + + let getItem = requestAt f 1 + Assert.Equal("GET", getItem.Method) + Assert.Contains("{{CreateTodo.response.body.id}}", getItem.Url) + Assert.True(getItem.Body.IsNone) + + let list = requestAt f 2 + Assert.Equal("GET", list.Method) + Assert.Contains("completed=false", list.Url) + Assert.Contains("sort=priority", list.Url) + Assert.Contains("limit=25", list.Url) + + let update = requestAt f 3 + Assert.Equal("PUT", update.Method) + Assert.True(update.Body.IsSome) + Assert.Contains("cook dinner", update.Body.Value) + Assert.Contains("cooking", update.Body.Value) + Assert.Contains("\"priority\": 2", update.Body.Value) + +// ─── 7. 
Kubernetes API ──────────────────────────────────────── + +[<Fact>] +let ``real-world: Kubernetes API requests`` () = + let input = + """### List pods in default namespace +GET https://kubernetes.default.svc/api/v1/namespaces/default/pods +Authorization: Bearer {{k8sToken}} +Accept: application/json + +### Create a deployment +POST https://kubernetes.default.svc/apis/apps/v1/namespaces/default/deployments +Authorization: Bearer {{k8sToken}} +Content-Type: application/json + +{ + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "name": "nginx-deployment", + "labels": { + "app": "nginx" + } + }, + "spec": { + "replicas": 3, + "selector": { + "matchLabels": { + "app": "nginx" + } + }, + "template": { + "metadata": { + "labels": { + "app": "nginx" + } + }, + "spec": { + "containers": [ + { + "name": "nginx", + "image": "nginx:1.25", + "ports": [ + { + "containerPort": 80 + } + ] + } + ] + } + } + } +} + +### Scale deployment +PATCH https://kubernetes.default.svc/apis/apps/v1/namespaces/default/deployments/nginx-deployment/scale +Authorization: Bearer {{k8sToken}} +Content-Type: application/strategic-merge-patch+json + +{"spec": {"replicas": 5}} + +### Delete deployment +DELETE https://kubernetes.default.svc/apis/apps/v1/namespaces/default/deployments/nginx-deployment +Authorization: Bearer {{k8sToken}} +""" + + let f = unwrap input + Assert.Equal(Common, f.Dialect) + Assert.Equal(4, f.Requests.Length) + + let listPods = requestAt f 0 + Assert.Equal("GET", listPods.Method) + Assert.Contains("/api/v1/namespaces/default/pods", listPods.Url) + Assert.Equal(Some "List pods in default namespace", listPods.Name) + Assert.True(listPods.Body.IsNone) + Assert.Equal(2, listPods.Headers.Length) + + let createDeploy = requestAt f 1 + Assert.Equal("POST", createDeploy.Method) + Assert.Contains("deployments", createDeploy.Url) + Assert.True(createDeploy.Body.IsSome) + Assert.Contains("nginx-deployment", createDeploy.Body.Value) + Assert.Contains("\"replicas\": 3", 
createDeploy.Body.Value) + Assert.Contains("nginx:1.25", createDeploy.Body.Value) + Assert.Contains("containerPort", createDeploy.Body.Value) + + let scale = requestAt f 2 + Assert.Equal("PATCH", scale.Method) + Assert.Contains("/scale", scale.Url) + Assert.Equal("Content-Type", fst scale.Headers[1]) + Assert.Equal("application/strategic-merge-patch+json", snd scale.Headers[1]) + Assert.True(scale.Body.IsSome) + Assert.Contains("\"replicas\": 5", scale.Body.Value) + + let del = requestAt f 3 + Assert.Equal("DELETE", del.Method) + Assert.Contains("nginx-deployment", del.Url) + Assert.True(del.Body.IsNone) + +// ─── 8. AWS S3 pre-signed style requests ────────────────────── + +[<Fact>] +let ``real-world: AWS-style requests with complex headers`` () = + let input = + """### Upload object to S3 +PUT https://my-bucket.s3.us-east-1.amazonaws.com/photos/2024/vacation.jpg +Host: my-bucket.s3.us-east-1.amazonaws.com +Content-Type: image/jpeg +x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +x-amz-date: 20240115T120000Z +Authorization: AWS4-HMAC-SHA256 Credential={{accessKey}}/20240115/us-east-1/s3/aws4_request, SignedHeaders=content-type;host;x-amz-content-sha256;x-amz-date, Signature={{signature}} + +### List bucket contents +GET https://my-bucket.s3.us-east-1.amazonaws.com/?list-type=2&prefix=photos/&max-keys=100 +Host: my-bucket.s3.us-east-1.amazonaws.com +x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +x-amz-date: 20240115T120000Z +Authorization: AWS4-HMAC-SHA256 Credential={{accessKey}}/20240115/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature={{listSig}} +""" + + let f = unwrap input + Assert.Equal(2, f.Requests.Length) + + let upload = requestAt f 0 + Assert.Equal("PUT", upload.Method) + Assert.Contains("vacation.jpg", upload.Url) + Assert.Equal(5, upload.Headers.Length) + Assert.Equal("Host", fst upload.Headers[0]) + 
Assert.Equal("x-amz-content-sha256", fst upload.Headers[2]) + Assert.Equal("x-amz-date", fst upload.Headers[3]) + Assert.Contains("AWS4-HMAC-SHA256", snd upload.Headers[4]) + Assert.Contains("{{accessKey}}", snd upload.Headers[4]) + Assert.Contains("Signature={{signature}}", snd upload.Headers[4]) + + let listBucket = requestAt f 1 + Assert.Equal("GET", listBucket.Method) + Assert.Contains("list-type=2", listBucket.Url) + Assert.Contains("prefix=photos/", listBucket.Url) + Assert.Contains("max-keys=100", listBucket.Url) + Assert.Equal(4, listBucket.Headers.Length) + +// ─── 9. JetBrains with pre-request scripts and file refs ───── + +[<Fact>] +let ``real-world: JetBrains pre and post scripts with file refs`` () = + let input = + """### Create signed webhook +< scripts/generate-hmac.js +POST https://api.example.com/webhooks +Content-Type: application/json +X-Signature: {{hmacSignature}} +X-Timestamp: {{timestamp}} + +{ + "url": "https://myapp.com/webhook", + "events": ["order.created", "order.updated", "payment.received"], + "secret": "whsec_abc123" +} + +> scripts/verify-webhook.js + +### Trigger test event +POST https://api.example.com/webhooks/{{webhookId}}/test +Authorization: Bearer {{apiKey}} + +> {% + client.test("Test event sent", function() { + client.assert(response.status === 202, "Expected 202 Accepted"); + client.assert(response.body.eventId !== undefined); + }); +%} +""" + + let f = unwrap input + Assert.Equal(JetBrains, f.Dialect) + Assert.Equal(2, f.Requests.Length) + + let createWebhook = requestAt f 0 + Assert.Equal("POST", createWebhook.Method) + Assert.Equal("https://api.example.com/webhooks", createWebhook.Url) + Assert.Equal(Some "Create signed webhook", createWebhook.Name) + Assert.Equal(3, createWebhook.Headers.Length) + Assert.Equal("X-Signature", fst createWebhook.Headers[1]) + Assert.Contains("{{hmacSignature}}", snd createWebhook.Headers[1]) + Assert.True(createWebhook.Body.IsSome) + Assert.Contains("order.created", 
createWebhook.Body.Value) + Assert.Contains("payment.received", createWebhook.Body.Value) + Assert.True(createWebhook.PreScript.IsSome) + Assert.Equal("file:scripts/generate-hmac.js", createWebhook.PreScript.Value) + Assert.True(createWebhook.PostScript.IsSome) + Assert.Equal("file:scripts/verify-webhook.js", createWebhook.PostScript.Value) + + let triggerTest = requestAt f 1 + Assert.Equal("POST", triggerTest.Method) + Assert.Contains("{{webhookId}}", triggerTest.Url) + Assert.True(triggerTest.PostScript.IsSome) + Assert.Contains("202 Accepted", triggerTest.PostScript.Value) + Assert.Contains("eventId", triggerTest.PostScript.Value) + +// ─── 10. Elasticsearch bulk operations ──────────────────────── + +[<Fact>] +let ``real-world: Elasticsearch bulk and NDJSON body`` () = + let input = + """### Create index with mappings +PUT https://elasticsearch.local:9200/products +Content-Type: application/json + +{ + "settings": { + "number_of_shards": 1, + "number_of_replicas": 0 + }, + "mappings": { + "properties": { + "name": { "type": "text", "analyzer": "standard" }, + "price": { "type": "float" }, + "category": { "type": "keyword" }, + "created_at": { "type": "date" } + } + } +} + +### Search with aggregation +POST https://elasticsearch.local:9200/products/_search +Content-Type: application/json + +{ + "size": 0, + "query": { + "bool": { + "must": [ + { "range": { "price": { "gte": 10, "lte": 100 } } }, + { "term": { "category": "electronics" } } + ] + } + }, + "aggs": { + "avg_price": { "avg": { "field": "price" } }, + "price_ranges": { + "range": { + "field": "price", + "ranges": [ + { "to": 25 }, + { "from": 25, "to": 50 }, + { "from": 50 } + ] + } + } + } +} + +### Delete by query +POST https://elasticsearch.local:9200/products/_delete_by_query +Content-Type: application/json + +{"query": {"range": {"created_at": {"lt": "2023-01-01"}}}} +""" + + let f = unwrap input + Assert.Equal(Common, f.Dialect) + Assert.Equal(3, f.Requests.Length) + + let createIdx = requestAt 
f 0 + Assert.Equal("PUT", createIdx.Method) + Assert.Contains("/products", createIdx.Url) + Assert.Equal(Some "Create index with mappings", createIdx.Name) + Assert.True(createIdx.Body.IsSome) + Assert.Contains("number_of_shards", createIdx.Body.Value) + Assert.Contains("\"text\"", createIdx.Body.Value) + Assert.Contains("\"float\"", createIdx.Body.Value) + Assert.Contains("\"keyword\"", createIdx.Body.Value) + + let search = requestAt f 1 + Assert.Equal("POST", search.Method) + Assert.Contains("/_search", search.Url) + Assert.True(search.Body.IsSome) + Assert.Contains("\"size\": 0", search.Body.Value) + Assert.Contains("electronics", search.Body.Value) + Assert.Contains("avg_price", search.Body.Value) + Assert.Contains("price_ranges", search.Body.Value) + + let deleteBQ = requestAt f 2 + Assert.Equal("POST", deleteBQ.Method) + Assert.Contains("_delete_by_query", deleteBQ.Url) + Assert.True(deleteBQ.Body.IsSome) + Assert.Contains("2023-01-01", deleteBQ.Body.Value) + +// ─── 11. Mixed comments and separators ──────────────────────── + +[<Fact>] +let ``real-world: Azure DevOps API with mixed comments`` () = + let input = + """# Azure DevOps REST API examples +# Base URL: https://dev.azure.com/{org}/{project}/_apis + +@org = mycompany +@project = myproject +@apiVersion = 7.1 + +### Get work item by ID +// List work items by query +GET https://dev.azure.com/{{org}}/{{project}}/_apis/wit/workitems/42?$expand=all&api-version={{apiVersion}} +Authorization: Basic {{pat}} +Accept: application/json + +### Create bug work item +// Create a new bug +POST https://dev.azure.com/{{org}}/{{project}}/_apis/wit/workitems/$Bug?api-version={{apiVersion}} +Content-Type: application/json-patch+json +Authorization: Basic {{pat}} + +[ + {"op": "add", "path": "/fields/System.Title", "value": "Login button unresponsive"}, + {"op": "add", "path": "/fields/System.Description", "value": "<p>The login button does not respond to clicks on Safari 17</p>"}, + {"op": "add", "path": 
"/fields/Microsoft.VSTS.Common.Priority", "value": 1}, + {"op": "add", "path": "/fields/System.Tags", "value": "bug; safari; auth"} +] +""" + + let f = unwrap input + Assert.Equal(Microsoft, f.Dialect) + Assert.Equal(2, f.Requests.Length) + Assert.Equal(3, f.FileVariables.Length) + Assert.Equal("org", fst f.FileVariables[0]) + Assert.Equal("mycompany", snd f.FileVariables[0]) + Assert.Equal("project", fst f.FileVariables[1]) + Assert.Equal("apiVersion", fst f.FileVariables[2]) + Assert.Equal("7.1", snd f.FileVariables[2]) + + let getWI = requestAt f 0 + Assert.Equal("GET", getWI.Method) + Assert.Contains("{{org}}", getWI.Url) + Assert.Contains("{{project}}", getWI.Url) + Assert.Contains("workitems/42", getWI.Url) + Assert.Contains("$expand=all", getWI.Url) + Assert.Contains("api-version={{apiVersion}}", getWI.Url) + Assert.Equal(Some "Get work item by ID", getWI.Name) + Assert.Equal(2, getWI.Headers.Length) + Assert.True(getWI.Body.IsNone) + // Comments should be captured + Assert.True(getWI.Comments.Length > 0) + + let createBug = requestAt f 1 + Assert.Equal("POST", createBug.Method) + Assert.Contains("$Bug", createBug.Url) + Assert.Equal(Some "Create bug work item", createBug.Name) + Assert.Equal("Content-Type", fst createBug.Headers[0]) + Assert.Equal("application/json-patch+json", snd createBug.Headers[0]) + Assert.True(createBug.Body.IsSome) + Assert.Contains("System.Title", createBug.Body.Value) + Assert.Contains("Login button unresponsive", createBug.Body.Value) + Assert.Contains("<p>The login button", createBug.Body.Value) + Assert.Contains("Safari 17", createBug.Body.Value) + Assert.Contains("Priority", createBug.Body.Value) + +// ─── 12. 
Multipart form data ───────────────────────────────── + +[<Fact>] +let ``real-world: multipart form data file upload`` () = + let input = + """### Upload document with metadata +POST https://api.example.com/documents/upload +Content-Type: multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW +Authorization: Bearer {{token}} + +------WebKitFormBoundary7MA4YWxkTrZu0gW +Content-Disposition: form-data; name="title" + +Quarterly Report Q4 2024 +------WebKitFormBoundary7MA4YWxkTrZu0gW +Content-Disposition: form-data; name="department" + +Engineering +------WebKitFormBoundary7MA4YWxkTrZu0gW +Content-Disposition: form-data; name="file"; filename="report.pdf" +Content-Type: application/pdf + +<binary content placeholder> +------WebKitFormBoundary7MA4YWxkTrZu0gW-- +""" + + let f = unwrap input + Assert.Equal(1, f.Requests.Length) + + let req = firstRequest f + Assert.Equal("POST", req.Method) + Assert.Contains("/documents/upload", req.Url) + Assert.Equal(2, req.Headers.Length) + Assert.Contains("multipart/form-data", snd req.Headers[0]) + Assert.Contains("boundary=", snd req.Headers[0]) + Assert.True(req.Body.IsSome) + Assert.Contains("WebKitFormBoundary", req.Body.Value) + Assert.Contains("Quarterly Report Q4 2024", req.Body.Value) + Assert.Contains("Engineering", req.Body.Value) + Assert.Contains("report.pdf", req.Body.Value) + Assert.Contains("application/pdf", req.Body.Value) + // Multipart bodies have internal blank lines that must be preserved + Assert.Contains("form-data; name=\"title\"", req.Body.Value) + +// ─── 13. Request without trailing newline ───────────────────── + +[<Fact>] +let ``edge case: request without trailing newline`` () = + let input = "GET https://example.com/no-trailing-newline" + let f = unwrap input + let req = firstRequest f + Assert.Equal("GET", req.Method) + Assert.Equal("https://example.com/no-trailing-newline", req.Url) + +// ─── 14. 
JetBrains unsupported methods produce no crash ─────── + +[<Fact>] +let ``unsupported JetBrains methods are silently skipped`` () = + let input = + """### Normal request +GET https://example.com/api + +### WebSocket (unsupported) +WEBSOCKET wss://example.com/ws + +### Another normal request +POST https://example.com/api +Content-Type: application/json + +{"key": "value"} +""" + + let f = unwrap input + // WebSocket request is skipped, only 2 real HTTP requests + Assert.Equal(2, f.Requests.Length) + Assert.Equal("GET", (requestAt f 0).Method) + Assert.Equal("POST", (requestAt f 1).Method) + Assert.True((requestAt f 1).Body.IsSome) + Assert.Contains("key", (requestAt f 1).Body.Value) diff --git a/src/DotHttp.Tests/RealWorldTests.fs b/src/DotHttp.Tests/RealWorldTests.fs new file mode 100644 index 0000000..68627d0 --- /dev/null +++ b/src/DotHttp.Tests/RealWorldTests.fs @@ -0,0 +1,832 @@ +module DotHttp.Tests.RealWorldTests + +open System +open System.IO +open System.Net.Http +open Xunit +open DotHttp +open DotHttp.Parser + +// ─── Infrastructure ─────────────────────────────────────────── + +let private cacheDir = + Path.Combine(AppContext.BaseDirectory, "..", "..", "..", ".http-cache") + +let private httpClient = new HttpClient() + +let private loadCached (filename: string) (url: string) : string = + let path = Path.Combine(cacheDir, filename) + + if not (Directory.Exists cacheDir) then + Directory.CreateDirectory cacheDir |> ignore + + if not (File.Exists path) then + let content = httpClient.GetStringAsync(url).Result + File.WriteAllText(path, content) + + File.ReadAllText path + +let private unwrap (input: string) : HttpFile = + match parse input with + | Ok f -> f + | Error e -> failwith $"Parse failed: {e}" + +let private reqAt (f: HttpFile) (i: int) : HttpRequest = f.Requests[i] + +let private assertHeader (req: HttpRequest) (key: string) (valuePart: string) = + let found = req.Headers |> List.tryFind (fun (k, _) -> k = key) + Assert.True(found.IsSome, $"Header 
'{key}' not found in request {req.Method} {req.Url}") + Assert.Contains(valuePart, snd found.Value) + +let private assertHeaderExact (req: HttpRequest) (key: string) (value: string) = + let found = req.Headers |> List.tryFind (fun (k, _) -> k = key) + Assert.True(found.IsSome, $"Header '{key}' not found") + Assert.Equal(value, snd found.Value) + +// ─── Source URLs ────────────────────────────────────────────── + +[<Literal>] +let private ReggierayUrl = + "https://raw.githubusercontent.com/reggieray/http-file-examples/main/http-file-examples.http" + +[<Literal>] +let private DeepnsUrl = + "https://gist.githubusercontent.com/deepns/38c24829361f23c90b3fe74a9af00d13/raw/vscode-rest-client-samples.http" + +[<Literal>] +let private WaldyriousUrl = + "https://gist.githubusercontent.com/waldyrious/fc4ce598447312970236bc645d4a14bf/raw/example.http" + +[<Literal>] +let private IjhttpEchoUrl = + "https://raw.githubusercontent.com/vitalijr2/ijhttp-demo/main/echo.http" + +[<Literal>] +let private BcnRustUrl = + "https://raw.githubusercontent.com/BcnRust/devbcn-workshop/refs/heads/main/api.http" + +[<Literal>] +let private ClockifyUrl = + "https://raw.githubusercontent.com/balexandre/ba-clockify/e97d3816ff5b18a30dc35e77e62beab0f4dbb159/_.http" + +[<Literal>] +let private DanvegaUrl = + "https://raw.githubusercontent.com/danvega/quick-bytes/4c482241d63da7aabf91861eb146fad4abdfb71e/qb.http" + +[<Literal>] +let private FlipChandlerUrl = + "https://raw.githubusercontent.com/flipChandler/project-management-api/f14df9aa93a02be93fdfcad3b29bbd3a0199acca/ap.http" + +[<Literal>] +let private JmfayardUrl = + "https://raw.githubusercontent.com/jmfayard/playground-spring/refs/heads/main/API.http" + +[<Literal>] +let private SquareCoreUrl = + "https://raw.githubusercontent.com/UKP-SQuARE/square-core/refs/heads/master/api.http" + +[<Literal>] +let private PanasonicUrl = + 
"https://raw.githubusercontent.com/lostfields/python-panasonic-comfort-cloud/edcb2ff11e1c62bde2a47bf1841ffe4e6024723d/requests.http" + +// ═══════════════════════════════════════════════════════════════ +// 1. reggieray — .NET Todo CRUD with MS variables, $guid, $dotenv +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: reggieray Todo CRUD API`` () = + let content = loadCached "reggieray-todos.http" ReggierayUrl + let f = unwrap content + Assert.Equal(Microsoft, f.Dialect) + Assert.Equal(5, f.Requests.Length) + // File variables: base_address, todo_id + Assert.True(f.FileVariables.Length >= 2) + Assert.Equal("base_address", fst f.FileVariables[0]) + Assert.Equal("http://localhost:5295", snd f.FileVariables[0]) + let todoIdVar = f.FileVariables |> List.find (fun (k, _) -> k = "todo_id") + Assert.Equal("{{$guid}}", snd todoIdVar) + + // Request 0: GET all todos + let r0 = reqAt f 0 + Assert.Equal("GET", r0.Method) + Assert.Contains("{{base_address}}", r0.Url) + Assert.Contains("/todos/", r0.Url) + Assert.Equal(1, r0.Headers.Length) + assertHeaderExact r0 "Accept" "application/json" + Assert.True(r0.Body.IsNone) + + // Request 1: GET single todo with guid + let r1 = reqAt f 1 + Assert.Equal("GET", r1.Method) + Assert.Contains("{{$guid}}", r1.Url) + Assert.Equal(2, r1.Headers.Length) + assertHeader r1 "Authorization" "{{$dotenv Authorization}}" + assertHeaderExact r1 "Accept" "application/json" + Assert.True(r1.Body.IsNone) + + // Request 2: POST create todo + let r2 = reqAt f 2 + Assert.Equal("POST", r2.Method) + Assert.Contains("/todos/", r2.Url) + Assert.Equal(2, r2.Headers.Length) + assertHeader r2 "Authorization" "{{$dotenv Authorization}}" + assertHeaderExact r2 "Content-Type" "application/json" + Assert.True(r2.Body.IsSome) + Assert.Contains("\"id\": \"{{$guid}}\"", r2.Body.Value) + Assert.Contains("\"title\":", r2.Body.Value) + Assert.Contains("{{$timestamp}}", r2.Body.Value) + 
Assert.Contains("\"isComplete\": false", r2.Body.Value) + + // Request 3: PUT update todo + let r3 = reqAt f 3 + Assert.Equal("PUT", r3.Method) + Assert.Contains("{{todo_id}}", r3.Url) + Assert.Equal(2, r3.Headers.Length) + assertHeader r3 "Authorization" "{{$dotenv Authorization}}" + assertHeaderExact r3 "Content-Type" "application/json" + Assert.True(r3.Body.IsSome) + Assert.Contains("\"id\": \"{{todo_id}}\"", r3.Body.Value) + Assert.Contains("{{$timestamp}}", r3.Body.Value) + + // Request 4: DELETE todo + let r4 = reqAt f 4 + Assert.Equal("DELETE", r4.Method) + Assert.Contains("{{$guid}}", r4.Url) + Assert.Equal(2, r4.Headers.Length) + assertHeader r4 "Authorization" "{{$dotenv Authorization}}" + assertHeaderExact r4 "Accept" "application/json" + Assert.True(r4.Body.IsNone) + +// ═══════════════════════════════════════════════════════════════ +// 2. deepns — VS Code REST Client with @name, response refs +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: deepns StackExchange REST Client`` () = + let content = loadCached "deepns-restclient.http" DeepnsUrl + let f = unwrap content + Assert.Equal(Microsoft, f.Dialect) + Assert.True(f.Requests.Length >= 6) + + // File variable + let testServer = f.FileVariables |> List.find (fun (k, _) -> k = "test_server") + Assert.Equal("dummy.restapiexample.com", snd testServer) + + // Request 0: GET sites + let r0 = reqAt f 0 + Assert.Equal("GET", r0.Method) + Assert.Equal("https://api.stackexchange.com/2.2/sites", r0.Url) + Assert.Equal(Some "A simple GET Request", r0.Name) + Assert.True(r0.Body.IsNone) + Assert.True(r0.Headers.IsEmpty) + + // Request 1: GET tags with query param + let r1 = reqAt f 1 + Assert.Equal("GET", r1.Method) + Assert.Contains("tags?site=stackoverflow", r1.Url) + Assert.Equal(Some "Get list of tags by site", r1.Name) + + // Request 2: GET tag info + let r2 = reqAt f 2 + Assert.Equal("GET", r2.Method) + Assert.Contains("vscode-extensions", r2.Url) + 
Assert.Equal(Some "Get details of a particular tag", r2.Name) + + // Request 3: Named request @name tagsearch + let r3 = reqAt f 3 + Assert.Equal("GET", r3.Method) + Assert.Equal(Some "tagsearch", r3.Name) + Assert.Contains("tags?site=askubuntu", r3.Url) + + // Request 4: Response variable reference + let r4 = reqAt f 4 + Assert.Equal("GET", r4.Method) + Assert.Contains("{{tagsearch.response.body.$.items[0].name}}", r4.Url) + Assert.Contains("?site=askubuntu", r4.Url) + Assert.Equal(Some "Access values from a request or response in another request", r4.Name) + + // Request 5: POST with auth and body + let r5 = reqAt f 5 + Assert.Equal("POST", r5.Method) + Assert.Equal("https://example.com/posts", r5.Url) + Assert.Equal(1, r5.Headers.Length) + assertHeader r5 "Authorization" "Basic username:password" + Assert.True(r5.Body.IsSome) + Assert.Contains("\"id\": 1", r5.Body.Value) + Assert.Contains("My awesome post", r5.Body.Value) + Assert.Contains("1504932105", r5.Body.Value) + + // Request 6: POST with file-level variable in URL + let r6 = reqAt f 6 + Assert.Equal("POST", r6.Method) + Assert.Contains("{{test_server}}", r6.Url) + Assert.Contains("/api/v1/create", r6.Url) + assertHeaderExact r6 "Content-Type" "application/json" + Assert.True(r6.Body.IsSome) + Assert.Contains("\"name\":\"Joe\"", r6.Body.Value) + Assert.Contains("\"salary\":\"123456789\"", r6.Body.Value) + +// ═══════════════════════════════════════════════════════════════ +// 3. 
waldyrious — JSON, query params, form-urlencoded +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: waldyrious mixed content types`` () = + let content = loadCached "waldyrious-example.http" WaldyriousUrl + let f = unwrap content + Assert.Equal(Common, f.Dialect) + Assert.Equal(3, f.Requests.Length) + + // Request 0: JSON POST + let r0 = reqAt f 0 + Assert.Equal("POST", r0.Method) + Assert.Equal("https://api.example.com/address", r0.Url) + Assert.Equal(1, r0.Headers.Length) + assertHeaderExact r0 "Content-Type" "application/json" + Assert.True(r0.Body.IsSome) + Assert.Contains("\"foo\": \"bar\"", r0.Body.Value) + Assert.Contains("\"baz\": \"qux\"", r0.Body.Value) + + // Request 1: GET with multiline query params + // The continuation lines (?page=2, &pageSize=10) are parsed as body, not URL + let r1 = reqAt f 1 + Assert.Equal("GET", r1.Method) + Assert.Contains("example.com/comments", r1.Url) + Assert.True(r1.Body.IsSome) + Assert.Contains("page=2", r1.Body.Value) + Assert.Contains("pageSize=10", r1.Body.Value) + + // Request 2: Form-urlencoded POST + let r2 = reqAt f 2 + Assert.Equal("POST", r2.Method) + Assert.Equal("https://api.example.com/login", r2.Url) + Assert.Equal(1, r2.Headers.Length) + assertHeaderExact r2 "Content-Type" "application/x-www-form-urlencoded" + Assert.True(r2.Body.IsSome) + Assert.Contains("name=foo", r2.Body.Value) + Assert.Contains("password=bar", r2.Body.Value) + +// ═══════════════════════════════════════════════════════════════ +// 4. 
ijhttp-demo — JetBrains echo with HTTP/1.1 and 8 headers +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: JetBrains ijhttp echo test`` () = + let content = loadCached "ijhttp-echo.http" IjhttpEchoUrl + let f = unwrap content + Assert.Equal(1, f.Requests.Length) + Assert.Equal(Common, f.Dialect) + + let r0 = reqAt f 0 + Assert.Equal("GET", r0.Method) + Assert.Contains("/echo", r0.Url) + Assert.Equal(Some "1.1", r0.HttpVersion) + Assert.Equal(Some "Echo test", r0.Name) + Assert.True(r0.Body.IsNone) + + // Exactly 8 headers with interpolated variables + Assert.Equal(8, r0.Headers.Length) + assertHeaderExact r0 "Accept" "application/json" + assertHeaderExact r0 "Public-Variable" "{{public-variable}}" + assertHeaderExact r0 "Another-Variable" "{{another-variable}}" + assertHeaderExact r0 "Third-Variable" "{{third-variable}}" + assertHeaderExact r0 "Hidden-Variable" "{{hidden-variable}}" + assertHeaderExact r0 "Hidden-Variable2" "{{hidden-variable2}}" + assertHeader r0 "Host" "localhost:{{localport}}" + // Last-Variable should also be present + assertHeaderExact r0 "Last-Variable" "{{last-variable}}" + +// ═══════════════════════════════════════════════════════════════ +// 5. 
BcnRust — Rust film CRUD, MS vars, HTTP/1.1 everywhere +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: BcnRust workshop film API`` () = + let content = loadCached "bcnrust-workshop.http" BcnRustUrl + let f = unwrap content + Assert.Equal(Microsoft, f.Dialect) + Assert.Equal(7, f.Requests.Length) + Assert.Equal(2, f.FileVariables.Length) + Assert.Equal("host", fst f.FileVariables[0]) + Assert.Equal("http://localhost:8080", snd f.FileVariables[0]) + Assert.Equal("film_id", fst f.FileVariables[1]) + Assert.Equal("6f05e5f2-133c-11ee-be9f-0ab7e0d8c876", snd f.FileVariables[1]) + + // Request 0: health check + let r0 = reqAt f 0 + Assert.Equal("GET", r0.Method) + Assert.Contains("{{host}}", r0.Url) + Assert.Contains("/api/health", r0.Url) + Assert.Equal(Some "1.1", r0.HttpVersion) + Assert.Equal(Some "health", r0.Name) + Assert.True(r0.Body.IsNone) + + // Request 1: POST create film + let r1 = reqAt f 1 + Assert.Equal("POST", r1.Method) + Assert.Contains("/api/v1/films", r1.Url) + Assert.Equal(Some "1.1", r1.HttpVersion) + Assert.Equal(Some "create film", r1.Name) + Assert.Equal(1, r1.Headers.Length) + assertHeaderExact r1 "Content-Type" "application/json" + Assert.True(r1.Body.IsSome) + Assert.Contains("Death in Venice", r1.Body.Value) + Assert.Contains("Luchino Visconti", r1.Body.Value) + Assert.Contains("1971", r1.Body.Value) + Assert.Contains("poster", r1.Body.Value) + + // Request 2: PUT update film + let r2 = reqAt f 2 + Assert.Equal("PUT", r2.Method) + Assert.Contains("/api/v1/films", r2.Url) + Assert.Equal(Some "update film", r2.Name) + Assert.True(r2.Body.IsSome) + Assert.Contains("{{film_id}}", r2.Body.Value) + Assert.Contains("Benjamin Britten", r2.Body.Value) + Assert.Contains("1981", r2.Body.Value) + + // Request 3: GET all films + let r3 = reqAt f 3 + Assert.Equal("GET", r3.Method) + Assert.Equal(Some "get all films", r3.Name) + Assert.True(r3.Body.IsNone) + + // Request 4: GET single film with 
variable + let r4 = reqAt f 4 + Assert.Equal("GET", r4.Method) + Assert.Contains("{{film_id}}", r4.Url) + Assert.Equal(Some "get film", r4.Name) + + // Request 5: GET bad film (truncated UUID) + let r5 = reqAt f 5 + Assert.Equal("GET", r5.Method) + Assert.Contains("356e42a8-e659-406f-98", r5.Url) + Assert.Equal(Some "get bad film", r5.Name) + + // Request 6: DELETE film + let r6 = reqAt f 6 + Assert.Equal("DELETE", r6.Method) + Assert.Contains("{{film_id}}", r6.Url) + Assert.Equal(Some "delete film", r6.Name) + Assert.True(r6.Body.IsNone) + + // All requests should have HTTP/1.1 + for req in f.Requests do + Assert.Equal(Some "1.1", req.HttpVersion) + +// ═══════════════════════════════════════════════════════════════ +// 6. Clockify — 7 dense GET requests with API key auth +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: Clockify API requests`` () = + let content = loadCached "clockify.http" ClockifyUrl + let f = unwrap content + Assert.Equal(Microsoft, f.Dialect) + Assert.Equal(7, f.Requests.Length) + Assert.Equal(4, f.FileVariables.Length) + Assert.Equal("HOST", fst f.FileVariables[0]) + Assert.Equal("https://api.clockify.me/api/v1", snd f.FileVariables[0]) + Assert.Equal("USER", fst f.FileVariables[1]) + Assert.Equal("?", snd f.FileVariables[1]) + Assert.Equal("WORKSPACE", fst f.FileVariables[2]) + Assert.Equal("APIKEY", fst f.FileVariables[3]) + + // Every request: GET, has X-Api-Key and content-type, uses {{HOST}} + for req in f.Requests do + Assert.Equal("GET", req.Method) + Assert.Contains("{{HOST}}", req.Url) + Assert.Equal(2, req.Headers.Length) + assertHeaderExact req "X-Api-Key" "{{APIKEY}}" + assertHeaderExact req "content-type" "application/json" + + // Most requests have no body, except request 4 whose continuation + // line (?project=...&start=...) is parsed as body + for i in 0 .. 
f.Requests.Length - 1 do + if i <> 4 then + Assert.True((reqAt f i).Body.IsNone) + + // Request 0: /user + Assert.Contains("/user", (reqAt f 0).Url) + Assert.False((reqAt f 0).Url.Contains("time-entries")) + + // Request 1: /workspaces (just the list) + Assert.Contains("/workspaces", (reqAt f 1).Url) + Assert.False((reqAt f 1).Url.Contains("{{WORKSPACE}}")) + + // Request 2: /workspaces/{id}/clients + Assert.Contains("{{WORKSPACE}}/clients", (reqAt f 2).Url) + + // Request 3: /workspaces/{id}/projects + Assert.Contains("{{WORKSPACE}}/projects", (reqAt f 3).Url) + + // Request 4: time-entries with query params on continuation line (parsed as body) + let r4 = reqAt f 4 + Assert.Contains("{{USER}}/time-entries", r4.Url) + Assert.Contains("{{WORKSPACE}}", r4.Url) + Assert.True(r4.Body.IsSome) + Assert.Contains("project=", r4.Body.Value) + +// ═══════════════════════════════════════════════════════════════ +// 7. danvega — Spring Framework 7 resilience demo +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: danvega Spring resilience demo`` () = + let content = loadCached "danvega-quickbytes.http" DanvegaUrl + let f = unwrap content + Assert.Equal(Common, f.Dialect) + Assert.Equal(5, f.Requests.Length) + Assert.True(f.FileVariables.IsEmpty) + + // All requests target localhost:8080 + for req in f.Requests do + Assert.Contains("localhost:8080", req.Url) + + // Request 0: GET all restaurants + let r0 = reqAt f 0 + Assert.Equal("GET", r0.Method) + Assert.Contains("/api/restaurants/", r0.Url) + Assert.Equal(Some "Get All Restaurants", r0.Name) + Assert.True(r0.Headers.IsEmpty) + Assert.True(r0.Body.IsNone) + + // Request 1: GET restaurant menu (@Retryable demo) + let r1 = reqAt f 1 + Assert.Equal("GET", r1.Method) + Assert.Contains("rest-001/menu", r1.Url) + Assert.Equal(1, r1.Headers.Length) + assertHeaderExact r1 "Accept" "application/json" + Assert.True(r1.Body.IsNone) + Assert.True(r1.Comments.Length > 0) + 
Assert.True(r1.Comments |> List.exists (fun c -> c.Contains("retry") || c.Contains("Retry"))) + + // Request 2: POST assign driver (RetryTemplate demo) + let r2 = reqAt f 2 + Assert.Equal("POST", r2.Method) + Assert.Contains("/api/drivers/assign", r2.Url) + Assert.Contains("orderId=order-001", r2.Url) + Assert.Equal(1, r2.Headers.Length) + assertHeaderExact r2 "Accept" "application/json" + Assert.True(r2.Body.IsNone) + + // Request 3: GET lunch-rush (ConcurrencyLimit demo - platform threads) + let r3 = reqAt f 3 + Assert.Equal("GET", r3.Method) + Assert.Contains("lunch-rush", r3.Url) + Assert.False(r3.Url.Contains("virtual")) + Assert.Equal(1, r3.Headers.Length) + + // Request 4: GET lunch-rush-virtual (ConcurrencyLimit demo - virtual threads) + let r4 = reqAt f 4 + Assert.Equal("GET", r4.Method) + Assert.Contains("lunch-rush-virtual", r4.Url) + Assert.Equal(1, r4.Headers.Length) + +// ═══════════════════════════════════════════════════════════════ +// 8. flipChandler — Portuguese project management API +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: flipChandler project management API`` () = + let content = loadCached "flipchandler.http" FlipChandlerUrl + let f = unwrap content + Assert.Equal(Common, f.Dialect) + Assert.Equal(8, f.Requests.Length) + + // Request 0: GET all resources + let r0 = reqAt f 0 + Assert.Equal("GET", r0.Method) + Assert.Contains("/recursos", r0.Url) + Assert.True(r0.Headers.IsEmpty) + + // Requests 1-4: Four POSTs creating different employees + let r1 = reqAt f 1 + Assert.Equal("POST", r1.Method) + Assert.Contains("/recursos", r1.Url) + assertHeaderExact r1 "Content-Type" "application/json" + Assert.True(r1.Body.IsSome) + Assert.Contains("Roger Guedes", r1.Body.Value) + Assert.Contains("72152492048", r1.Body.Value) + Assert.Contains("Product Owner", r1.Body.Value) + Assert.Contains("2021-12-20", r1.Body.Value) + + let r2 = reqAt f 2 + Assert.Equal("POST", r2.Method) + 
Assert.True(r2.Body.IsSome) + Assert.Contains("Eva Mendes", r2.Body.Value) + Assert.Contains("Scrum Master", r2.Body.Value) + + let r3 = reqAt f 3 + Assert.Equal("POST", r3.Method) + Assert.True(r3.Body.IsSome) + Assert.Contains("Immanuel Kant", r3.Body.Value) + Assert.Contains("Backend Developer", r3.Body.Value) + + let r4 = reqAt f 4 + Assert.Equal("POST", r4.Method) + Assert.True(r4.Body.IsSome) + Assert.Contains("Priscila Fantin", r4.Body.Value) + Assert.Contains("Analista de Requisitos", r4.Body.Value) + + // Request 5: GET by UUID (findByPk) + let r5 = reqAt f 5 + Assert.Equal("GET", r5.Method) + Assert.Contains("149b32d4-302a-463f-ab97-045641df38bf", r5.Url) + + // Request 6: PATCH update name + let r6 = reqAt f 6 + Assert.Equal("PATCH", r6.Method) + Assert.Contains("149b32d4-302a-463f-ab97-045641df38bf", r6.Url) + assertHeaderExact r6 "Content-Type" "application/json" + Assert.True(r6.Body.IsSome) + Assert.Contains("Boneco Sifuroso", r6.Body.Value) + + // Request 7: DELETE + let r7 = reqAt f 7 + Assert.Equal("DELETE", r7.Method) + Assert.Contains("recursos", r7.Url) + Assert.True(r7.Body.IsNone) + +// ═══════════════════════════════════════════════════════════════ +// 9. 
jmfayard — Spring Boot policy CRUD with ---- separators +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: jmfayard Spring Boot policies`` () = + let content = loadCached "jmfayard-spring.http" JmfayardUrl + let f = unwrap content + Assert.Equal(Common, f.Dialect) + Assert.Equal(8, f.Requests.Length) + + // Request 0: Health check — the "----" separator is parsed as body + let r0 = reqAt f 0 + Assert.Equal("GET", r0.Method) + Assert.Equal("http://localhost:8080/api/1/health", r0.Url) + Assert.Equal(Some "Check whether server is up and running", r0.Name) + Assert.True(r0.Body.IsSome) + Assert.Contains("----", r0.Body.Value) + Assert.True(r0.Headers.IsEmpty) + + // Request 1: Spring actuator — also has "----" parsed as body + let r1 = reqAt f 1 + Assert.Equal("GET", r1.Method) + Assert.Contains("/actuator", r1.Url) + Assert.Equal(Some "Spring actuator", r1.Name) + Assert.True(r1.Body.IsSome) + Assert.Contains("----", r1.Body.Value) + + // Request 2: GET policies + let r2 = reqAt f 2 + Assert.Equal("GET", r2.Method) + Assert.Equal("http://localhost:8080/policies", r2.Url) + Assert.Equal(Some "Get policies", r2.Name) + + // Request 3: POST create policy + let r3 = reqAt f 3 + Assert.Equal("POST", r3.Method) + Assert.Equal("http://localhost:8080/policies", r3.Url) + Assert.Equal(Some "Create policies", r3.Name) + Assert.Equal(2, r3.Headers.Length) + assertHeaderExact r3 "Content-Type" "application/json" + assertHeaderExact r3 "Accept" "application/json" + Assert.True(r3.Body.IsSome) + Assert.Contains("What is this", r3.Body.Value) + Assert.Contains("INACTIVE", r3.Body.Value) + Assert.Contains("2026-01-01", r3.Body.Value) + Assert.Contains("2026-12-01", r3.Body.Value) + + // Request 4: GET specific policy + let r4 = reqAt f 4 + Assert.Equal("GET", r4.Method) + Assert.Contains("policies/1", r4.Url) + Assert.Equal(Some "GET a particular policy", r4.Name) + + // Request 5: PUT update policy + let r5 = reqAt f 5 + 
Assert.Equal("PUT", r5.Method) + Assert.Contains("policies/1", r5.Url) + Assert.Equal(Some "Update policy", r5.Name) + Assert.Equal(2, r5.Headers.Length) + assertHeaderExact r5 "Content-Type" "application/json" + Assert.True(r5.Body.IsSome) + Assert.Contains("\"id\": 1", r5.Body.Value) + Assert.Contains("obsolete", r5.Body.Value) + Assert.Contains("createdAt", r5.Body.Value) + Assert.Contains("updatedAt", r5.Body.Value) + + // Request 6: DELETE policy + let r6 = reqAt f 6 + Assert.Equal("DELETE", r6.Method) + Assert.Contains("policies/1", r6.Url) + Assert.Equal(Some "Delete policy", r6.Name) + Assert.True(r6.Body.IsNone) + + // Request 7: Extra GET user by name at end of file + let r7 = reqAt f 7 + Assert.Equal("GET", r7.Method) + Assert.Contains("policies/1", r7.Url) + Assert.Equal(Some "GET user by name", r7.Name) + Assert.Equal(2, r7.Headers.Length) + assertHeaderExact r7 "Content-Type" "application/json" + assertHeaderExact r7 "Accept" "application/json" + +// ═══════════════════════════════════════════════════════════════ +// 10. 
UKP-SQuARE — ML platform with OAuth, @name, response refs +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: UKP-SQuARE ML platform API`` () = + let content = loadCached "square-core.http" SquareCoreUrl + let f = unwrap content + Assert.Equal(Microsoft, f.Dialect) + Assert.Equal(7, f.Requests.Length) + + // File variables + Assert.Equal("hostname", fst f.FileVariables[0]) + Assert.Equal("test.square.ukp-lab.de", snd f.FileVariables[0]) + // Multiple inline @token variables referencing response body + let tokenVars = f.FileVariables |> List.filter (fun (k, _) -> k = "token") + Assert.True(tokenVars.Length >= 4) + + for (_, v) in tokenVars do + Assert.Contains("get_token.response.body.access_token", v) + + // Request 0: OAuth token exchange + let r0 = reqAt f 0 + Assert.Equal("POST", r0.Method) + Assert.Equal(Some "get_token", r0.Name) + Assert.Contains("{{hostname}}", r0.Url) + Assert.Contains("openid-connect/token", r0.Url) + Assert.Equal(Some "1.1", r0.HttpVersion) + Assert.Equal(2, r0.Headers.Length) + assertHeader r0 "Host" "{{hostname}}" + assertHeader r0 "content-type" "x-www-form-urlencoded" + Assert.True(r0.Body.IsSome) + Assert.Contains("client_id=square-api", r0.Body.Value) + Assert.Contains("grant_type=password", r0.Body.Value) + Assert.Contains("username={{username}}", r0.Body.Value) + Assert.Contains("password={{password}}", r0.Body.Value) + + // Request 1: GET deployed models + let r1 = reqAt f 1 + Assert.Equal("GET", r1.Method) + Assert.Equal(Some "get_deployed_models", r1.Name) + Assert.Contains("deployed-models", r1.Url) + Assert.Equal(Some "1.1", r1.HttpVersion) + assertHeader r1 "Authorization" "Bearer {{token}}" + assertHeader r1 "Host" "{{hostname}}" + + // Request 2: DELETE remove model + let r2 = reqAt f 2 + Assert.Equal("DELETE", r2.Method) + Assert.Equal(Some "remove_model", r2.Name) + Assert.Contains("{{model_name}}", r2.Url) + Assert.Equal(Some "1.1", r2.HttpVersion) + assertHeader 
r2 "Authorization" "Bearer {{token}}" + + // Request 3: POST deploy all models + let r3 = reqAt f 3 + Assert.Equal("POST", r3.Method) + Assert.Equal(Some "deploy_all_models", r3.Name) + Assert.Contains("/db/deploy", r3.Url) + + // Request 4: GET datastores + let r4 = reqAt f 4 + Assert.Equal("GET", r4.Method) + Assert.Equal(Some "get_datastores", r4.Name) + Assert.Contains("/datastores", r4.Url) + + // Request 5: GET datastore indices + let r5 = reqAt f 5 + Assert.Equal("GET", r5.Method) + Assert.Equal(Some "get_datastores_indices", r5.Name) + Assert.Contains("/datastores/nq/indices", r5.Url) + + // Request 6: POST deploy specific model + let r6 = reqAt f 6 + Assert.Equal("POST", r6.Method) + Assert.Equal(Some "deploy_model", r6.Name) + Assert.Contains("{{model_identifier}}", r6.Url) + Assert.Equal(Some "1.1", r6.HttpVersion) + + // Every request (except first) should have Bearer auth + for i in 1 .. f.Requests.Length - 1 do + assertHeader (reqAt f i) "Authorization" "Bearer {{token}}" + +// ═══════════════════════════════════════════════════════════════ +// 11. 
Panasonic Comfort Cloud — 223-line IoT API +// ═══════════════════════════════════════════════════════════════ + +[<Fact>] +let ``real-world download: Panasonic Comfort Cloud IoT API`` () = + let content = loadCached "panasonic-cloud.http" PanasonicUrl + let f = unwrap content + Assert.Equal(Microsoft, f.Dialect) + Assert.True(f.Requests.Length >= 10) + Assert.Equal("APP-VERSION", fst f.FileVariables[0]) + Assert.Equal("1.20.1", snd f.FileVariables[0]) + + // Request 0: Login + let login = f.Requests |> List.find (fun r -> r.Name = Some "login") + Assert.Equal("POST", login.Method) + Assert.Equal("https://accsmart.panasonic.com/auth/login", login.Url) + Assert.Equal(Some "1.1", login.HttpVersion) + Assert.True(login.Headers.Length >= 7) + assertHeaderExact login "X-APP-TYPE" "1" + assertHeader login "X-APP-VERSION" "{{APP-VERSION}}" + assertHeaderExact login "User-Agent" "G-RAC" + assertHeaderExact login "X-APP-TIMESTAMP" "1" + assertHeaderExact login "X-APP-NAME" "Comfort Cloud" + assertHeaderExact login "X-CFC-API-KEY" "Comfort Cloud" + assertHeader login "Accept" "application/json" + assertHeader login "Content-Type" "application/json" + Assert.True(login.Body.IsSome) + Assert.Contains("\"language\": 0", login.Body.Value) + Assert.Contains("\"loginId\": \"{{$dotenv USERNAME}}\"", login.Body.Value) + Assert.Contains("\"password\": \"{{$dotenv PASSWORD}}\"", login.Body.Value) + + // Request 1: Device group (named "device") + let device = f.Requests |> List.find (fun r -> r.Name = Some "device") + Assert.Equal("GET", device.Method) + Assert.Contains("device/group", device.Url) + Assert.Equal(Some "1.1", device.HttpVersion) + assertHeader device "X-User-Authorization" "{{login.response.body.$.uToken}}" + assertHeaderExact device "X-APP-TYPE" "1" + assertHeaderExact device "User-Agent" "G-RAC" + Assert.True(device.Body.IsNone) + + // Requests referencing device GUID via response variable in URL + // Only 2 requests have device.response.body in the URL 
(deviceStatus/now and deviceStatus) + let deviceGuidRequests = + f.Requests |> List.filter (fun r -> r.Url.Contains("device.response.body")) + + Assert.True(deviceGuidRequests.Length >= 2) + + for req in deviceGuidRequests do + Assert.Contains("deviceGuid", req.Url) + assertHeader req "X-User-Authorization" "login.response.body" + + // Device control POSTs with nested JSON + let controls = + f.Requests + |> List.filter (fun r -> r.Method = "POST" && r.Url.Contains("control")) + + Assert.True(controls.Length >= 2) + + // First control: full parameter set + let fullControl = + controls + |> List.find (fun r -> r.Body.IsSome && r.Body.Value.Contains("operationMode")) + + Assert.Contains("\"operate\": 1", fullControl.Body.Value) + Assert.Contains("\"operationMode\": 3", fullControl.Body.Value) + Assert.Contains("\"temperatureSet\": 22.5", fullControl.Body.Value) + Assert.Contains("\"ecoMode\": null", fullControl.Body.Value) + Assert.Contains("\"airSwingUD\": null", fullControl.Body.Value) + Assert.Contains("\"fanSpeed\": null", fullControl.Body.Value) + Assert.Contains("deviceGuid", fullControl.Body.Value) + + // Second control: just temperature + let tempControl = + controls |> List.find (fun r -> r.Body.IsSome && r.Body.Value.Contains("21.0")) + + Assert.Contains("\"temperatureSet\": 21.0", tempControl.Body.Value) + + // History data POST + let history = f.Requests |> List.find (fun r -> r.Url.Contains("deviceHistoryData")) + Assert.Equal("POST", history.Method) + Assert.True(history.Body.IsSome) + Assert.Contains("\"dataMode\": 0", history.Body.Value) + Assert.Contains("\"date\": \"20190610\"", history.Body.Value) + Assert.Contains("osTimezone", history.Body.Value) + + // Agreement endpoints + let agreementGet = + f.Requests + |> List.filter (fun r -> r.Method = "GET" && r.Url.Contains("agreement")) + + Assert.True(agreementGet.Length >= 3) + + let agreementPut = + f.Requests + |> List.find (fun r -> r.Method = "PUT" && r.Url.Contains("agreement")) + + 
Assert.True(agreementPut.Body.IsSome) + Assert.Contains("\"agreementStatus\": 0", agreementPut.Body.Value) + Assert.Contains("\"type\": 0", agreementPut.Body.Value) + + // Requests with rich headers (>= 7 headers) + let richHeaderRequests = f.Requests |> List.filter (fun r -> r.Headers.Length >= 7) + Assert.True(richHeaderRequests.Length >= 5) + + // Requests with Accept-Encoding and Connection headers + let withAcceptEncoding = + f.Requests + |> List.filter (fun r -> r.Headers |> List.exists (fun (k, _) -> k = "Accept-Encoding")) + + Assert.True(withAcceptEncoding.Length >= 1) + + for req in withAcceptEncoding do + assertHeaderExact req "Accept-Encoding" "gzip" + assertHeaderExact req "Connection" "Keep-Alive" diff --git a/tests/Nap.Core.Tests/coverage.runsettings b/src/DotHttp.Tests/coverage.runsettings similarity index 90% rename from tests/Nap.Core.Tests/coverage.runsettings rename to src/DotHttp.Tests/coverage.runsettings index 0425249..990fe43 100644 --- a/tests/Nap.Core.Tests/coverage.runsettings +++ b/src/DotHttp.Tests/coverage.runsettings @@ -5,7 +5,7 @@ <DataCollector friendlyName="XPlat Code Coverage"> <Configuration> <Format>cobertura,lcov</Format> - <Include>[Nap.Core]*</Include> + <Include>[DotHttp]*</Include> <IncludeTestAssembly>false</IncludeTestAssembly> </Configuration> </DataCollector> diff --git a/src/DotHttp/DotHttp.fsproj b/src/DotHttp/DotHttp.fsproj new file mode 100644 index 0000000..44daa1a --- /dev/null +++ b/src/DotHttp/DotHttp.fsproj @@ -0,0 +1,18 @@ +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <GenerateDocumentationFile>true</GenerateDocumentationFile> + <PackageId>DotHttp</PackageId> + <Description>Standalone FParsec-based parser for .http files. 
Supports Microsoft (VS Code REST Client) and JetBrains (IntelliJ HTTP Client) dialects.</Description> + </PropertyGroup> + + <ItemGroup> + <Compile Include="Types.fs" /> + <Compile Include="Parser.fs" /> + </ItemGroup> + + <ItemGroup> + <PackageReference Include="FParsec" Version="1.1.1" /> + </ItemGroup> + +</Project> diff --git a/src/DotHttp/Parser.fs b/src/DotHttp/Parser.fs new file mode 100644 index 0000000..c30bda5 --- /dev/null +++ b/src/DotHttp/Parser.fs @@ -0,0 +1,428 @@ +module DotHttp.Parser + +open FParsec +open DotHttp + +// ─── Constants ───────────────────────────────────────────────── + +[<Literal>] +let private Separator = "###" + +[<Literal>] +let private ScriptFilePrefix = "file:" + +// ─── Known methods ───────────────────────────────────────────── + +let private httpMethods = + [ "GET" + "POST" + "PUT" + "PATCH" + "DELETE" + "HEAD" + "OPTIONS" + "TRACE" + "CONNECT" ] + +let private unsupportedMethods = [ "WEBSOCKET"; "GRPC"; "GRAPHQL" ] + +// ─── Discriminated union for parsed lines ────────────────────── + +type private Part = + | PSeparator of string option + | PComment of string + | PMsName of string + | PMsVar of string * string + | PMethod of string * string * string option + | PHeader of string * string + | PBody of string + | PPreScript of string + | PPostScript of string + | PUnsupported of string + | PBlank + +// ─── Utility parsers ─────────────────────────────────────────── + +let private trimmedRestOfLine: Parser<string, unit> = + restOfLine true |>> fun s -> s.Trim() + +let private lineEnd': Parser<unit, unit> = skipNewline <|> eof + +// ─── Separator: ### [optional name] ──────────────────────────── + +let private pSeparator: Parser<Part, unit> = + pstring Separator >>. 
trimmedRestOfLine + |>> fun s -> + let trimmed = s.Trim() + + let name = + if trimmed = "" then + None + else + let stripped = + if trimmed.StartsWith "#" then + trimmed.TrimStart('#').Trim() + elif trimmed.StartsWith "//" then + trimmed.Substring(2).Trim() + else + trimmed + + if stripped = "" then None else Some stripped + + PSeparator name + +// ─── Comment lines: # or // ──────────────────────────────────── + +let private pHashComment: Parser<Part, unit> = + pchar '#' >>. notFollowedBy (pstring "##") >>. restOfLine true + |>> fun s -> PComment(s.Trim()) + +let private pSlashComment: Parser<Part, unit> = + pstring "//" >>. restOfLine true |>> fun s -> PComment(s.Trim()) + +// ─── Microsoft @variable = value ─────────────────────────────── + +let private pMsVar: Parser<Part, unit> = + pchar '@' + >>. many1Satisfy (fun c -> c <> ' ' && c <> '\t' && c <> '=' && c <> '\n' && c <> '\r') + .>> spaces + .>> pchar '=' + .>> spaces + .>>. trimmedRestOfLine + |>> PMsVar + +// ─── Microsoft # @name directive ─────────────────────────────── + +let private pMsName: Parser<Part, unit> = + pchar '#' + >>. spaces + >>. pchar '@' + >>. pstring "name" + >>. spaces1 + >>. trimmedRestOfLine + |>> PMsName + +// ─── Method line: METHOD URL [HTTP/version] ──────────────────── + +let private pMethodLine: Parser<Part, unit> = + let pMethod = + httpMethods + |> List.map (fun m -> attempt (stringCIReturn m (m.ToUpperInvariant()))) + |> choice + + pMethod .>> spaces1 + .>>. many1Satisfy (fun c -> c <> ' ' && c <> '\t' && c <> '\n' && c <> '\r') + .>>. opt ( + attempt ( + pchar ' ' + >>. spaces + >>. pstring "HTTP/" + >>. 
many1Satisfy (fun c -> c <> '\n' && c <> '\r') + |>> fun v -> v.Trim() + ) + ) + .>> lineEnd' + |>> fun ((m, url), ver) -> PMethod(m, url, ver) + +// ─── Unsupported method lines (WEBSOCKET, GRPC, GRAPHQL) ────── + +let private pUnsupported: Parser<Part, unit> = + unsupportedMethods |> List.map (fun m -> attempt (stringCIReturn m m)) |> choice + .>> restOfLine true + |>> PUnsupported + +// ─── Header: Key: Value ──────────────────────────────────────── + +let private pHeader: Parser<Part, unit> = + many1Satisfy (fun c -> c <> ':' && c <> '\n' && c <> '\r' && c <> ' ' && c <> '\t') + .>> pchar ':' + .>> spaces + .>>. trimmedRestOfLine + |>> PHeader + +// ─── JetBrains inline scripts: < {% ... %} and > {% ... %} ──── + +let private pInlinePreScript: Parser<Part, unit> = + pstring "< {%" >>. manyCharsTill anyChar (pstring "%}") .>> optional skipNewline + |>> fun s -> PPreScript(s.Trim()) + +let private pInlinePostScript: Parser<Part, unit> = + pstring "> {%" >>. manyCharsTill anyChar (pstring "%}") .>> optional skipNewline + |>> fun s -> PPostScript(s.Trim()) + +// ─── JetBrains file references: < file.js and > file.js ─────── + +let private pFilePreScript: Parser<Part, unit> = + pstring "< " >>. notFollowedBy (pstring "{%") >>. trimmedRestOfLine + |>> fun s -> PPreScript(sprintf "%s%s" ScriptFilePrefix s) + +let private pFilePostScript: Parser<Part, unit> = + pstring "> " >>. notFollowedBy (pstring "{%") >>. trimmedRestOfLine + |>> fun s -> PPostScript(sprintf "%s%s" ScriptFilePrefix s) + +// ─── Body line (fallback) ────────────────────────────────────── + +let private pBody: Parser<Part, unit> = + notFollowedBy (pstring Separator) + >>. notFollowedBy (attempt pMethodLine) + >>. 
many1Satisfy (fun c -> c <> '\n' && c <> '\r') + .>> optional skipNewline + |>> PBody + +// ─── Blank line ──────────────────────────────────────────────── + +/// Matches any whitespace-only content (at least one whitespace char consumed) +let private pBlank: Parser<Part, unit> = skipNewline |>> fun _ -> PBlank + +// ─── Combined part parser (order matters) ────────────────────── + +let private pPart: Parser<Part, unit> = + choice + [ attempt pSeparator + attempt pMsName + attempt pMsVar + attempt pInlinePreScript + attempt pInlinePostScript + attempt pFilePreScript + attempt pFilePostScript + attempt pUnsupported + attempt pMethodLine + attempt pHeader + attempt pHashComment + attempt pSlashComment + attempt pBlank + attempt pBody ] + +// ─── Build HttpRequest from accumulated parts ────────────────── + +let private buildRequest (parts: Part list) : HttpRequest option = + let mutable name = None + let mutable method' = None + let mutable url = None + let mutable httpVer = None + let mutable headers = [] + let mutable bodyLines = [] + let mutable preScript = None + let mutable postScript = None + let mutable comments = [] + + for p in parts do + match p with + | PSeparator n -> + match n with + | Some s -> name <- Some s + | None -> () + | PComment c -> comments <- comments @ [ c ] + | PMsName n -> name <- Some n + | PMsVar _ -> () + | PMethod(m, u, v) -> + method' <- Some m + url <- Some u + httpVer <- v + | PHeader(k, v) -> headers <- headers @ [ (k, v) ] + | PBody l -> bodyLines <- bodyLines @ [ l ] + | PPreScript s -> preScript <- Some s + | PPostScript s -> postScript <- Some s + | PUnsupported _ -> () + | PBlank -> () + + match method', url with + | Some m, Some u -> + let body = + let joined = bodyLines |> String.concat "\n" |> (fun s -> s.Trim()) + if joined = "" then None else Some joined + + Some + { Name = name + Method = m + Url = u + HttpVersion = httpVer + Headers = headers + Body = body + PreScript = preScript + PostScript = postScript + Comments = 
comments } + | _ -> None + +// ─── Split parts into per-request groups at separators ───────── + +let private splitAtSeparators (parts: Part list) : Part list list = + let mutable groups: Part list list = [] + let mutable current: Part list = [] + + for p in parts do + match p with + | PSeparator _ -> + if not (List.isEmpty current) then + groups <- groups @ [ current ] + + current <- [ p ] + | _ -> current <- current @ [ p ] + + if not (List.isEmpty current) then + groups <- groups @ [ current ] + + groups + +// ─── Dialect detection ───────────────────────────────────────── + +let private detectDialect (parts: Part list) : HttpDialect = + let hasMsFeatures = + parts + |> List.exists (fun p -> + match p with + | PMsVar _ + | PMsName _ -> true + | _ -> false) + + let hasJbFeatures = + parts + |> List.exists (fun p -> + match p with + | PPreScript _ + | PPostScript _ -> true + | _ -> false) + + if hasJbFeatures then JetBrains + elif hasMsFeatures then Microsoft + else Common + +// ─── File-level variable extraction ──────────────────────────── + +let private extractFileVars (parts: Part list) : (string * string) list = + parts + |> List.choose (fun p -> + match p with + | PMsVar(k, v) -> Some(k, v) + | _ -> None) + +// ─── Public API ──────────────────────────────────────────────── + +/// Parse state for line-by-line processing +type private ParseState = + | BeforeMethod // before any method line in current request + | InHeaders // after method line, parsing headers + | InBody // after blank line following headers + | InScript of prefix: string // accumulating multiline script block + +[<Literal>] +let private ScriptOpen = "{%" + +[<Literal>] +let private ScriptClose = "%}" + +/// Parse line by line with state tracking (handles multiline scripts) +let private parseAll (input: string) : Part list = + let lines = input.Split [| '\n' |] |> Array.toList + let acc = ResizeArray<Part>() + let mutable state = BeforeMethod + let mutable scriptLines = ResizeArray<string>() + 
    // Main line loop. Each line is classified against the current ParseState;
    // lineInput re-appends the '\n' stripped by Split so line-oriented FParsec
    // parsers (pSeparator, pMethodLine, ...) see a complete line.
    for line in lines do
        let lineInput = line + "\n"
        let trimmed = line.Trim()

        match state with
        | InScript prefix ->
            // Accumulating multiline script until closing %}
            let closeIdx = line.IndexOf ScriptClose

            if closeIdx >= 0 then
                // Closing line: keep any fragment before "%}", then emit the
                // whole block as one PPre/PPostScript part.
                let fragment = line.Substring(0, closeIdx).Trim()

                if fragment <> "" then
                    scriptLines.Add fragment

                let content = scriptLines |> String.concat "\n" |> (fun s -> s.Trim())

                // prefix was set to "<" (pre-request) or ">" (post-request)
                // when the block opened.
                let part =
                    if prefix = "<" then
                        PPreScript content
                    else
                        PPostScript content

                acc.Add part
                scriptLines <- ResizeArray<string>()
                state <- BeforeMethod
            else
                scriptLines.Add line
        | _ ->
            if trimmed = "" then
                // Blank lines are state transitions: after headers they mark
                // the start of the body; inside the body they are preserved
                // as empty body lines.
                match state with
                | InBody -> acc.Add(PBody "")
                | InHeaders ->
                    acc.Add PBlank
                    state <- InBody
                | BeforeMethod -> acc.Add PBlank
                | InScript _ -> ()
            else
                // Check for multiline script block start
                // (an opener with "%}" on the same line is NOT multiline —
                // it is left for the single-line inline parsers instead).
                let isPreScriptStart =
                    trimmed.StartsWith "< {%" && not (trimmed.Contains ScriptClose)

                let isPostScriptStart =
                    trimmed.StartsWith "> {%" && not (trimmed.Contains ScriptClose)

                if isPreScriptStart then
                    // Substring(4) skips the 4-char "< {%" opener; any trailing
                    // text on the opener line is the first script line.
                    let after = trimmed.Substring(4).Trim()
                    scriptLines <- ResizeArray<string>()

                    if after <> "" then
                        scriptLines.Add after

                    state <- InScript "<"
                elif isPostScriptStart then
                    let after = trimmed.Substring(4).Trim()
                    scriptLines <- ResizeArray<string>()

                    if after <> "" then
                        scriptLines.Add after

                    state <- InScript ">"
                else
                    match state with
                    | InBody ->
                        // In the body, only a separator or a new method line
                        // ends the request; everything else is body text.
                        match run (attempt pSeparator) lineInput with
                        | Success(part, _, _) ->
                            acc.Add part
                            state <- BeforeMethod
                        | Failure _ ->
                            match run (attempt pMethodLine) lineInput with
                            | Success(part, _, _) ->
                                acc.Add part
                                state <- InHeaders
                            | Failure _ ->
                                // Check for script file references in body state
                                match run (attempt pFilePostScript) lineInput with
                                | Success(part, _, _) -> acc.Add part
                                | Failure _ ->
                                    match run (attempt pFilePreScript) lineInput with
                                    | Success(part, _, _) -> acc.Add part
                                    | Failure _ ->
acc.Add(PBody trimmed) + | InScript _ -> () + | _ -> + match run pPart lineInput with + | Success(part, _, _) -> + acc.Add part + + match part with + | PMethod _ -> state <- InHeaders + | PSeparator _ -> state <- BeforeMethod + | _ -> () + | Failure _ -> acc.Add(PBody trimmed) + + acc |> Seq.toList + +/// Parse a .http/.rest file into an HttpFile structure +let parse (input: string) : Result<HttpFile, string> = + let parts = parseAll input + let groups = splitAtSeparators parts + let requests = groups |> List.choose buildRequest + + if List.isEmpty requests then + Result.Error "No HTTP requests found in file" + else + Result.Ok + { Requests = requests + FileVariables = extractFileVars parts + Dialect = detectDialect parts } diff --git a/src/DotHttp/Types.fs b/src/DotHttp/Types.fs new file mode 100644 index 0000000..f0b8366 --- /dev/null +++ b/src/DotHttp/Types.fs @@ -0,0 +1,25 @@ +namespace DotHttp + +/// Dialect of .http file +type HttpDialect = + | Microsoft + | JetBrains + | Common + +/// A single parsed HTTP request from a .http file +type HttpRequest = + { Name: string option + Method: string + Url: string + HttpVersion: string option + Headers: (string * string) list + Body: string option + PreScript: string option + PostScript: string option + Comments: string list } + +/// A fully parsed .http file +type HttpFile = + { Requests: HttpRequest list + FileVariables: (string * string) list + Dialect: HttpDialect } diff --git a/src/Nap.Cli/Nap.Cli.fsproj b/src/Nap.Cli/Nap.Cli.fsproj deleted file mode 100644 index d2236ab..0000000 --- a/src/Nap.Cli/Nap.Cli.fsproj +++ /dev/null @@ -1,17 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <OutputType>Exe</OutputType> - <AssemblyName>napper</AssemblyName> - </PropertyGroup> - - <ItemGroup> - <Compile Include="Program.fs" /> - </ItemGroup> - - <ItemGroup> - <ProjectReference Include="..\Nap.Core\Nap.Core.fsproj" /> - </ItemGroup> - - -</Project> diff --git a/src/Nap.Cli/Program.fs b/src/Nap.Cli/Program.fs 
deleted file mode 100644 index 4064c8c..0000000 --- a/src/Nap.Cli/Program.fs +++ /dev/null @@ -1,306 +0,0 @@ -open System -open System.IO -open Nap.Core - -/// Parse CLI arguments into a structured form -type CliArgs = { - Command : string // "run", "check", "generate", "help" - SubCommand : string option // e.g. "openapi" for "generate openapi" - File : string option - Env : string option - Vars : Map<string, string> - Output : string // "pretty", "junit", "json", "ndjson" - OutputDir : string option // --output-dir for generate command - Verbose : bool -} - -let parseArgs (argv: string array) : CliArgs = - let mutable command = "help" - let mutable subCommand = None - let mutable file = None - let mutable env = None - let mutable vars = Map.empty - let mutable output = "pretty" - let mutable outputDir = None - let mutable verbose = false - let mutable i = 0 - - if argv.Length > 0 then - command <- argv[0] - i <- 1 - - // For "generate openapi", consume the subcommand - if command = "generate" && i < argv.Length && not (argv[i].StartsWith "--") then - subCommand <- Some argv[i] - i <- i + 1 - - while i < argv.Length do - match argv[i] with - | "--env" when i + 1 < argv.Length -> - env <- Some argv[i + 1] - i <- i + 2 - | "--var" when i + 1 < argv.Length -> - let parts = argv[i + 1].Split([|'='|], 2) - if parts.Length = 2 then - vars <- vars |> Map.add (parts[0].Trim()) (parts[1].Trim()) - i <- i + 2 - | "--output" when i + 1 < argv.Length -> - output <- argv[i + 1] - i <- i + 2 - | "--output-dir" when i + 1 < argv.Length -> - outputDir <- Some argv[i + 1] - i <- i + 2 - | "--verbose" -> - verbose <- true - i <- i + 1 - | arg when not (arg.StartsWith "--") && file.IsNone -> - file <- Some arg - i <- i + 1 - | _ -> - i <- i + 1 - - { Command = command; SubCommand = subCommand; File = file; Env = env - Vars = vars; Output = output; OutputDir = outputDir; Verbose = verbose } - -let printHelp () = - printfn "Nap — API testing tool" - printfn "" - printfn "Usage:" - 
printfn " nap run <file|folder> Run a .nap file, .naplist playlist, or folder" - printfn " nap check <file> Validate a .nap or .naplist file" - printfn " nap generate openapi <spec> --output-dir <dir> Generate .nap files from OpenAPI spec" - printfn " nap help Show this help" - printfn "" - printfn "Options:" - printfn " --env <name> Environment name (loads .napenv.<name>)" - printfn " --var <key=value> Variable override (repeatable)" - printfn " --output <format> Output: pretty (default), junit, json, ndjson" - printfn " --output-dir <dir> Output directory for generate command" - printfn " --verbose Enable debug-level logging" - -/// Print result as ndjson and return whether it passed -let private printNdjson (r: NapResult) : bool = - printfn "%s" (Output.formatJson r) - Console.Out.Flush() - r.Passed - -/// Format and print results, return exit code -let private formatAndExit (output: string) (results: NapResult list) : int = - match output with - | "junit" -> printf "%s" (Output.formatJUnit results) - | "json" -> printf "%s" (Output.formatJsonArray results) - | _ -> - for r in results do - printf "%s" (Output.formatPretty r) - printf "%s" (Output.formatSummary results) - if results |> List.forall (fun r -> r.Passed) then 0 else 1 - -/// Run all .nap files in a directory -let private runDirectory (args: CliArgs) (dirPath: string) : int = - let files = Directory.GetFiles(dirPath, "*.nap") |> Array.sort - if files.Length = 0 then - eprintfn "No .nap files found in %s" dirPath - 2 - elif args.Output = "ndjson" then - let passed = files |> Array.forall (fun f -> - Runner.runNapFile f args.Vars args.Env |> Async.RunSynchronously |> printNdjson) - if passed then 0 else 1 - else - files - |> Array.map (fun f -> Runner.runNapFile f args.Vars args.Env |> Async.RunSynchronously) - |> Array.toList - |> formatAndExit args.Output - -/// Merge playlist vars with CLI overrides -let private mergeVars (playlist: NapPlaylist) (cliVars: Map<string, string>) : Map<string, string> = 
- let mutable v = playlist.Vars - for kv in cliVars do - v <- v |> Map.add kv.Key kv.Value - v - -/// Collect results from playlist steps recursively -let rec private collectSteps (steps: PlaylistStep list) (vars: Map<string, string>) (baseDir: string) (env: string option) : NapResult list = - steps |> List.collect (fun step -> - let full p = Path.GetFullPath(Path.Combine(baseDir, p)) - match step with - | NapFileStep p -> - [Runner.runNapFile (full p) vars env |> Async.RunSynchronously] - | FolderRef p -> - Directory.GetFiles(full p, "*.nap") - |> Array.sort - |> Array.map (fun f -> Runner.runNapFile f vars env |> Async.RunSynchronously) - |> Array.toList - | PlaylistRef p -> - let fp = full p - match File.ReadAllText(fp) |> Parser.parseNapList with - | Result.Ok nested -> collectSteps nested.Steps vars (Path.GetDirectoryName fp) env - | Result.Error _ -> [] - | ScriptStep p -> - [Runner.runScript (full p) |> Async.RunSynchronously] - ) - -/// Stream playlist steps as ndjson, return whether all passed -let rec private streamSteps (steps: PlaylistStep list) (vars: Map<string, string>) (baseDir: string) (env: string option) : bool = - steps |> List.forall (fun step -> - let full p = Path.GetFullPath(Path.Combine(baseDir, p)) - match step with - | NapFileStep p -> - Runner.runNapFile (full p) vars env |> Async.RunSynchronously |> printNdjson - | FolderRef p -> - Directory.GetFiles(full p, "*.nap") - |> Array.sort - |> Array.forall (fun f -> Runner.runNapFile f vars env |> Async.RunSynchronously |> printNdjson) - | PlaylistRef p -> - let fp = full p - match File.ReadAllText(fp) |> Parser.parseNapList with - | Result.Ok nested -> streamSteps nested.Steps vars (Path.GetDirectoryName fp) env - | Result.Error _ -> false - | ScriptStep p -> - Runner.runScript (full p) |> Async.RunSynchronously |> printNdjson - ) - -/// Run a .naplist playlist -let private runPlaylist (args: CliArgs) (filePath: string) : int = - let content = File.ReadAllText(filePath) - match 
Parser.parseNapList content with - | Result.Error msg -> - Logger.error $"Playlist parse error: {msg}" - eprintfn "Error parsing playlist: %s" msg - 2 - | Result.Ok playlist -> - Logger.info $"Playlist loaded: {playlist.Steps.Length} steps" - let dir = Path.GetDirectoryName(filePath) - let env = playlist.Env |> Option.orElse args.Env - let vars = mergeVars playlist args.Vars - match args.Output with - | "ndjson" -> if streamSteps playlist.Steps vars dir env then 0 else 1 - | _ -> collectSteps playlist.Steps vars dir env |> formatAndExit args.Output - -/// Run a single .nap file -let private runSingleNap (args: CliArgs) (filePath: string) : int = - let result = Runner.runNapFile filePath args.Vars args.Env |> Async.RunSynchronously - match args.Output with - | "junit" -> printf "%s" (Output.formatJUnit [result]) - | "json" | "ndjson" -> printf "%s" (Output.formatJson result) - | _ -> printf "%s" (Output.formatPretty result) - if result.Passed then 0 else 1 - -let runFile (args: CliArgs) : int = - match args.File with - | None -> - eprintfn "Error: no file specified" - printHelp () - 2 - | Some f -> - let filePath = Path.GetFullPath(f) - Logger.info $"Processing: {filePath}" - if not (File.Exists filePath) && not (Directory.Exists filePath) then - Logger.error $"File not found: {filePath}" - eprintfn "Error: %s not found" filePath - 2 - elif Directory.Exists filePath then runDirectory args filePath - elif filePath.EndsWith ".naplist" then runPlaylist args filePath - else runSingleNap args filePath - -let private writeGenerated (outDir: string) (result: OpenApiGenerator.GenerationResult) : unit = - let writeFile (f: OpenApiGenerator.GeneratedFile) = - let fullPath = Path.Combine(outDir, f.FileName) - let dir = Path.GetDirectoryName(fullPath) - if not (Directory.Exists dir) then - Directory.CreateDirectory(dir) |> ignore - File.WriteAllText(fullPath, f.Content) - writeFile result.Environment - for nap in result.NapFiles do - writeFile nap - writeFile result.Playlist - 
-/// Display generation results -let private displayGenerated (output: string) (generated: OpenApiGenerator.GenerationResult) (outDir: string) : unit = - match output with - | "json" -> - printfn "{\"files\":%d,\"playlist\":\"%s\"}" generated.NapFiles.Length generated.Playlist.FileName - | _ -> - printfn "Generated %d .nap files from OpenAPI spec" generated.NapFiles.Length - printfn " Playlist: %s" generated.Playlist.FileName - printfn " Environment: %s" generated.Environment.FileName - printfn " Output: %s" outDir - -let generateOpenApi (args: CliArgs) : int = - match args.File with - | None -> - eprintfn "Error: no spec file specified" - eprintfn "Usage: nap generate openapi <spec.json> --output-dir <dir>" - 2 - | Some specFile -> - let specPath = Path.GetFullPath(specFile) - if not (File.Exists specPath) then - eprintfn "Error: %s not found" specPath - 2 - else - let outDir = args.OutputDir |> Option.map Path.GetFullPath |> Option.defaultWith (fun () -> Path.GetDirectoryName(specPath)) - match File.ReadAllText(specPath) |> OpenApiGenerator.generate with - | Error msg -> eprintfn "Error: %s" msg; 1 - | Ok generated -> - if not (Directory.Exists outDir) then Directory.CreateDirectory(outDir) |> ignore - writeGenerated outDir generated - displayGenerated args.Output generated outDir - 0 - -let checkFile (args: CliArgs) : int = - match args.File with - | None -> - eprintfn "Error: no file specified" - 2 - | Some file -> - let filePath = Path.GetFullPath(file) - if not (File.Exists filePath) then - eprintfn "Error: %s not found" filePath - 2 - else - let content = File.ReadAllText(filePath) - let result = - if filePath.EndsWith ".naplist" - then Parser.parseNapList content |> Result.map ignore - else Parser.parseNapFile content |> Result.map ignore - match result with - | Result.Ok _ -> - printfn "\x1b[32m✓\x1b[0m %s is valid" (Path.GetFileName filePath) - 0 - | Result.Error msg -> - eprintfn "\x1b[31m✗\x1b[0m %s" (Path.GetFileName filePath) - eprintfn " %s" msg - 1 
- -[<EntryPoint>] -let main argv = - let args = parseArgs argv - Logger.init args.Verbose - let joinedArgs = argv |> String.concat " " - Logger.info $"CLI started: args={joinedArgs} cwd={Directory.GetCurrentDirectory()}" - let exitCode = - match args.Command with - | "run" -> runFile args - | "check" -> checkFile args - | "generate" -> - match args.SubCommand with - | Some "openapi" -> generateOpenApi args - | Some other -> - eprintfn "Unknown generate target: %s" other - 2 - | None -> - eprintfn "Usage: nap generate openapi <spec.json> --output-dir <dir>" - 2 - | "version" | "--version" -> - let v = Reflection.Assembly.GetExecutingAssembly().GetName().Version - printfn "%d.%d.%d" v.Major v.Minor v.Build - 0 - | "help" | "--help" | "-h" -> - printHelp () - 0 - | other -> - eprintfn "Unknown command: %s" other - printHelp () - 2 - Logger.info $"CLI exiting with code {exitCode}" - Logger.close () - exitCode diff --git a/src/Nap.Core/Runner.fs b/src/Nap.Core/Runner.fs deleted file mode 100644 index fe933be..0000000 --- a/src/Nap.Core/Runner.fs +++ /dev/null @@ -1,283 +0,0 @@ -module Nap.Core.Runner - -open System -open System.Diagnostics -open System.Net.Http -open System.Text -open System.Text.Json -open System.Text.RegularExpressions -open Nap.Core - -let private httpClient = new HttpClient() - -/// Execute an HTTP request from a resolved NapRequest -let executeRequest (request: NapRequest) : Async<NapResponse> = async { - Logger.info $"HTTP {request.Method} {request.Url}" - Logger.debug $"Request headers: {request.Headers.Count} headers" - let msg = new HttpRequestMessage(request.Method.ToNetMethod(), request.Url) - - // Add headers - for kv in request.Headers do - // Content headers need to go on the content object - if kv.Key.Equals("Content-Type", StringComparison.OrdinalIgnoreCase) then () - else msg.Headers.TryAddWithoutValidation(kv.Key, kv.Value) |> ignore - - // Add body if present - match request.Body with - | Some body -> - msg.Content <- new 
StringContent(body.Content, Encoding.UTF8, body.ContentType) - | None -> () - - let sw = Stopwatch.StartNew() - let! response = httpClient.SendAsync(msg) |> Async.AwaitTask - sw.Stop() - - let! body = response.Content.ReadAsStringAsync() |> Async.AwaitTask - Logger.info $"HTTP {int response.StatusCode} in {sw.Elapsed.TotalMilliseconds:F0}ms" - Logger.debug $"Response body length: {body.Length}" - let headers = - response.Headers - |> Seq.append response.Content.Headers - |> Seq.map (fun kv -> kv.Key, kv.Value |> String.concat ", ") - |> Map.ofSeq - - return { - StatusCode = int response.StatusCode - Headers = headers - Body = body - Duration = sw.Elapsed - } -} - -/// Walk a dot-delimited path into a JSON body and return the leaf value as a string. -/// e.g. tryGetJsonPath "user.name" body → Some "Alice" -/// Returns None if the path doesn't exist or the body isn't valid JSON. -let private tryGetJsonPath (path: string) (body: string) : string option = - try - let doc = JsonDocument.Parse(body) - let parts = path.Split('.') - let mutable current = doc.RootElement - let mutable found = true - for part in parts do - if found then - match current.ValueKind with - | JsonValueKind.Object -> - match current.TryGetProperty(part) with - | true, prop -> current <- prop - | false, _ -> found <- false - | _ -> found <- false - if found then - match current.ValueKind with - | JsonValueKind.String -> Some (current.GetString()) - | JsonValueKind.Number -> Some (current.GetRawText()) - | JsonValueKind.True -> Some "true" - | JsonValueKind.False -> Some "false" - | JsonValueKind.Null -> Some "null" - | _ -> Some (current.GetRawText()) - else None - with _ -> None - -/// Resolve an assertion target (e.g. "status", "body.id", "headers.Content-Type") -/// to the actual string value from the HTTP response. -/// Returns None when the target doesn't exist in the response. 
-let private resolveTarget (response: NapResponse) (target: string) : string option = - if target = "status" then - Some (string response.StatusCode) - elif target = "duration" then - Some (sprintf "%.0fms" response.Duration.TotalMilliseconds) - elif target.StartsWith "headers." then - let headerName = target.Substring(8) - response.Headers - |> Map.tryFind headerName - |> Option.orElseWith (fun () -> - response.Headers |> Map.tryPick (fun k v -> - if k.Equals(headerName, StringComparison.OrdinalIgnoreCase) - then Some v else None)) - elif target.StartsWith "body." then - tryGetJsonPath (target.Substring(5)) response.Body - elif target = "body" then - Some response.Body - else None - -/// Parse a numeric value from a string, stripping a trailing "ms" duration suffix. -/// e.g. "500ms" → Some 500.0, "42" → Some 42.0, "abc" → None -let private parseNum (s: string) : float option = - let s = s.TrimEnd('m', 's') - match Double.TryParse(s) with - | true, v -> Some v - | _ -> None - -/// Compare two numeric values (actual vs expected) using the given comparator. -/// Returns false if either value is missing or non-numeric. -let private compareNumeric (cmp: float -> float -> bool) (actual: string option) (expected: string) : bool = - match actual with - | Some a -> - match parseNum a, parseNum expected with - | Some av, Some ev -> cmp av ev - | _ -> false - | None -> false - -/// Convert a glob pattern (using * and ? wildcards) to a regex and test a value against it. -let private globMatch (pattern: string) (value: string) : bool = - let regexPattern = - pattern.ToCharArray() - |> Array.map (fun c -> - match c with - | '*' -> ".*" - | '?' -> "." - | c when ".+^${}()|[]\\".Contains(c) -> $"\\{c}" - | c -> string c) - |> String.concat "" - Regex.IsMatch(value, $"^{regexPattern}$") - -/// Build an AssertionResult from an assertion, its pass/fail state, and display strings. 
-let private makeResult (assertion: Assertion) (passed: bool) (expected: string) (actual: string option) : AssertionResult = - { Assertion = assertion; Passed = passed; Expected = expected; Actual = actual |> Option.defaultValue "<missing>" } - -/// Evaluate a single assertion operator against the resolved actual value. -let private evaluateOp (assertion: Assertion) (actual: string option) : AssertionResult = - match assertion.Op with - | Equals expected -> - let passed = actual |> Option.map (fun a -> a = expected) |> Option.defaultValue false - makeResult assertion passed expected actual - | Exists -> - let passed = actual.IsSome - { Assertion = assertion; Passed = passed; Expected = "exists"; Actual = if actual.IsSome then "exists" else "<missing>" } - | Contains expected -> - let passed = actual |> Option.map (fun a -> a.Contains(expected, StringComparison.OrdinalIgnoreCase)) |> Option.defaultValue false - makeResult assertion passed $"contains \"{expected}\"" actual - | Matches pattern -> - let passed = actual |> Option.map (fun a -> globMatch pattern a) |> Option.defaultValue false - { Assertion = assertion; Passed = passed; Expected = $"matches \"{pattern}\""; Actual = actual |> Option.defaultValue "<missing>" } - | LessThan expected -> - makeResult assertion (compareNumeric (<) actual expected) $"< {expected}" actual - | GreaterThan expected -> - makeResult assertion (compareNumeric (>) actual expected) $"> {expected}" actual - -/// Evaluate all assertions against an HTTP response. -/// Each assertion's target is resolved to the actual response value, -/// then the operator (=, exists, contains, matches, <, >) is applied. 
-let evaluateAssertions (assertions: Assertion list) (response: NapResponse) : AssertionResult list = - assertions |> List.map (fun assertion -> - resolveTarget response assertion.Target |> evaluateOp assertion - ) - -/// Determine the dotnet CLI arguments for a script file -let private scriptArgs (scriptPath: string) : string = - if scriptPath.EndsWith ".csx" - then $"script \"{scriptPath}\"" - else $"fsi \"{scriptPath}\"" - -/// Run a script (.fsx or .csx) and capture its output -let runScript (scriptPath: string) : Async<NapResult> = async { - Logger.info $"Script start: {scriptPath}" - let psi = ProcessStartInfo() - psi.FileName <- "dotnet" - psi.Arguments <- scriptArgs scriptPath - psi.WorkingDirectory <- System.IO.Path.GetDirectoryName(scriptPath) - psi.RedirectStandardOutput <- true - psi.RedirectStandardError <- true - psi.UseShellExecute <- false - psi.CreateNoWindow <- true - - let sw = Stopwatch.StartNew() - - try - use proc = Process.Start(psi) - let! stdout = proc.StandardOutput.ReadToEndAsync() |> Async.AwaitTask - let! stderr = proc.StandardError.ReadToEndAsync() |> Async.AwaitTask - do! 
proc.WaitForExitAsync() |> Async.AwaitTask - sw.Stop() - - let logLines = - stdout.Split('\n') - |> Array.map (fun l -> l.TrimEnd('\r')) - |> Array.filter (fun l -> l.Length > 0) - |> Array.toList - - let passed = proc.ExitCode = 0 - Logger.info $"Script exit code: {proc.ExitCode}" - let error = - if passed then None - elif stderr.Length > 0 then Some stderr - else Some $"Script exited with code {proc.ExitCode}" - - return { - File = scriptPath - Request = { Method = GET; Url = ""; Headers = Map.empty; Body = None } - Response = None - Assertions = [] - Passed = passed - Error = error - Log = logLines - } - with ex -> - sw.Stop() - Logger.error $"Script failed: {ex.Message}" - return { - File = scriptPath - Request = { Method = GET; Url = ""; Headers = Map.empty; Body = None } - Response = None - Assertions = [] - Passed = false - Error = Some $"Script failed to start: {ex.Message}" - Log = [] - } -} - -/// Run a single .nap file end-to-end -let runNapFile (filePath: string) (vars: Map<string, string>) (envName: string option) : Async<NapResult> = async { - Logger.info $"File: {filePath}" - let dir = System.IO.Path.GetDirectoryName(filePath) - let content = System.IO.File.ReadAllText(filePath) - - match Parser.parseNapFile content with - | Error msg -> - Logger.error $"Parse error in {filePath}: {msg}" - return { - File = filePath - Request = { Method = GET; Url = ""; Headers = Map.empty; Body = None } - Response = None - Assertions = [] - Passed = false - Error = Some $"Parse error: {msg}" - Log = [] - } - | Ok napFile -> - // Resolve variables - let allVars = Environment.loadEnvironment dir envName vars napFile.Vars - Logger.debug $"Resolved {allVars.Count} variables" - let resolved = Environment.resolveNapFile allVars napFile - - try - let! 
response = executeRequest resolved.Request - let assertionResults = evaluateAssertions resolved.Assertions response - let passed = assertionResults |> List.filter (fun r -> r.Passed) |> List.length - let total = assertionResults.Length - Logger.info $"Assertions: {passed}/{total} passed" - for a in assertionResults do - let status = if a.Passed then "PASS" else "FAIL" - Logger.debug $"Assertion {a.Assertion.Target}: {status}" - let allPassed = assertionResults |> List.forall (fun r -> r.Passed) - - return { - File = filePath - Request = resolved.Request - Response = Some response - Assertions = assertionResults - Passed = allPassed - Error = None - Log = [] - } - with ex -> - Logger.error $"Request failed: {ex.Message}" - return { - File = filePath - Request = resolved.Request - Response = None - Assertions = [] - Passed = false - Error = Some $"Request failed: {ex.Message}" - Log = [] - } -} diff --git a/src/Nap.Core/Types.fs b/src/Nap.Core/Types.fs deleted file mode 100644 index 7cedd6d..0000000 --- a/src/Nap.Core/Types.fs +++ /dev/null @@ -1,110 +0,0 @@ -namespace Nap.Core - -open System -open System.Net.Http - -/// Assertion operators used in [assert] blocks -type AssertOp = - | Equals of string - | Exists - | Contains of string - | Matches of string - | LessThan of string - | GreaterThan of string - -/// A single assertion line, e.g. status = 200, body.id exists -type Assertion = { - Target : string // e.g. 
"status", "body.id", "headers.Content-Type", "duration" - Op : AssertOp -} - -/// HTTP method -type HttpMethod = - | GET | POST | PUT | PATCH | DELETE | HEAD | OPTIONS - member this.ToNetMethod() = - match this with - | GET -> System.Net.Http.HttpMethod.Get - | POST -> System.Net.Http.HttpMethod.Post - | PUT -> System.Net.Http.HttpMethod.Put - | PATCH -> System.Net.Http.HttpMethod.Patch - | DELETE -> System.Net.Http.HttpMethod.Delete - | HEAD -> System.Net.Http.HttpMethod.Head - | OPTIONS -> System.Net.Http.HttpMethod.Options - -/// Script references (pre/post hooks) -type ScriptRef = { - Pre : string option - Post : string option -} - -/// Metadata block [meta] -type NapMeta = { - Name : string option - Description : string option - Tags : string list -} - -/// Request body -type RequestBody = { - ContentType : string - Content : string -} - -/// The request definition from a .nap file -type NapRequest = { - Method : HttpMethod - Url : string - Headers : Map<string, string> - Body : RequestBody option -} - -/// A fully parsed .nap file -type NapFile = { - Meta : NapMeta - Vars : Map<string, string> - Request : NapRequest - Assertions : Assertion list - Script : ScriptRef -} - -/// Result of evaluating a single assertion -type AssertionResult = { - Assertion : Assertion - Passed : bool - Expected : string - Actual : string -} - -/// The HTTP response captured after running a request -type NapResponse = { - StatusCode : int - Headers : Map<string, string> - Body : string - Duration : TimeSpan -} - -/// Overall result of running a single .nap file -type NapResult = { - File : string - Request : NapRequest - Response : NapResponse option - Assertions : AssertionResult list - Passed : bool - Error : string option - Log : string list -} - -/// A step in a .naplist playlist -type PlaylistStep = - | NapFileStep of string // path to a .nap file - | PlaylistRef of string // path to another .naplist - | FolderRef of string // path to a folder - | ScriptStep of string // path 
to an .fsx or .csx orchestration script - -/// A parsed .naplist file -type NapPlaylist = { - Meta : NapMeta - Env : string option - Vars : Map<string, string> - Steps : PlaylistStep list -} diff --git a/src/Nap.VsCode/src/cliInstaller.ts b/src/Nap.VsCode/src/cliInstaller.ts deleted file mode 100644 index 80d0126..0000000 --- a/src/Nap.VsCode/src/cliInstaller.ts +++ /dev/null @@ -1,222 +0,0 @@ -// CLI Installer — downloads the correct Napper CLI binary from GitHub releases -// Decoupled from vscode SDK — takes config values as parameters - -import type * as http from "http"; -import * as https from "https"; -import * as fs from "fs"; -import * as path from "path"; -import { execFile } from "child_process"; -import { type Result, err, ok } from "./types"; -import { - CLI_ARCH_ARM64, - CLI_ARCH_X64, - CLI_ASSET_PREFIX, - CLI_BINARY_NAME, - CLI_BIN_DIR, - CLI_DOWNLOAD_ERROR_PREFIX, - CLI_DOWNLOAD_HOST, - CLI_DOWNLOAD_PATH_PREFIX, - CLI_FILE_MODE_EXECUTABLE, - CLI_MAX_REDIRECTS, - CLI_PLATFORM_DARWIN, - CLI_PLATFORM_LINUX, - CLI_PLATFORM_WIN32, - CLI_REDIRECT_ERROR, - CLI_RID_LINUX_X64, - CLI_RID_OSX_ARM64, - CLI_RID_OSX_X64, - CLI_RID_WIN_X64, - CLI_TOO_MANY_REDIRECTS, - CLI_UNSUPPORTED_PLATFORM_MSG, - CLI_VERSION_CHECK_ERROR, - CLI_VERSION_CHECK_TIMEOUT, - CLI_VERSION_FLAG, - CLI_WIN_EXE_SUFFIX, - HTTP_STATUS_CLIENT_ERROR_MIN, - HTTP_STATUS_OK, - HTTP_STATUS_REDIRECT_MIN, -} from "./constants"; - -const PLATFORM_RID_MAP: ReadonlyMap<string, string> = new Map([ - [`${CLI_PLATFORM_DARWIN}-${CLI_ARCH_ARM64}`, CLI_RID_OSX_ARM64], - [`${CLI_PLATFORM_DARWIN}-${CLI_ARCH_X64}`, CLI_RID_OSX_X64], - [`${CLI_PLATFORM_LINUX}-${CLI_ARCH_X64}`, CLI_RID_LINUX_X64], - [`${CLI_PLATFORM_WIN32}-${CLI_ARCH_X64}`, CLI_RID_WIN_X64], -]); - -export const platformToRid = ( - platform: string, - arch: string -): Result<string, string> => { - const key = `${platform}-${arch}`, - rid = PLATFORM_RID_MAP.get(key); - if (rid !== undefined) { - return ok(rid); - } - return 
err(`${CLI_UNSUPPORTED_PLATFORM_MSG}${key}`); -}; - -export const assetName = (rid: string): string => { - const base = `${CLI_ASSET_PREFIX}${rid}`; - return rid === CLI_RID_WIN_X64 ? `${base}${CLI_WIN_EXE_SUFFIX}` : base; -}; - -export const localBinaryName = (platform: string): string => - platform === CLI_PLATFORM_WIN32 - ? `${CLI_BINARY_NAME}${CLI_WIN_EXE_SUFFIX}` - : CLI_BINARY_NAME; - -export const installedCliPath = ( - storageDir: string, - platform: string -): string => path.join(storageDir, CLI_BIN_DIR, localBinaryName(platform)); - -export const isCliInstalled = (cliPath: string): boolean => - fs.existsSync(cliPath); - -export const getCliVersion = async ( - cliPath: string -): Promise<Result<string, string>> => - new Promise((resolve) => { - execFile( - cliPath, - [CLI_VERSION_FLAG], - { timeout: CLI_VERSION_CHECK_TIMEOUT }, - (error: Error | null, stdout: string) => { - if (error !== null) { - resolve(err(`${CLI_VERSION_CHECK_ERROR}${error.message}`)); - return; - } - resolve(ok(stdout.trim())); - } - ); - }); - -interface RedirectContext { - readonly dest: string; - readonly redirectCount: number; - readonly resolve: (value: Result<void, string>) => void; -} - -const handleRedirect = ( - response: http.IncomingMessage, - ctx: RedirectContext, -): void => { - const {location} = response.headers; - if (location === undefined || location === "") { - ctx.resolve(err(CLI_REDIRECT_ERROR)); - return; - } - response.resume(); - followRedirect(location, ctx.dest, ctx.redirectCount + 1) - .then(ctx.resolve) - .catch(() => { ctx.resolve(err(CLI_REDIRECT_ERROR)); }); -}, - - handleDownload = ( - response: http.IncomingMessage, - dest: string, - resolve: (value: Result<void, string>) => void -): void => { - const file = fs.createWriteStream(dest); - response.pipe(file); - file.on("finish", () => { - file.close(); - resolve(ok(undefined)); - }); - file.on("error", (e) => { resolve(err(e.message)); }); -}, - - buildRequestOptions = (url: string): { hostname: string; 
path: string; headers: Record<string, string> } => { - const parsedUrl = new URL(url); - return { - hostname: parsedUrl.hostname, - path: parsedUrl.pathname + parsedUrl.search, - headers: { "User-Agent": CLI_BINARY_NAME }, - }; -}, - - isRedirectStatus = (status: number): boolean => - status >= HTTP_STATUS_REDIRECT_MIN && status < HTTP_STATUS_CLIENT_ERROR_MIN, - - handleResponse = ( - response: http.IncomingMessage, - ctx: RedirectContext, -): void => { - const status = response.statusCode ?? 0; - if (isRedirectStatus(status)) { - handleRedirect(response, ctx); - } else if (status !== HTTP_STATUS_OK) { - response.resume(); - ctx.resolve(err(`${CLI_DOWNLOAD_ERROR_PREFIX}${status}`)); - } else { - handleDownload(response, ctx.dest, ctx.resolve); - } -}; - -async function followRedirect( - url: string, - dest: string, - redirectCount: number -): Promise<Result<void, string>> { - if (redirectCount > CLI_MAX_REDIRECTS) { - return err(CLI_TOO_MANY_REDIRECTS); - } - - const options = buildRequestOptions(url); - - return new Promise((resolve) => { - const ctx: RedirectContext = { dest, redirectCount, resolve }; - https - .get(options, (response) => { handleResponse(response, ctx); }) - .on("error", (e) => { resolve(err(e.message)); }); - }); -} - -export const downloadBinary = async ( - rid: string, - destPath: string -): Promise<Result<void, string>> => { - const asset = assetName(rid), - url = `https://${CLI_DOWNLOAD_HOST}${CLI_DOWNLOAD_PATH_PREFIX}${asset}`, - dir = path.dirname(destPath); - - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir, { recursive: true }); - } - - return followRedirect(url, destPath, 0); -}; - -export const makeExecutable = ( - filePath: string, - platform: string -): void => { - if (platform !== CLI_PLATFORM_WIN32) { - fs.chmodSync(filePath, CLI_FILE_MODE_EXECUTABLE); - } -}; - -export interface InstallResult { - readonly cliPath: string; -} - -export const installCli = async ( - storageDir: string, - platform: string, - arch: string -): 
Promise<Result<InstallResult, string>> => { - const ridResult = platformToRid(platform, arch); - if (!ridResult.ok) { - return err(ridResult.error); - } - - const destPath = installedCliPath(storageDir, platform), - downloadResult = await downloadBinary(ridResult.value, destPath); - if (!downloadResult.ok) { - return err(downloadResult.error); - } - - makeExecutable(destPath, platform); - return ok({ cliPath: destPath }); -}; diff --git a/src/Nap.VsCode/src/cliRunner.ts b/src/Nap.VsCode/src/cliRunner.ts deleted file mode 100644 index 9b50c75..0000000 --- a/src/Nap.VsCode/src/cliRunner.ts +++ /dev/null @@ -1,241 +0,0 @@ -// Runs the Napper CLI as a subprocess and parses JSON results -// Decoupled from vscode SDK — takes config values as parameters - -import { execFile, spawn } from "child_process"; -import { - CLI_CMD_CHECK, - CLI_CMD_RUN, - CLI_FLAG_ENV, - CLI_FLAG_OUTPUT, - CLI_OUTPUT_JSON, - CLI_OUTPUT_NDJSON, - CLI_PARSE_FAILED_PREFIX, - CLI_SPAWN_FAILED_PREFIX, - DEFAULT_CLI_PATH, -} from "./constants"; -import { type Result, type RunResult, err, ok } from "./types"; - -const MAX_PREVIEW_LENGTH = 200; - -interface RunOptions { - readonly cliPath: string; - readonly filePath: string; - readonly env?: string | undefined; - readonly vars?: readonly string[]; - readonly cwd: string; -} - -const appendEnvArgs = ( - args: string[], - env: string | undefined -): void => { - if (env !== undefined && env !== "") { - args.push(CLI_FLAG_ENV, env); - } -}, - - buildArgs = (options: RunOptions): readonly string[] => { - const args: string[] = [ - CLI_CMD_RUN, - options.filePath, - CLI_FLAG_OUTPUT, - CLI_OUTPUT_JSON, - ]; - appendEnvArgs(args, options.env); - return args; -}, - - parseJsonOutput = ( - stdout: string -): Result<readonly RunResult[], string> => { - try { - const parsed: unknown = JSON.parse(stdout); - if (Array.isArray(parsed)) { - // validated: JSON.parse produced an array; elements typed at consumption - return ok(parsed); - } - return ok([parsed as 
RunResult]); - } catch { - return err(`${CLI_PARSE_FAILED_PREFIX}${stdout.slice(0, MAX_PREVIEW_LENGTH)}`); - } -}, - - formatSpawnError = ( - cliPath: string, - error: Error, - stderr: string -): string => { - const code = "code" in error ? ` (${String(error.code)})` : "", - stderrSuffix = stderr.length > 0 ? ` — ${stderr}` : ""; - return `${CLI_SPAWN_FAILED_PREFIX}${cliPath}${code}${stderrSuffix}`; -}, - - spawnCli = async ( - cliPath: string, - args: readonly string[], - cwd: string -): Promise<Result<readonly RunResult[], string>> => - new Promise((resolve) => { - execFile( - cliPath, - [...args], - { cwd, timeout: 30_000, env: { ...process.env } }, - (error, stdout, stderr) => { - if (error !== null && stdout.length === 0) { - resolve(err(formatSpawnError(cliPath, error, stderr))); - return; - } - resolve(parseJsonOutput(stdout)); - } - ); - }), - - resolveCliPath = (cliPath: string): string => - cliPath.length > 0 ? cliPath : DEFAULT_CLI_PATH; - -export const runCli = async ( - options: RunOptions -): Promise<Result<readonly RunResult[], string>> => { - const cliPath = resolveCliPath(options.cliPath), - args = buildArgs(options); - return spawnCli(cliPath, args, options.cwd); -}; - -interface StreamOptions { - readonly cliPath: string; - readonly filePath: string; - readonly env?: string | undefined; - readonly cwd: string; - readonly onResult: (result: RunResult) => void; - readonly onDone: (error?: string) => void; -} - -const buildStreamArgs = (options: StreamOptions): readonly string[] => { - const args: string[] = [ - CLI_CMD_RUN, - options.filePath, - CLI_FLAG_OUTPUT, - CLI_OUTPUT_NDJSON, - ]; - appendEnvArgs(args, options.env); - return args; -}, - - parseLine = (line: string): Result<RunResult, string> => { - try { - return ok(JSON.parse(line)); - } catch { - return err(`${CLI_PARSE_FAILED_PREFIX}${line.slice(0, MAX_PREVIEW_LENGTH)}`); - } -}, - - emitParsedLine = ( - trimmed: string, - onResult: (result: RunResult) => void -): void => { - const parsed 
= parseLine(trimmed); - if (parsed.ok) { - onResult(parsed.value); - } -}, - - processChunk = ( - buffer: string, - chunk: Buffer, - onResult: (result: RunResult) => void -): string => { - const combined = buffer + chunk.toString(), - lines = combined.split("\n"), - remainder = lines.pop() ?? ""; - for (const line of lines) { - const trimmed = line.trim(); - if (trimmed.length > 0) { - emitParsedLine(trimmed, onResult); - } - } - return remainder; -}; - -interface FlushContext { - readonly buffer: string; - readonly onResult: (result: RunResult) => void; - readonly stderrOutput: string; - readonly onDone: (error?: string) => void; -} - -const flushAndFinish = (ctx: FlushContext): void => { - const remaining = ctx.buffer.trim(); - if (remaining.length > 0) { - emitParsedLine(remaining, ctx.onResult); - } - ctx.onDone(ctx.stderrOutput.length > 0 ? ctx.stderrOutput : undefined); -}; - -interface StreamState { - buffer: string; - stderrOutput: string; - finished: boolean; -} - -interface StreamListenerContext { - readonly child: ReturnType<typeof spawn>; - readonly state: StreamState; - readonly options: StreamOptions; - readonly cliPath: string; -} - -const attachDataListeners = (ctx: StreamListenerContext): void => { - ctx.child.stdout?.on("data", (chunk: Buffer) => { - ctx.state.buffer = processChunk(ctx.state.buffer, chunk, ctx.options.onResult); - }); - ctx.child.stderr?.on("data", (chunk: Buffer) => { - ctx.state.stderrOutput += chunk.toString(); - }); -}, - - attachLifecycleListeners = (ctx: StreamListenerContext): void => { - ctx.child.on("close", () => { - if (ctx.state.finished) { return; } - ctx.state.finished = true; - flushAndFinish({ buffer: ctx.state.buffer, onResult: ctx.options.onResult, stderrOutput: ctx.state.stderrOutput, onDone: ctx.options.onDone }); - }); - ctx.child.on("error", (error) => { - if (ctx.state.finished) { return; } - ctx.state.finished = true; - ctx.options.onDone(`${CLI_SPAWN_FAILED_PREFIX}${ctx.cliPath} — ${error.message}`); - }); 
-}; - -export const streamCli = (options: StreamOptions): void => { - const cliPath = resolveCliPath(options.cliPath), - args = buildStreamArgs(options), - child = spawn(cliPath, [...args], { - cwd: options.cwd, - env: { ...process.env }, - }), - state: StreamState = { buffer: "", stderrOutput: "", finished: false }, - ctx: StreamListenerContext = { child, state, options, cliPath }; - attachDataListeners(ctx); - attachLifecycleListeners(ctx); -}; - -export const checkFile = async ( - cliPath: string, - filePath: string, - cwd: string -): Promise<Result<string, string>> => - new Promise((resolve) => { - const cmd = resolveCliPath(cliPath); - execFile( - cmd, - [CLI_CMD_CHECK, filePath], - { cwd, timeout: 10_000, env: { ...process.env } }, - (error, stdout, stderr) => { - if (error !== null) { - resolve(err(stderr.length > 0 ? stderr : error.message)); - return; - } - resolve(ok(stdout)); - } - ); - }); diff --git a/src/Nap.VsCode/src/codeLensProvider.ts b/src/Nap.VsCode/src/codeLensProvider.ts deleted file mode 100644 index 71bcd94..0000000 --- a/src/Nap.VsCode/src/codeLensProvider.ts +++ /dev/null @@ -1,101 +0,0 @@ -// CodeLens provider for .nap and .naplist files -// Shows "Run" and "Copy as curl" actions above key sections - -import * as vscode from "vscode"; -import { - CMD_COPY_CURL, - CMD_RUN_FILE, - HTTP_METHODS, - NAPLIST_EXTENSION, - NAP_EXTENSION, - SECTION_META, - SECTION_REQUEST, -} from "./constants"; - -const RUN_LENS_TITLE = "$(play) Run", - COPY_CURL_TITLE = "$(clippy) Copy as curl", - RUN_PLAYLIST_TITLE = "$(play) Run Playlist", - - makeRunLens = ( - range: vscode.Range, - uri: vscode.Uri -): vscode.CodeLens => - new vscode.CodeLens(range, { - title: RUN_LENS_TITLE, - command: CMD_RUN_FILE, - arguments: [uri], - }), - - makeCurlLens = ( - range: vscode.Range, - uri: vscode.Uri -): vscode.CodeLens => - new vscode.CodeLens(range, { - title: COPY_CURL_TITLE, - command: CMD_COPY_CURL, - arguments: [uri], - }), - - isShorthandMethod = (line: string): 
boolean => - HTTP_METHODS.some((m) => line.startsWith(`${m} `)), - - buildRequestLenses = ( - document: vscode.TextDocument -): vscode.CodeLens[] => { - const lenses: vscode.CodeLens[] = [], - firstLine = document.lineAt(0).text.trim(); - - if (isShorthandMethod(firstLine)) { - const range = new vscode.Range(0, 0, 0, firstLine.length); - lenses.push(makeRunLens(range, document.uri)); - lenses.push(makeCurlLens(range, document.uri)); - } - - for (let i = 0; i < document.lineCount; i++) { - const line = document.lineAt(i).text.trim(); - if (line === SECTION_REQUEST) { - const range = new vscode.Range(i, 0, i, line.length); - lenses.push(makeRunLens(range, document.uri)); - lenses.push(makeCurlLens(range, document.uri)); - } - } - - return lenses; -}, - - buildPlaylistLenses = ( - document: vscode.TextDocument -): vscode.CodeLens[] => { - const lenses: vscode.CodeLens[] = []; - - for (let i = 0; i < document.lineCount; i++) { - const line = document.lineAt(i).text.trim(); - if (line === SECTION_META) { - const range = new vscode.Range(i, 0, i, line.length); - lenses.push( - new vscode.CodeLens(range, { - title: RUN_PLAYLIST_TITLE, - command: CMD_RUN_FILE, - arguments: [document.uri], - }) - ); - } - } - - return lenses; -}; - -export class CodeLensProvider implements vscode.CodeLensProvider { - private readonly _onDidChangeCodeLenses = - new vscode.EventEmitter<void>(); - readonly onDidChangeCodeLenses = this._onDidChangeCodeLenses.event; - - provideCodeLenses(document: vscode.TextDocument): vscode.CodeLens[] { - const isNap = document.fileName.endsWith(NAP_EXTENSION), - isNapList = document.fileName.endsWith(NAPLIST_EXTENSION); - - if (isNap) {return buildRequestLenses(document);} - if (isNapList) {return buildPlaylistLenses(document);} - return []; - } -} diff --git a/src/Nap.VsCode/src/constants.ts b/src/Nap.VsCode/src/constants.ts deleted file mode 100644 index 6567661..0000000 --- a/src/Nap.VsCode/src/constants.ts +++ /dev/null @@ -1,377 +0,0 @@ -// All string 
constants in one location — no literals elsewhere - -// File extensions -export const NAP_EXTENSION = ".nap"; -export const NAPLIST_EXTENSION = ".naplist"; -export const NAPENV_EXTENSION = ".napenv"; -export const NAPENV_LOCAL_SUFFIX = ".local"; -export const FSX_EXTENSION = ".fsx"; -export const CSX_EXTENSION = ".csx"; - -// Glob patterns -export const NAP_GLOB = "**/*.nap"; -export const NAPLIST_GLOB = "**/*.naplist"; -export const NAPENV_GLOB = "**/.napenv*"; - -// View IDs -export const VIEW_EXPLORER = "napperExplorer"; - -// Command IDs -export const CMD_RUN_FILE = "napper.runFile"; -export const CMD_RUN_ALL = "napper.runAll"; -export const CMD_NEW_REQUEST = "napper.newRequest"; -export const CMD_NEW_PLAYLIST = "napper.newPlaylist"; -export const CMD_SWITCH_ENV = "napper.switchEnvironment"; -export const CMD_COPY_CURL = "napper.copyAsCurl"; -export const CMD_OPEN_RESPONSE = "napper.openResponse"; -export const CMD_SAVE_REPORT = "napper.savePlaylistReport"; - -// Config keys -export const CONFIG_SECTION = "napper"; -export const CONFIG_DEFAULT_ENV = "defaultEnvironment"; -export const CONFIG_AUTO_RUN = "autoRunOnSave"; -export const CONFIG_SPLIT_LAYOUT = "splitEditorLayout"; -export const CONFIG_MASK_SECRETS = "maskSecretsInPreview"; -export const CONFIG_CLI_PATH = "cliPath"; - -// CLI defaults -export const DEFAULT_CLI_PATH = "napper"; -export const CLI_OUTPUT_JSON = "json"; -export const CLI_OUTPUT_NDJSON = "ndjson"; -export const CLI_CMD_RUN = "run"; -export const CLI_CMD_CHECK = "check"; -export const CLI_CMD_GENERATE = "generate"; -export const CLI_SUBCMD_OPENAPI = "openapi"; -export const CLI_FLAG_OUTPUT = "--output"; -export const CLI_FLAG_ENV = "--env"; -export const CLI_FLAG_VAR = "--var"; -export const CLI_FLAG_OUTPUT_DIR = "--output-dir"; - -// Context values for tree items -export const CONTEXT_REQUEST_FILE = "requestFile"; -export const CONTEXT_PLAYLIST = "playlist"; -export const CONTEXT_FOLDER = "folder"; -export const CONTEXT_PLAYLIST_SECTION = 
"playlistSection"; -export const CONTEXT_SCRIPT_FILE = "scriptFile"; - -// Labels -export const PLAYLIST_SECTION_LABEL = "Playlists"; - -// Icons -export const ICON_PLAYLIST_SECTION = "list-tree"; -export const ICON_PLAYLIST_FILE = "list-ordered"; -export const ICON_IDLE = "circle-outline"; -export const ICON_RUNNING = "loading~spin"; -export const ICON_PASSED = "pass"; -export const ICON_FAILED = "error"; -export const ICON_ERROR = "warning"; -export const ICON_IMPORT_OPENAPI = "cloud-download"; - -// Badge decorations (single-char for file decorations) -export const BADGE_PASSED = "\u2713"; -export const BADGE_FAILED = "\u2717"; -export const BADGE_ERROR = "!"; - -// Section headers in .nap files -export const SECTION_REQUEST = "[request]"; -export const SECTION_META = "[meta]"; -export const SECTION_STEPS = "[steps]"; - -// Status bar -export const STATUS_BAR_PREFIX = "Napper: "; -export const STATUS_BAR_NO_ENV = "No Environment"; -export const STATUS_BAR_PRIORITY = 100; - -// Theme colors for run state icons -export const THEME_COLOR_PASSED = "testing.iconPassed"; -export const THEME_COLOR_FAILED = "testing.iconFailed"; -export const THEME_COLOR_ERROR = "problemsWarningIcon.foreground"; - -// Response panel -export const RESPONSE_PANEL_TITLE = "Napper Response"; -export const RESPONSE_PANEL_VIEW_TYPE = "napperResponse"; -export const SECTION_LABEL_REQUEST_HEADERS = "Request Headers"; -export const SECTION_LABEL_RESPONSE_HEADERS = "Response Headers"; -export const SECTION_LABEL_BODY = "Body"; -export const SECTION_LABEL_ASSERTIONS = "Assertions"; -export const SECTION_LABEL_OUTPUT = "Output"; -export const SECTION_LABEL_ERROR = "Error"; -export const SECTION_LABEL_REQUEST = "Request"; -export const SECTION_LABEL_RESPONSE = "Response"; -export const NO_REQUEST_HEADERS = "No request headers"; -export const SECTION_LABEL_REQUEST_BODY = "Request Body"; - -// Playlist panel -export const PLAYLIST_PANEL_TITLE = "Napper Playlist"; -export const PLAYLIST_PANEL_VIEW_TYPE 
= "napperPlaylist"; - -// Webview message types -export const MSG_ADD_RESULT = "addResult"; -export const MSG_RUN_COMPLETE = "runComplete"; -export const MSG_RUN_ERROR = "runError"; -export const MSG_SAVE_REPORT = "saveReport"; - -// Report -export const REPORT_FILE_EXTENSION = ".html"; -export const REPORT_FILE_SUFFIX = "-report"; -export const REPORT_SAVED_MSG = "Report saved: "; - -// CLI error messages -export const CLI_SPAWN_FAILED_PREFIX = "Failed to run CLI: "; -export const CLI_PARSE_FAILED_PREFIX = "Failed to parse CLI JSON: "; -export const CLI_ERROR_PREFIX = "Napper CLI error: "; - -// Status bar running -export const STATUS_RUNNING_ICON = "$(loading~spin) Running "; -export const STATUS_RUNNING_SUFFIX = "..."; - -// Curl -export const CURL_CMD_PREFIX = "curl -X "; - -// File creation -export const REQUEST_NAME_SUFFIX = "-request"; - -// Nap file content formatting -export const NAP_NAME_KEY_PREFIX = "name = \""; -export const NAP_NAME_KEY_SUFFIX = "\""; - -// Property keys -export const PROP_FILE_PATH = "filePath"; - -// CLI installer -export const CLI_REPO_OWNER = "MelbourneDeveloper"; -export const CLI_REPO_NAME = "napper"; -export const CLI_BINARY_NAME = "napper"; -export const CLI_BIN_DIR = "bin"; -export const CLI_DOWNLOAD_HOST = "github.com"; -export const CLI_REQUIRED_VERSION = "0.1.0"; -export const CLI_DOWNLOAD_PATH_PREFIX = `/MelbourneDeveloper/napper/releases/download/v${CLI_REQUIRED_VERSION}/`; -export const CLI_ASSET_PREFIX = "napper-"; -export const CLI_WIN_EXE_SUFFIX = ".exe"; -export const CLI_MAX_REDIRECTS = 5; -export const CLI_PLATFORM_DARWIN = "darwin"; -export const CLI_PLATFORM_LINUX = "linux"; -export const CLI_PLATFORM_WIN32 = "win32"; -export const CLI_ARCH_ARM64 = "arm64"; -export const CLI_ARCH_X64 = "x64"; -export const CLI_RID_OSX_ARM64 = "osx-arm64"; -export const CLI_RID_OSX_X64 = "osx-x64"; -export const CLI_RID_LINUX_X64 = "linux-x64"; -export const CLI_RID_WIN_X64 = "win-x64"; -export const CLI_INSTALL_MSG = "Installing 
Napper CLI..."; -export const CLI_INSTALL_COMPLETE_MSG = "Napper CLI installed successfully"; -export const CLI_INSTALL_FAILED_MSG = "Failed to install Napper CLI: "; -export const CLI_UNSUPPORTED_PLATFORM_MSG = "Unsupported platform: "; -export const CLI_DOWNLOAD_ERROR_PREFIX = "Download failed: HTTP "; -export const CLI_REDIRECT_ERROR = "Redirect with no location header"; -export const CLI_TOO_MANY_REDIRECTS = "Too many redirects"; -export const CLI_FILE_MODE_EXECUTABLE = 0o755; -export const CLI_VERSION_FLAG = "--version"; -export const CLI_VERSION_CHECK_TIMEOUT = 5000; -export const CLI_VERSION_CHECK_ERROR = "Failed to check CLI version: "; -export const CLI_VERSION_MISMATCH_MSG = "CLI version mismatch — re-installing"; - -// VSCode built-in commands -export const CMD_VSCODE_OPEN = "vscode.open"; - -// Layout options -export const LAYOUT_BESIDE = "beside"; -export const LAYOUT_BELOW = "below"; - -// Encoding -export const ENCODING_UTF8 = "utf-8"; - -// Language IDs -export const LANG_NAP = "nap"; -export const LANG_NAPLIST = "naplist"; - -// UI messages -export const MSG_NO_FILE_SELECTED = "No .nap or .naplist file selected"; -export const MSG_COPIED = "Copied to clipboard"; -export const MSG_NO_RESPONSE = "No response to show. 
Run a request first."; - -// UI prompts -export const PROMPT_SELECT_METHOD = "Select HTTP method"; -export const PROMPT_ENTER_URL = "Enter request URL"; -export const PROMPT_REQUEST_NAME = "Request file name"; -export const PROMPT_PLAYLIST_NAME = "Playlist name"; -export const PROMPT_SELECT_ENV = "Select Napper environment"; - -// Default values -export const PLACEHOLDER_URL = "https://api.example.com/resource"; -export const DEFAULT_PLAYLIST_NAME = "new-playlist"; -export const DEFAULT_METHOD = "GET"; - -// .nap file keys -export const NAP_KEY_METHOD = "method"; -export const NAP_KEY_URL = "url"; - -// HTTP methods -export const HTTP_METHODS = [ - "GET", - "POST", - "PUT", - "PATCH", - "DELETE", - "HEAD", - "OPTIONS", -] as const; - -// Branding -export const NAPPER_URL = "https://napperapi.dev"; -export const NIMBLESITE_URL = "https://nimblesite.co"; -export const REPORT_FOOTER_GENERATED_BY = "Generated by"; -export const REPORT_FOOTER_MADE_BY = "Made by"; - -// .nap file sections (additional) -export const SECTION_REQUEST_HEADERS = "[request.headers]"; -export const SECTION_REQUEST_BODY = "[request.body]"; -export const SECTION_ASSERT = "[assert]"; -export const SECTION_VARS = "[vars]"; - -// .nap file content -export const NAP_TRIPLE_QUOTE = '"""'; -export const HEADER_CONTENT_TYPE = "Content-Type"; -export const HEADER_ACCEPT = "Accept"; -export const CONTENT_TYPE_JSON = "application/json"; -export const ASSERT_STATUS_PREFIX = "status = "; -export const ASSERT_BODY_EXISTS_SUFFIX = " exists"; -export const ASSERT_BODY_PREFIX = "body."; -export const NAP_KEY_NAME = "name"; -export const NAP_KEY_DESCRIPTION = "description"; -export const NAP_KEY_GENERATED = "generated"; -export const NAP_VALUE_TRUE = "true"; -export const BASE_URL_VAR = "{{baseUrl}}"; -export const BASE_URL_KEY = "baseUrl"; -export const VARS_PLACEHOLDER = "REPLACE_ME"; - -// OpenAPI generator — commands -export const CMD_IMPORT_OPENAPI_URL = "napper.importOpenApiUrl"; -export const 
CMD_IMPORT_OPENAPI_FILE = "napper.importOpenApiFile"; -export const OPENAPI_PICK_FILE = "Select OpenAPI specification file"; -export const OPENAPI_PICK_FOLDER = "Select output folder"; -export const OPENAPI_SUCCESS_PREFIX = "Generated "; -export const OPENAPI_SUCCESS_SUFFIX = " test files from OpenAPI spec"; -export const OPENAPI_ERROR_PREFIX = "Failed to import OpenAPI: "; -export const OPENAPI_FILTER_LABEL = "OpenAPI Spec"; -export const OPENAPI_FILE_EXTENSIONS = ["json", "yaml", "yml"]; -export const OPENAPI_URL_PROMPT = "Enter OpenAPI specification URL"; -export const OPENAPI_URL_PLACEHOLDER = "https://petstore3.swagger.io/api/v3/openapi.json"; -export const OPENAPI_DOWNLOAD_FAILED_PREFIX = "Failed to download spec: "; -export const OPENAPI_DOWNLOADING = "Downloading OpenAPI spec..."; -export const ICON_IMPORT_OPENAPI_FILE = "file-symlink-file"; - -// OpenAPI generator — validation -export const OPENAPI_INVALID_SPEC = "Invalid OpenAPI specification: missing paths"; -export const OPENAPI_NO_ENDPOINTS = "No endpoints found in specification"; -export const OPENAPI_PARSE_ERROR = "Failed to parse JSON"; - -// OpenAPI generator — spec fields -export const HTTPS_SCHEME = "https"; -export const DEFAULT_BASE_URL = "https://api.example.com"; -export const OPENAPI_DEFAULT_TITLE = "API Tests"; -export const PARAM_IN_BODY = "body"; -export const PARAM_IN_QUERY = "query"; -export const PARAM_IN_PATH = "path"; -export const AUTH_BEARER_PREFIX = "Authorization = Bearer "; -export const AUTH_BASIC_PREFIX = "Authorization = Basic "; -export const SECURITY_TYPE_HTTP = "http"; -export const SECURITY_SCHEME_BEARER = "bearer"; -export const SECURITY_SCHEME_BASIC = "basic"; -export const SECURITY_TYPE_API_KEY = "apiKey"; -export const SECURITY_LOCATION_HEADER = "header"; -export const SECURITY_LOCATION_QUERY = "query"; - -// OpenAPI generator — HTTP methods (lowercase for spec parsing) -export const OPENAPI_HTTP_METHODS = [ - "get", - "post", - "put", - "patch", - "delete", - "head", 
- "options", -] as const; - -// JSON Schema types -export const SCHEMA_TYPE_STRING = "string"; -export const SCHEMA_TYPE_NUMBER = "number"; -export const SCHEMA_TYPE_INTEGER = "integer"; -export const SCHEMA_TYPE_BOOLEAN = "boolean"; -export const SCHEMA_TYPE_ARRAY = "array"; -export const SCHEMA_TYPE_OBJECT = "object"; -export const SCHEMA_EXAMPLE_STRING = "example"; - -// Logging -export const LOG_CHANNEL_NAME = "Napper"; -export const LOG_PREFIX_INFO = "INFO"; -export const LOG_PREFIX_WARN = "WARN"; -export const LOG_PREFIX_ERROR = "ERROR"; -export const LOG_PREFIX_DEBUG = "DEBUG"; -export const LOG_MSG_ACTIVATED = "Extension activated"; -export const LOG_MSG_DEACTIVATED = "Extension deactivated"; -export const LOG_MSG_RUN_FILE = "Running file:"; -export const LOG_MSG_RUN_PLAYLIST = "Running playlist:"; -export const LOG_MSG_CLI_RESULT_COUNT = "CLI returned results:"; -export const LOG_MSG_CLI_SPAWN_ERROR = "CLI spawn error:"; -export const LOG_MSG_STREAM_RESULT = "Stream result:"; -export const LOG_MSG_STREAM_DONE = "Stream completed"; -export const LOG_MSG_TREE_REFRESH = "Explorer tree refresh"; -export const LOG_MSG_OPENAPI_IMPORT = "OpenAPI import:"; - -// AI enrichment -export const OPENAPI_AI_CHOICE_TITLE = "How should tests be generated?"; -export const OPENAPI_AI_CHOICE_BASIC = "Generate tests"; -export const OPENAPI_AI_CHOICE_ENHANCED = "Generate with AI enhancement"; -export const OPENAPI_AI_PROGRESS_TITLE = "Enhancing with AI..."; -export const OPENAPI_AI_NO_COPILOT = "GitHub Copilot not available for AI enhancement"; -export const OPENAPI_AI_COPILOT_FAMILY = "copilot-gpt-4o"; -export const OPENAPI_AI_ENRICHING_ASSERTIONS = "Enriching assertions"; -export const OPENAPI_AI_ENRICHING_TEST_DATA = "Enriching test data"; -export const OPENAPI_AI_REORDERING_PLAYLIST = "Reordering playlist"; - -// Context menu command IDs -export const CMD_ADD_TO_PLAYLIST = "napper.addToPlaylist"; -export const CMD_PERF_TEST = "napper.performanceTest"; -export const 
CMD_DELETE_FILE = "napper.deleteFile"; -export const CMD_ADD_NAP_TO_PLAYLIST = "napper.addNapToPlaylist"; -export const CMD_ADD_SCRIPT_TO_PLAYLIST = "napper.addScriptToPlaylist"; -export const CMD_DUPLICATE_PLAYLIST = "napper.duplicatePlaylist"; -export const CMD_COPY_PATH = "napper.copyPath"; -export const CMD_ENRICH_AI = "napper.enrichWithAi"; - -// Context menu prompts -export const PROMPT_SELECT_PLAYLIST = "Select a playlist to add this script to"; -export const PROMPT_SELECT_NAP_FILE = "Select a .nap file to add"; -export const PROMPT_SELECT_SCRIPT_FILE = "Select a script file to add"; -export const PROMPT_CONFIRM_DELETE_PREFIX = "Are you sure you want to delete \""; -export const PROMPT_CONFIRM_DELETE_SUFFIX = "\"?"; -export const PROMPT_DUPLICATE_NAME = "Enter name for the duplicated playlist"; -export const CONFIRM_YES = "Yes"; -export const CONFIRM_NO = "No"; - -// Context menu messages -export const MSG_ADDED_TO_PLAYLIST = "Added to playlist: "; -export const MSG_FILE_DELETED = "Deleted: "; -export const MSG_PLAYLIST_DUPLICATED = "Duplicated playlist: "; -export const MSG_PATH_COPIED = "Path copied to clipboard"; -export const MSG_PERF_TEST_COMING_SOON = "Performance Test: Coming soon"; -export const MSG_NO_PLAYLISTS = "No .naplist files found in workspace"; -export const MSG_NO_NAP_FILES = "No .nap files found in workspace"; -export const MSG_NO_SCRIPT_FILES = "No script files found in workspace"; - -// Glob patterns for context menu pickers -export const SCRIPT_GLOB = "**/*.{fsx,csx}"; - -// Playlist duplication -export const DUPLICATE_SUFFIX = "-copy"; - -// Numeric thresholds -export const PERCENTAGE_MULTIPLIER = 100; -export const HTTP_STATUS_OK = 200; -export const HTTP_STATUS_REDIRECT_MIN = 300; -export const HTTP_STATUS_CLIENT_ERROR_MIN = 400; -export const JSON_INDENT_SIZE = 2; -export const PAD_DIGITS_DEFAULT = 2; -export const PAD_DIGITS_LARGE = 3; -export const PAD_LARGE_THRESHOLD = 100; diff --git a/src/Nap.VsCode/src/contextMenuCommands.ts 
b/src/Nap.VsCode/src/contextMenuCommands.ts deleted file mode 100644 index 13325c7..0000000 --- a/src/Nap.VsCode/src/contextMenuCommands.ts +++ /dev/null @@ -1,296 +0,0 @@ -// Context menu command handlers for tree view items -// Scripts: Add to Playlist, Performance Test, Delete -// Playlists: Add .nap, Add Script, Delete, Duplicate, Copy Path - -import * as vscode from "vscode"; -import * as path from "path"; -import * as fs from "fs"; -import type { ExplorerAdapter } from "./explorerAdapter"; -import { - appendStepToPlaylist, - updatePlaylistName, -} from "./explorerProvider"; -import { - CMD_ADD_NAP_TO_PLAYLIST, - CMD_ADD_SCRIPT_TO_PLAYLIST, - CMD_ADD_TO_PLAYLIST, - CMD_COPY_PATH, - CMD_DELETE_FILE, - CMD_DUPLICATE_PLAYLIST, - CMD_PERF_TEST, - CONFIRM_NO, - CONFIRM_YES, - DUPLICATE_SUFFIX, - ENCODING_UTF8, - MSG_ADDED_TO_PLAYLIST, - MSG_FILE_DELETED, - MSG_NO_NAP_FILES, - MSG_NO_PLAYLISTS, - MSG_NO_SCRIPT_FILES, - MSG_PATH_COPIED, - MSG_PERF_TEST_COMING_SOON, - MSG_PLAYLIST_DUPLICATED, - NAPLIST_EXTENSION, - NAPLIST_GLOB, - NAP_GLOB, - PROMPT_CONFIRM_DELETE_PREFIX, - PROMPT_CONFIRM_DELETE_SUFFIX, - PROMPT_DUPLICATE_NAME, - PROMPT_SELECT_NAP_FILE, - PROMPT_SELECT_PLAYLIST, - PROMPT_SELECT_SCRIPT_FILE, - SCRIPT_GLOB, -} from "./constants"; - -interface FilePickItem extends vscode.QuickPickItem { - readonly uri: vscode.Uri; -} - -const workspaceRoot = (): string | undefined => - vscode.workspace.workspaceFolders?.[0]?.uri.fsPath, - - toPickItems = ( - uris: readonly vscode.Uri[], - root: string -): readonly FilePickItem[] => - uris.map((uri) => ({ - label: path.relative(root, uri.fsPath), - uri, - })), - - writeStepToPlaylist = async ( - playlistPath: string, - pickedFilePath: string, - explorer: ExplorerAdapter -): Promise<void> => { - const playlistDir = path.dirname(playlistPath), - relStep = path.relative(playlistDir, pickedFilePath), - content = fs.readFileSync(playlistPath, ENCODING_UTF8), - updated = appendStepToPlaylist(content, relStep); - await 
vscode.workspace.fs.writeFile( - vscode.Uri.file(playlistPath), - Buffer.from(updated, ENCODING_UTF8) - ); - await vscode.window.showInformationMessage( - `${MSG_ADDED_TO_PLAYLIST}${path.basename(playlistPath)}` - ); - explorer.refresh(); -}, - - pickFileFromGlob = async ( - glob: string, - prompt: string, - emptyMsg: string -): Promise<FilePickItem | undefined> => { - const files = await vscode.workspace.findFiles(glob); - if (files.length === 0) { - await vscode.window.showInformationMessage(emptyMsg); - return undefined; - } - const root = workspaceRoot(); - if (root === undefined) { return undefined; } - return vscode.window.showQuickPick( - toPickItems(files, root), - { placeHolder: prompt } - ); -}, - - addFileToPlaylist = async ({ - playlistPath, - glob, - prompt, - emptyMsg, - explorer, -}: { - readonly playlistPath: string; - readonly glob: string; - readonly prompt: string; - readonly emptyMsg: string; - readonly explorer: ExplorerAdapter; -}): Promise<void> => { - const picked = await pickFileFromGlob(glob, prompt, emptyMsg); - if (picked === undefined) { return; } - await writeStepToPlaylist(playlistPath, picked.uri.fsPath, explorer); -}; - -export const addToPlaylist = async ( - filePath: string, - explorer: ExplorerAdapter -): Promise<void> => { - const picked = await pickFileFromGlob( - NAPLIST_GLOB, - PROMPT_SELECT_PLAYLIST, - MSG_NO_PLAYLISTS - ); - if (picked === undefined) { return; } - await writeStepToPlaylist(picked.uri.fsPath, filePath, explorer); -}; - -export const performanceTest = async (): Promise<void> => { - await vscode.window.showInformationMessage(MSG_PERF_TEST_COMING_SOON); -}; - -const confirmDelete = async ( - fileName: string -): Promise<boolean> => { - const answer = await vscode.window.showWarningMessage( - `${PROMPT_CONFIRM_DELETE_PREFIX}${fileName}${PROMPT_CONFIRM_DELETE_SUFFIX}`, - CONFIRM_YES, - CONFIRM_NO - ); - return answer === CONFIRM_YES; -}; - -export const deleteFile = async ( - filePath: string, - explorer: 
ExplorerAdapter -): Promise<void> => { - const fileName = path.basename(filePath), - confirmed = await confirmDelete(fileName); - if (!confirmed) { return; } - await vscode.workspace.fs.delete(vscode.Uri.file(filePath)); - await vscode.window.showInformationMessage( - `${MSG_FILE_DELETED}${fileName}` - ); - explorer.refresh(); -}; - -export const addNapToPlaylist = async ( - playlistPath: string, - explorer: ExplorerAdapter -): Promise<void> => { - await addFileToPlaylist({ - playlistPath, - glob: NAP_GLOB, - prompt: PROMPT_SELECT_NAP_FILE, - emptyMsg: MSG_NO_NAP_FILES, - explorer, - }); -}; - -export const addScriptToPlaylist = async ( - playlistPath: string, - explorer: ExplorerAdapter -): Promise<void> => { - await addFileToPlaylist({ - playlistPath, - glob: SCRIPT_GLOB, - prompt: PROMPT_SELECT_SCRIPT_FILE, - emptyMsg: MSG_NO_SCRIPT_FILES, - explorer, - }); -}; - -interface DuplicateContext { - readonly newPath: string; - readonly content: string; - readonly newName: string; - readonly explorer: ExplorerAdapter; -} - -const writeDuplicate = async (ctx: DuplicateContext): Promise<void> => { - await vscode.workspace.fs.writeFile( - vscode.Uri.file(ctx.newPath), - Buffer.from(ctx.content, ENCODING_UTF8) - ); - const doc = await vscode.workspace.openTextDocument(ctx.newPath); - await vscode.window.showTextDocument(doc); - await vscode.window.showInformationMessage( - `${MSG_PLAYLIST_DUPLICATED}${ctx.newName}` - ); - ctx.explorer.refresh(); -}; - -export const duplicatePlaylist = async ( - playlistPath: string, - explorer: ExplorerAdapter -): Promise<void> => { - const baseName = path.basename(playlistPath, NAPLIST_EXTENSION), - newName = await vscode.window.showInputBox({ - prompt: PROMPT_DUPLICATE_NAME, - value: `${baseName}${DUPLICATE_SUFFIX}`, - }); - if (newName === undefined) { return; } - const newPath = path.join( - path.dirname(playlistPath), - `${newName}${NAPLIST_EXTENSION}` - ), - content = fs.readFileSync(playlistPath, ENCODING_UTF8), - updated = 
updatePlaylistName(content, newName); - await writeDuplicate({ newPath, content: updated, newName, explorer }); -}; - -export const copyPath = async (filePath: string): Promise<void> => { - await vscode.env.clipboard.writeText(filePath); - await vscode.window.showInformationMessage(MSG_PATH_COPIED); -}; - -interface NodeArg { - readonly filePath?: string; -} - -const withFilePath = ( - handler: (fp: string) => Promise<void> -): ((arg?: NodeArg) => Promise<void>) => - async (arg?: NodeArg): Promise<void> => { - const fp = arg?.filePath; - if (fp !== undefined) { await handler(fp); } - }, - - registerScriptCommands = ( - context: vscode.ExtensionContext, - explorer: ExplorerAdapter -): void => { - context.subscriptions.push( - vscode.commands.registerCommand( - CMD_ADD_TO_PLAYLIST, - withFilePath(async (fp) => { await addToPlaylist(fp, explorer); }) - ), - vscode.commands.registerCommand(CMD_PERF_TEST, performanceTest), - vscode.commands.registerCommand( - CMD_DELETE_FILE, - withFilePath(async (fp) => { await deleteFile(fp, explorer); }) - ) - ); -}, - - registerPlaylistAddCommands = ( - context: vscode.ExtensionContext, - explorer: ExplorerAdapter -): void => { - context.subscriptions.push( - vscode.commands.registerCommand( - CMD_ADD_NAP_TO_PLAYLIST, - withFilePath(async (fp) => { await addNapToPlaylist(fp, explorer); }) - ), - vscode.commands.registerCommand( - CMD_ADD_SCRIPT_TO_PLAYLIST, - withFilePath(async (fp) => { await addScriptToPlaylist(fp, explorer); }) - ) - ); -}, - - registerPlaylistEditCommands = ( - context: vscode.ExtensionContext, - explorer: ExplorerAdapter -): void => { - context.subscriptions.push( - vscode.commands.registerCommand( - CMD_DUPLICATE_PLAYLIST, - withFilePath(async (fp) => { await duplicatePlaylist(fp, explorer); }) - ), - vscode.commands.registerCommand( - CMD_COPY_PATH, - withFilePath(async (fp) => { await copyPath(fp); }) - ) - ); -}; - -export const registerContextMenuCommands = ( - context: vscode.ExtensionContext, - explorer: 
ExplorerAdapter -): void => { - registerScriptCommands(context, explorer); - registerPlaylistAddCommands(context, explorer); - registerPlaylistEditCommands(context, explorer); -}; diff --git a/src/Nap.VsCode/src/curlCopy.ts b/src/Nap.VsCode/src/curlCopy.ts deleted file mode 100644 index 29e3422..0000000 --- a/src/Nap.VsCode/src/curlCopy.ts +++ /dev/null @@ -1,81 +0,0 @@ -// Curl copy command — copyAsCurl and parsing helpers -// Extracted from extension.ts to keep files under 450 LOC - -import * as vscode from "vscode"; -import { - CURL_CMD_PREFIX, - DEFAULT_METHOD, - HTTP_METHODS, - MSG_COPIED, - NAP_KEY_METHOD, - NAP_KEY_URL, -} from "./constants"; - -const EQUALS_CHAR = "=", - SPACE_CHAR = " ", - - valueAfterFirstEquals = (line: string): string => { - const eqIndex = line.indexOf(EQUALS_CHAR); - return eqIndex === -1 - ? "" - : line.slice(eqIndex + 1).trim(); -}, - - matchesHttpMethodLine = ( - trimmed: string, - method: string -): boolean => - trimmed.startsWith(`${method}${SPACE_CHAR}`), - - extractMethodFromLine = ( - trimmed: string -): { readonly method: string; readonly url: string } | undefined => { - for (const m of HTTP_METHODS) { - if (matchesHttpMethodLine(trimmed, m)) { - return { method: m, url: trimmed.slice(m.length + 1).trim() }; - } - } - return undefined; -}, - - parseLine = ( - trimmed: string, - current: { method: string; url: string } -): void => { - const httpMatch = extractMethodFromLine(trimmed); - if (httpMatch !== undefined) { - current.method = httpMatch.method; - current.url = httpMatch.url; - } - if (trimmed.startsWith(NAP_KEY_METHOD) && trimmed.includes(EQUALS_CHAR)) { - current.method = valueAfterFirstEquals(trimmed); - } - if (trimmed.startsWith(NAP_KEY_URL) && trimmed.includes(EQUALS_CHAR)) { - current.url = valueAfterFirstEquals(trimmed); - } -}; - -export const parseMethodAndUrl = ( - text: string -): { readonly method: string; readonly url: string } => { - const result = { method: DEFAULT_METHOD, url: "" }, - lines = 
text.split("\n"); - for (const line of lines) { - parseLine(line.trim(), result); - } - return result; -}; - -export const copyAsCurl = async ( - uri?: vscode.Uri -): Promise<void> => { - const fileUri = uri ?? vscode.window.activeTextEditor?.document.uri; - if (fileUri === undefined) { return; } - - const doc = await vscode.workspace.openTextDocument(fileUri), - { method, url } = parseMethodAndUrl(doc.getText()), - - curl = `${CURL_CMD_PREFIX}${method} '${url}'`; - await vscode.env.clipboard.writeText(curl); - void vscode.window.showInformationMessage(MSG_COPIED); -}; diff --git a/src/Nap.VsCode/src/explorerProvider.ts b/src/Nap.VsCode/src/explorerProvider.ts deleted file mode 100644 index 49f2e3f..0000000 --- a/src/Nap.VsCode/src/explorerProvider.ts +++ /dev/null @@ -1,207 +0,0 @@ -// Tree data provider for the Explorer view -// Shows .nap and .naplist files in workspace folder structure - -import * as path from "path"; -import { - CONTEXT_FOLDER, - CONTEXT_PLAYLIST, - CONTEXT_PLAYLIST_SECTION, - CONTEXT_REQUEST_FILE, - CONTEXT_SCRIPT_FILE, - CSX_EXTENSION, - FSX_EXTENSION, - HTTP_METHODS, - NAPLIST_EXTENSION, - NAP_EXTENSION, - NAP_KEY_METHOD, - NAP_NAME_KEY_PREFIX, - NAP_NAME_KEY_SUFFIX, - PLAYLIST_SECTION_LABEL, - SECTION_STEPS, -} from "./constants"; -import { type RunResult, RunState } from "./types"; - -// Decoupled node type — no vscode dependency -export interface TreeNode { - readonly label: string; - readonly filePath: string; - readonly isDirectory: boolean; - readonly contextValue: string; - readonly httpMethod?: string; - readonly runState: RunState; - readonly children?: readonly TreeNode[]; -} - -const isScriptFile = (filePath: string): boolean => - filePath.endsWith(FSX_EXTENSION) || filePath.endsWith(CSX_EXTENSION), - - getContextValue = (filePath: string, isDirectory: boolean): string => { - if (isDirectory) { - return CONTEXT_FOLDER; - } - if (filePath.endsWith(NAPLIST_EXTENSION)) { - return CONTEXT_PLAYLIST; - } - if (isScriptFile(filePath)) { 
- return CONTEXT_SCRIPT_FILE; - } - return CONTEXT_REQUEST_FILE; -}, - - isMethodLine = (trimmed: string, method: string): boolean => - trimmed.startsWith(`${method} `) || - trimmed === `${NAP_KEY_METHOD} = ${method}` || - trimmed === `${NAP_KEY_METHOD} = ${method}`, - - extractHttpMethod = (fileContent: string): string | undefined => { - const lines = fileContent.split("\n"); - for (const line of lines) { - const trimmed = line.trim(); - if (trimmed.length === 0 || trimmed.startsWith("#")) { - continue; - } - for (const method of HTTP_METHODS) { - if (isMethodLine(trimmed, method)) { - return method; - } - } - } - return undefined; -}, - - getRunState = ( - filePath: string, - results: ReadonlyMap<string, RunResult> -): RunState => { - const result = results.get(filePath); - if (result === undefined) { - return RunState.Idle; - } - if (result.error !== undefined) { - return RunState.Error; - } - return result.passed ? RunState.Passed : RunState.Failed; -}; - -export const createFileNode = ( - filePath: string, - fileContent: string, - results: ReadonlyMap<string, RunResult>, -): TreeNode => { - const method = filePath.endsWith(NAP_EXTENSION) - ? 
extractHttpMethod(fileContent) - : undefined, - base = { - label: path.basename(filePath, path.extname(filePath)), - filePath, - isDirectory: false as const, - contextValue: getContextValue(filePath, false), - runState: getRunState(filePath, results), - }; - if (method !== undefined) { - return { ...base, httpMethod: method }; - } - return base; -}; - -export const createFolderNode = ( - folderPath: string, - children: readonly TreeNode[] -): TreeNode => ({ - label: path.basename(folderPath), - filePath: folderPath, - isDirectory: true, - contextValue: CONTEXT_FOLDER, - runState: RunState.Idle, - children, -}); - -const isSectionHeader = (trimmed: string): boolean => - trimmed.startsWith("[") && trimmed.endsWith("]"); - -export const parsePlaylistStepPaths = (content: string): readonly string[] => { - const lines = content.split("\n"); - let inSteps = false; - const steps: string[] = []; - for (const line of lines) { - const trimmed = line.trim(); - if (isSectionHeader(trimmed)) { - inSteps = trimmed === SECTION_STEPS; - continue; - } - if (!inSteps || trimmed.length === 0 || trimmed.startsWith("#")) { - continue; - } - steps.push(trimmed); - } - return steps; -}; - -export const createPlaylistNode = ( - filePath: string, - results: ReadonlyMap<string, RunResult>, - stepChildren: readonly TreeNode[] -): TreeNode => ({ - label: path.basename(filePath, path.extname(filePath)), - filePath, - isDirectory: false, - contextValue: CONTEXT_PLAYLIST, - runState: getRunState(filePath, results), - children: stepChildren, -}); - -export const createPlaylistSectionNode = ( - children: readonly TreeNode[] -): TreeNode => ({ - label: PLAYLIST_SECTION_LABEL, - filePath: "", - isDirectory: false, - contextValue: CONTEXT_PLAYLIST_SECTION, - runState: RunState.Idle, - children, -}); - -const findStepsInsertIndex = ( - lines: readonly string[] -): { readonly inSteps: boolean; readonly index: number } => { - let inSteps = false; - for (let i = 0; i < lines.length; i++) { - const line = 
lines[i]; - if (line === undefined) { continue; } - const trimmed = line.trim(); - if (trimmed === SECTION_STEPS) { - inSteps = true; - continue; - } - if (inSteps && isSectionHeader(trimmed)) { - return { inSteps: true, index: i }; - } - } - return { inSteps, index: lines.length }; -}; - -export const appendStepToPlaylist = ( - content: string, - stepPath: string -): string => { - const lines = content.split("\n"), - result = findStepsInsertIndex(lines); - if (!result.inSteps) { - return `${content}\n${SECTION_STEPS}\n${stepPath}\n`; - } - lines.splice(result.index, 0, stepPath); - return lines.join("\n"); -}; - -export const updatePlaylistName = ( - content: string, - newName: string -): string => { - const lines = content.split("\n"), - updated = lines.map((line) => - line.trim().startsWith(NAP_NAME_KEY_PREFIX) - ? `${NAP_NAME_KEY_PREFIX}${newName}${NAP_NAME_KEY_SUFFIX}` - : line - ); - return updated.join("\n"); -}; diff --git a/src/Nap.VsCode/src/extension.ts b/src/Nap.VsCode/src/extension.ts deleted file mode 100644 index 05da65a..0000000 --- a/src/Nap.VsCode/src/extension.ts +++ /dev/null @@ -1,437 +0,0 @@ -// Napper VSCode Extension — main entry point -// Registers all providers, commands, and file watchers - -import * as vscode from "vscode"; -import * as path from "path"; -import * as fs from "fs"; -import { ExplorerAdapter } from "./explorerAdapter"; -import { CodeLensProvider } from "./codeLensProvider"; -import { EnvironmentStatusBar } from "./environmentAdapter"; -import { ResponsePanel } from "./responsePanel"; -import { PlaylistPanel } from "./playlistPanel"; -import { runCli, streamCli } from "./cliRunner"; -import type { RunResult } from "./types"; -import { parsePlaylistStepPaths } from "./explorerProvider"; -import { generatePlaylistReport } from "./reportGenerator"; -import { type Logger, createLogger } from "./logger"; -import { - getCliVersion, - installCli, - installedCliPath, - isCliInstalled, - localBinaryName, -} from "./cliInstaller"; 
-import { newPlaylist, newRequest } from "./fileCreation"; -import { copyAsCurl } from "./curlCopy"; -import { importOpenApiFromFile, importOpenApiFromUrl, runAiEnrichment } from "./openApiImport"; -import { registerContextMenuCommands } from "./contextMenuCommands"; -import { registerAutoRun, registerWatchers } from "./watchers"; -import { - CLI_BIN_DIR, - CLI_ERROR_PREFIX, - CLI_INSTALL_COMPLETE_MSG, - CLI_INSTALL_FAILED_MSG, - CLI_INSTALL_MSG, - CLI_REQUIRED_VERSION, - CLI_VERSION_MISMATCH_MSG, - CMD_COPY_CURL, - CMD_ENRICH_AI, - CMD_IMPORT_OPENAPI_FILE, - CMD_IMPORT_OPENAPI_URL, - CMD_NEW_PLAYLIST, - CMD_NEW_REQUEST, - CMD_OPEN_RESPONSE, - CMD_RUN_ALL, - CMD_RUN_FILE, - CMD_SAVE_REPORT, - CMD_SWITCH_ENV, - CONFIG_CLI_PATH, - CONFIG_SECTION, - CONFIG_SPLIT_LAYOUT, - DEFAULT_CLI_PATH, - ENCODING_UTF8, - LANG_NAP, - LANG_NAPLIST, - LAYOUT_BELOW, - LAYOUT_BESIDE, - LOG_CHANNEL_NAME, - LOG_MSG_ACTIVATED, - LOG_MSG_CLI_RESULT_COUNT, - LOG_MSG_CLI_SPAWN_ERROR, - LOG_MSG_DEACTIVATED, - LOG_MSG_RUN_FILE, - LOG_MSG_RUN_PLAYLIST, - LOG_MSG_STREAM_DONE, - LOG_MSG_STREAM_RESULT, - MSG_NO_FILE_SELECTED, - MSG_NO_RESPONSE, - NAPLIST_EXTENSION, - PROP_FILE_PATH, - REPORT_FILE_EXTENSION, - REPORT_FILE_SUFFIX, - REPORT_SAVED_MSG, - STATUS_RUNNING_ICON, - STATUS_RUNNING_SUFFIX, - VIEW_EXPLORER, -} from "./constants"; - -let bundledCliPath: string | undefined, - envStatusBar: EnvironmentStatusBar, - explorerProvider: ExplorerAdapter, - installedPath: string | undefined, - lastPlaylistReport: (() => void) | undefined, - lastResult: RunResult | undefined, - logger: Logger, - - playlistPanel: PlaylistPanel, - responsePanel: ResponsePanel; - -const getCliPath = (): string => { - const config = vscode.workspace.getConfiguration(CONFIG_SECTION), - configured = config.get<string>(CONFIG_CLI_PATH, DEFAULT_CLI_PATH); - if (configured !== DEFAULT_CLI_PATH) { return configured; } - if (bundledCliPath !== undefined && isCliInstalled(bundledCliPath)) { - return bundledCliPath; - } - return 
installedPath ?? DEFAULT_CLI_PATH; -}, - - handleInstallResult = ( - result: { readonly ok: true; readonly value: { readonly cliPath: string } } - | { readonly ok: false; readonly error: string } -): void => { - if (result.ok) { - installedPath = result.value.cliPath; - logger.info(CLI_INSTALL_COMPLETE_MSG); - return; - } - logger.error(`${CLI_INSTALL_FAILED_MSG}${result.error}`); - void vscode.window.showErrorMessage( - `${CLI_INSTALL_FAILED_MSG}${result.error}` - ); -}, - - isVersionMatch = async (candidate: string): Promise<boolean> => { - const versionResult = await getCliVersion(candidate); - if (versionResult.ok && versionResult.value === CLI_REQUIRED_VERSION) { - installedPath = candidate; - return true; - } - logger.info(CLI_VERSION_MISMATCH_MSG); - return false; -}, - - ensureCliInstalled = async ( - storageUri: vscode.Uri | undefined -): Promise<void> => { - if (storageUri === undefined) { return; } - const storagePath = storageUri.fsPath, - candidate = installedCliPath(storagePath, process.platform); - if (isCliInstalled(candidate) && await isVersionMatch(candidate)) { - return; - } - await vscode.window.withProgress( - { location: vscode.ProgressLocation.Notification, title: CLI_INSTALL_MSG, cancellable: false }, - async () => { - const result = await installCli(storagePath, process.platform, process.arch); - handleInstallResult(result); - } - ); -}, - - getWorkspacePath = (): string | undefined => - vscode.workspace.workspaceFolders?.[0]?.uri.fsPath, - - getResponseColumn = (): vscode.ViewColumn => { - const config = vscode.workspace.getConfiguration(CONFIG_SECTION), - layout = config.get<string>(CONFIG_SPLIT_LAYOUT, LAYOUT_BESIDE); - return layout === LAYOUT_BELOW - ? 
vscode.ViewColumn.Active - : vscode.ViewColumn.Beside; -}, - - resolveFileUri = ( - arg?: vscode.Uri | { readonly filePath: string } -): vscode.Uri | undefined => { - if (arg === undefined) { return vscode.window.activeTextEditor?.document.uri; } - if (arg instanceof vscode.Uri) { return arg; } - if (PROP_FILE_PATH in arg) { return vscode.Uri.file(arg.filePath); } - return undefined; -}, - - makeRunningStatus = (fsPath: string): vscode.Disposable => - vscode.window.setStatusBarMessage( - `${STATUS_RUNNING_ICON}${path.basename(fsPath)}${STATUS_RUNNING_SUFFIX}` - ), - - handleStreamResult = (result: RunResult, index: number): void => { - logger.debug(`${LOG_MSG_STREAM_RESULT} ${result.file}`); - explorerProvider.updateResult(result.file, result); - lastResult = result; - playlistPanel.addResult(index, result); -}, - - savePlaylistReport = ( - playlistFile: string, - results: readonly RunResult[] -): void => { - const dir = path.dirname(playlistFile), - baseName = path.basename(playlistFile, path.extname(playlistFile)), - reportPath = path.join( - dir, - `${baseName}${REPORT_FILE_SUFFIX}${REPORT_FILE_EXTENSION}` - ), - html = generatePlaylistReport(baseName, results); - fs.writeFileSync(reportPath, html, ENCODING_UTF8); - void vscode.env.openExternal(vscode.Uri.file(reportPath)); - void vscode.window.showInformationMessage( - `${REPORT_SAVED_MSG}${path.basename(reportPath)}` - ); -}, - - currentEnvOrUndefined = (): string | undefined => { - const env = envStatusBar.currentEnv; - return env !== "" ? 
env : undefined; -}, - - preparePlaylistRun = (fileUri: vscode.Uri): void => { - logger.info(`${LOG_MSG_RUN_PLAYLIST} ${fileUri.fsPath}`); - explorerProvider.clearResults(); - const content = fs.readFileSync(fileUri.fsPath, ENCODING_UTF8), - stepPaths = parsePlaylistStepPaths(content), - stepFileNames = stepPaths.map((s) => path.basename(s)); - playlistPanel.showRunning(fileUri.fsPath, stepFileNames, getResponseColumn()); -}; - -interface StreamState { - readonly collectedResults: RunResult[]; - resultIndex: number; - streamError: string | undefined; -} - -const collectResult = (state: StreamState, result: RunResult): void => { - handleStreamResult(result, state.resultIndex); - state.collectedResults.push(result); - state.resultIndex++; -}, - - awaitStream = async ( - fileUri: vscode.Uri, - cwd: string, - state: StreamState -): Promise<void> => { - await new Promise<void>((resolve) => { - streamCli({ - cliPath: getCliPath(), - filePath: fileUri.fsPath, - env: currentEnvOrUndefined(), - cwd, - onResult: (result: RunResult) => { collectResult(state, result); }, - onDone: (error?: string) => { state.streamError = error; resolve(); }, - }); - }); -}, - - handleStreamError = ( - state: StreamState -): void => { - logger.error(`${LOG_MSG_CLI_SPAWN_ERROR} ${state.streamError}`); - playlistPanel.showError(state.streamError ?? 
""); - void vscode.window.showErrorMessage( - `${CLI_ERROR_PREFIX}${state.streamError}` - ); -}, - - handleStreamSuccess = ( - state: StreamState, - fileUri: vscode.Uri -): void => { - logger.info(LOG_MSG_STREAM_DONE); - playlistPanel.showComplete(state.collectedResults); - const doSave = (): void => { - savePlaylistReport(fileUri.fsPath, state.collectedResults); - }; - playlistPanel.onSaveReport = doSave; - lastPlaylistReport = (): void => { - savePlaylistReport(fileUri.fsPath, state.collectedResults); - }; -}, - - runPlaylistStreaming = async ( - fileUri: vscode.Uri, - cwd: string -): Promise<void> => { - preparePlaylistRun(fileUri); - const statusMsg = makeRunningStatus(fileUri.fsPath), - state: StreamState = { collectedResults: [], resultIndex: 0, streamError: undefined }; - await awaitStream(fileUri, cwd, state); - statusMsg.dispose(); - if (state.streamError !== undefined && state.collectedResults.length === 0) { - handleStreamError(state); - } else { - handleStreamSuccess(state, fileUri); - } -}, - - handleCliResults = (results: readonly RunResult[]): void => { - logger.info(`${LOG_MSG_CLI_RESULT_COUNT} ${results.length}`); - for (const r of results) { - explorerProvider.updateResult(r.file, r); - lastResult = r; - } - const [first] = results; - if (first !== undefined) { - responsePanel.show(first, getResponseColumn()); - } -}, - - runSingleFile = async ( - fileUri: vscode.Uri, - cwd: string -): Promise<void> => { - logger.info(`${LOG_MSG_RUN_FILE} ${fileUri.fsPath}`); - const statusMsg = makeRunningStatus(fileUri.fsPath), - result = await runCli({ - cliPath: getCliPath(), - filePath: fileUri.fsPath, - env: currentEnvOrUndefined(), - cwd, - }); - statusMsg.dispose(); - if (!result.ok) { - logger.error(`${LOG_MSG_CLI_SPAWN_ERROR} ${result.error}`); - void vscode.window.showErrorMessage(`${CLI_ERROR_PREFIX}${result.error}`); - return; - } - handleCliResults(result.value); -}, - - runFile = async ( - arg?: vscode.Uri | { readonly filePath: string } -): 
Promise<void> => { - const fileUri = resolveFileUri(arg); - if (fileUri === undefined) { - void vscode.window.showWarningMessage(MSG_NO_FILE_SELECTED); - return; - } - const cwd = getWorkspacePath(); - if (cwd === undefined) { return; } - if (fileUri.fsPath.endsWith(NAPLIST_EXTENSION)) { - await runPlaylistStreaming(fileUri, cwd); - } else { - await runSingleFile(fileUri, cwd); - } -}, - - runAll = async (): Promise<void> => { - const cwd = getWorkspacePath(); - if (cwd === undefined) { return; } - await runFile(vscode.Uri.file(cwd)); -}, - - openResponse = (): void => { - if (lastResult !== undefined) { - responsePanel.show(lastResult, getResponseColumn()); - } else { - void vscode.window.showInformationMessage(MSG_NO_RESPONSE); - } -}, - - - registerRunCommands = (context: vscode.ExtensionContext): void => { - context.subscriptions.push( - vscode.commands.registerCommand(CMD_RUN_FILE, runFile), - vscode.commands.registerCommand(CMD_RUN_ALL, runAll), - vscode.commands.registerCommand(CMD_COPY_CURL, copyAsCurl), - vscode.commands.registerCommand(CMD_OPEN_RESPONSE, openResponse), - vscode.commands.registerCommand(CMD_SAVE_REPORT, () => { - if (lastPlaylistReport !== undefined) { - lastPlaylistReport(); - } - }) - ); -}, - - registerEditCommands = (context: vscode.ExtensionContext): void => { - context.subscriptions.push( - vscode.commands.registerCommand(CMD_NEW_REQUEST, async () => { - await newRequest(explorerProvider); - }), - vscode.commands.registerCommand(CMD_NEW_PLAYLIST, async () => { - await newPlaylist(explorerProvider); - }), - vscode.commands.registerCommand(CMD_SWITCH_ENV, async () => { - await envStatusBar.showPicker(); - }) - ); -}, - - registerOpenApiCommands = (context: vscode.ExtensionContext): void => { - context.subscriptions.push( - vscode.commands.registerCommand(CMD_IMPORT_OPENAPI_URL, async () => { - await importOpenApiFromUrl(explorerProvider, logger); - }), - vscode.commands.registerCommand(CMD_IMPORT_OPENAPI_FILE, async () => { - await 
importOpenApiFromFile(explorerProvider, logger); - }), - vscode.commands.registerCommand(CMD_ENRICH_AI, async (arg?: { readonly filePath?: string }) => { - const fp = arg?.filePath; - if (fp === undefined) { return; } - await runAiEnrichment(path.dirname(fp), logger); - explorerProvider.refresh(); - }) - ); -}, - - initProviders = (): void => { - explorerProvider = new ExplorerAdapter(); - envStatusBar = new EnvironmentStatusBar(); - responsePanel = new ResponsePanel(); - playlistPanel = new PlaylistPanel(); -}, - - registerCodeLens = (context: vscode.ExtensionContext): void => { - const codeLens = new CodeLensProvider(); - context.subscriptions.push( - vscode.languages.registerCodeLensProvider( - [{ language: LANG_NAP }, { language: LANG_NAPLIST }], codeLens - ) - ); -}, - - initLogger = (context: vscode.ExtensionContext): void => { - const outputChannel = vscode.window.createOutputChannel(LOG_CHANNEL_NAME); - context.subscriptions.push(outputChannel); - logger = createLogger((msg) => { outputChannel.appendLine(msg); }); - logger.info(LOG_MSG_ACTIVATED); - bundledCliPath = path.join( - context.extensionPath, CLI_BIN_DIR, localBinaryName(process.platform) - ); - ensureCliInstalled(context.globalStorageUri).catch(() => undefined); -}; - -export interface ExtensionApi { - readonly explorerProvider: ExplorerAdapter; -} - -export function activate(context: vscode.ExtensionContext): ExtensionApi { - initLogger(context); - initProviders(); - context.subscriptions.push( - vscode.window.registerTreeDataProvider(VIEW_EXPLORER, explorerProvider), - vscode.window.registerFileDecorationProvider(explorerProvider), - ); - registerCodeLens(context); - registerRunCommands(context); - registerEditCommands(context); - registerOpenApiCommands(context); - registerContextMenuCommands(context, explorerProvider); - registerWatchers(context, explorerProvider, logger); - registerAutoRun(context, async (uri) => runFile(uri)); - context.subscriptions.push(envStatusBar, responsePanel, 
playlistPanel); - return { explorerProvider }; -} - -export function deactivate(): void { - logger.info(LOG_MSG_DEACTIVATED); -} diff --git a/src/Nap.VsCode/src/fileCreation.ts b/src/Nap.VsCode/src/fileCreation.ts deleted file mode 100644 index b1dfeee..0000000 --- a/src/Nap.VsCode/src/fileCreation.ts +++ /dev/null @@ -1,95 +0,0 @@ -// File creation commands — newRequest, newPlaylist -// Extracted from extension.ts to keep files under 450 LOC - -import * as vscode from "vscode"; -import * as path from "path"; -import type { ExplorerAdapter } from "./explorerAdapter"; -import { - DEFAULT_PLAYLIST_NAME, - ENCODING_UTF8, - HTTP_METHODS, - NAPLIST_EXTENSION, - NAP_EXTENSION, - NAP_NAME_KEY_PREFIX, - NAP_NAME_KEY_SUFFIX, - PLACEHOLDER_URL, - PROMPT_ENTER_URL, - PROMPT_PLAYLIST_NAME, - PROMPT_REQUEST_NAME, - PROMPT_SELECT_METHOD, - REQUEST_NAME_SUFFIX, - SECTION_META, - SECTION_STEPS, -} from "./constants"; - -const promptMethod = (): Thenable<string | undefined> => - vscode.window.showQuickPick( - HTTP_METHODS.map((m) => m), - { placeHolder: PROMPT_SELECT_METHOD } - ), - - promptUrl = (): Thenable<string | undefined> => - vscode.window.showInputBox({ - prompt: PROMPT_ENTER_URL, - placeHolder: PLACEHOLDER_URL, - }), - - promptFileName = ( - defaultValue: string -): Thenable<string | undefined> => - vscode.window.showInputBox({ - prompt: PROMPT_REQUEST_NAME, - value: defaultValue, - }), - - writeAndOpen = async ( - filePath: string, - content: string, - explorer: ExplorerAdapter -): Promise<void> => { - await vscode.workspace.fs.writeFile( - vscode.Uri.file(filePath), - Buffer.from(content, ENCODING_UTF8) - ); - const doc = await vscode.workspace.openTextDocument(filePath); - await vscode.window.showTextDocument(doc); - explorer.refresh(); -}, - - getWorkspacePath = (): string | undefined => - vscode.workspace.workspaceFolders?.[0]?.uri.fsPath; - -export const newRequest = async ( - explorer: ExplorerAdapter -): Promise<void> => { - const method = await promptMethod(); - 
if (method === undefined) { return; } - const url = await promptUrl(); - if (url === undefined) { return; } - const cwd = getWorkspacePath(); - if (cwd === undefined) { return; } - const defaultName = `${method.toLowerCase()}${REQUEST_NAME_SUFFIX}`, - name = await promptFileName(defaultName); - if (name === undefined) { return; } - - const filePath = path.join(cwd, `${name}${NAP_EXTENSION}`); - await writeAndOpen(filePath, `${method} ${url}\n`, explorer); -}; - -export const newPlaylist = async ( - explorer: ExplorerAdapter -): Promise<void> => { - const cwd = getWorkspacePath(); - if (cwd === undefined) { return; } - - const name = await vscode.window.showInputBox({ - prompt: PROMPT_PLAYLIST_NAME, - value: DEFAULT_PLAYLIST_NAME, - }); - if (name === undefined) { return; } - - const filePath = path.join(cwd, `${name}${NAPLIST_EXTENSION}`), - content = - `${SECTION_META}\n${NAP_NAME_KEY_PREFIX}${name}${NAP_NAME_KEY_SUFFIX}\n\n${SECTION_STEPS}\n`; - await writeAndOpen(filePath, content, explorer); -}; diff --git a/src/Nap.VsCode/src/openApiAiEnhancer.ts b/src/Nap.VsCode/src/openApiAiEnhancer.ts deleted file mode 100644 index 7af0e38..0000000 --- a/src/Nap.VsCode/src/openApiAiEnhancer.ts +++ /dev/null @@ -1,266 +0,0 @@ -// AI enrichment for OpenAPI-generated .nap files -// Pure functions — NO VS Code SDK dependency — fully testable - -import { type Result, err, ok } from "./types"; -import { - NAP_TRIPLE_QUOTE, - SECTION_ASSERT, - SECTION_REQUEST_BODY, - SECTION_STEPS, -} from "./constants"; - -// ─── Types ────────────────────────────────────────────────── - -export interface OperationSummary { - readonly operationId: string; - readonly method: string; - readonly path: string; - readonly summary: string; - readonly responseFields: readonly string[]; - readonly hasRequestBody: boolean; -} - -export interface AssertionEnrichment { - readonly operationId: string; - readonly assertions: readonly string[]; -} - -export interface TestDataEnrichment { - readonly 
operationId: string; - readonly requestBody: string; -} - -export interface GeneratedFile { - readonly fileName: string; - readonly content: string; -} - -export interface EnrichmentResult { - readonly files: readonly GeneratedFile[]; - readonly playlistContent: string; -} - -// ─── Prompt builders ──────────────────────────────────────── - -const ASSERTION_SYSTEM = [ - "You are an API test engineer.", - "Given API operations with their response fields,", - "suggest semantic assertions that go beyond 'exists' checks.", - "Return ONLY a JSON array.", - "Each element: { operationId: string, assertions: string[] }.", - "Assertions use napper syntax: body.field > 0, body.email contains @,", - "body.name != \"\", headers.Content-Type contains json.", - "Do NOT repeat status assertions. Only add value/format checks.", -].join(" "), - - TEST_DATA_SYSTEM = [ - "You are an API test data generator.", - "Given API operations that accept request bodies,", - "generate realistic JSON request body examples.", - "Return ONLY a JSON array.", - "Each element: { operationId: string, requestBody: string }.", - "requestBody must be a valid JSON string with realistic values.", - "Use real-looking names, emails, dates, IDs — not placeholders.", -].join(" "), - - PLAYLIST_SYSTEM = [ - "You are an API test orchestrator.", - "Given a list of test file paths, reorder them for logical flow:", - "auth/login first, then creates, then reads, then updates, then deletes.", - "Return ONLY a JSON array of the file paths in the recommended order.", -].join(" "); - -export const buildAssertionPrompt = ( - operations: readonly OperationSummary[] -): string => { - const lines = operations.map( - (op) => - `- ${op.method.toUpperCase()} ${op.path} (${op.operationId}): ` + - `response fields: [${op.responseFields.join(", ")}]` - ); - return lines.join("\n"); -}; - -export const buildTestDataPrompt = ( - operations: readonly OperationSummary[] -): string => { - const withBody = operations.filter((op) => 
op.hasRequestBody), - lines = withBody.map( - (op) => - `- ${op.method.toUpperCase()} ${op.path} (${op.operationId}): ${op.summary}` - ); - return lines.join("\n"); -}; - -export const buildPlaylistOrderPrompt = ( - filePaths: readonly string[] -): string => filePaths.join("\n"); - -export const getAssertionSystemPrompt = (): string => ASSERTION_SYSTEM; -export const getTestDataSystemPrompt = (): string => TEST_DATA_SYSTEM; -export const getPlaylistSystemPrompt = (): string => PLAYLIST_SYSTEM; - -// ─── Response parsers ─────────────────────────────────────── - -export const parseAssertionResponse = ( - json: string -): Result<readonly AssertionEnrichment[], string> => { - try { - const parsed: unknown = JSON.parse(json); - if (!Array.isArray(parsed)) { - return err("Expected JSON array for assertion enrichments"); - } - return ok(parsed as readonly AssertionEnrichment[]); - } catch { - return err("Failed to parse assertion enrichment response"); - } -}; - -export const parseTestDataResponse = ( - json: string -): Result<readonly TestDataEnrichment[], string> => { - try { - const parsed: unknown = JSON.parse(json); - if (!Array.isArray(parsed)) { - return err("Expected JSON array for test data enrichments"); - } - return ok(parsed as readonly TestDataEnrichment[]); - } catch { - return err("Failed to parse test data enrichment response"); - } -}; - -export const parsePlaylistOrderResponse = ( - json: string -): Result<readonly string[], string> => { - try { - const parsed: unknown = JSON.parse(json); - if (!Array.isArray(parsed)) { - return err("Expected JSON array for playlist order"); - } - return ok(parsed as readonly string[]); - } catch { - return err("Failed to parse playlist order response"); - } -}; - -// ─── Content enrichment (line-based, no regex) ────────────── - -const isSectionHeader = (line: string): boolean => - line.startsWith("[") && line.endsWith("]"), - - skipToNextSection = ( - lines: readonly string[], - startIdx: number -): number => { - let 
idx = startIdx; - while (idx < lines.length && !isSectionHeader(lines[idx] ?? "")) { idx++; } - return idx; -}, - - trimTrailingBlanks = ( - lines: readonly string[], - endIdx: number, - minIdx: number -): number => { - let idx = endIdx; - while (idx > minIdx && (lines[idx - 1] ?? "").trim().length === 0) { idx--; } - return idx; -}, - - findSectionEnd = ( - lines: readonly string[], - sectionHeader: string -): number => { - const sectionIdx = lines.indexOf(sectionHeader); - if (sectionIdx < 0) { return -1; } - const rawEnd = skipToNextSection(lines, sectionIdx + 1); - return trimTrailingBlanks(lines, rawEnd, sectionIdx + 1); -}; - -export const enrichAssertions = ( - napContent: string, - newAssertions: readonly string[] -): string => { - if (newAssertions.length === 0) { return napContent; } - const lines = napContent.split("\n"), - insertAt = findSectionEnd(lines, SECTION_ASSERT); - if (insertAt < 0) { return napContent; } - const before = lines.slice(0, insertAt), - after = lines.slice(insertAt); - return [...before, ...newAssertions, ...after].join("\n"); -}; - -export const enrichRequestBody = ( - napContent: string, - newBody: string -): string => { - const lines = napContent.split("\n"), - bodyIdx = lines.indexOf(SECTION_REQUEST_BODY); - if (bodyIdx < 0) { return napContent; } - // Find the triple-quote delimited body and replace it - let startQuote = -1, - endQuote = -1; - for (let i = bodyIdx + 1; i < lines.length; i++) { - if ((lines[i] ?? 
"").trim() === NAP_TRIPLE_QUOTE) { - if (startQuote < 0) { startQuote = i; } - else { endQuote = i; break; } - } - } - if (startQuote < 0 || endQuote < 0) { return napContent; } - const before = lines.slice(0, startQuote + 1), - after = lines.slice(endQuote); - return [...before, newBody, ...after].join("\n"); -}; - -export const reorderPlaylistSteps = ( - playlistContent: string, - orderedFiles: readonly string[] -): string => { - if (orderedFiles.length === 0) { return playlistContent; } - const lines = playlistContent.split("\n"), - stepsIdx = lines.indexOf(SECTION_STEPS); - if (stepsIdx < 0) { return playlistContent; } - const before = lines.slice(0, stepsIdx + 1), - newSteps = orderedFiles.map((f) => - f.startsWith("./") ? f : `./${f}` - ); - return [...before, ...newSteps, ""].join("\n"); -}; - -// ─── File-level enrichment ────────────────────────────────── - -const fileMatchesOperation = ( - file: GeneratedFile, - operationId: string -): boolean => file.content.includes(operationId); - -export const applyAssertionEnrichments = ( - files: readonly GeneratedFile[], - enrichments: readonly AssertionEnrichment[] -): readonly GeneratedFile[] => - files.map((file) => { - const match = enrichments.find((e) => - fileMatchesOperation(file, e.operationId) - ); - if (match === undefined) { return file; } - return { - fileName: file.fileName, - content: enrichAssertions(file.content, match.assertions), - }; - }); - -export const applyTestDataEnrichments = ( - files: readonly GeneratedFile[], - enrichments: readonly TestDataEnrichment[] -): readonly GeneratedFile[] => - files.map((file) => { - const match = enrichments.find((e) => - fileMatchesOperation(file, e.operationId) - ); - if (match === undefined) { return file; } - return { - fileName: file.fileName, - content: enrichRequestBody(file.content, match.requestBody), - }; - }); diff --git a/src/Nap.VsCode/src/openApiImport.ts b/src/Nap.VsCode/src/openApiImport.ts deleted file mode 100644 index 0a93c37..0000000 --- 
a/src/Nap.VsCode/src/openApiImport.ts +++ /dev/null @@ -1,474 +0,0 @@ -// OpenAPI import command — calls CLI to generate .nap files from spec -// Deterministic generation lives in F# CLI; AI enrichment is optional via Copilot - -import * as vscode from "vscode"; -import * as path from "path"; -import * as fs from "fs"; -import { execFile } from "child_process"; -import type { ExplorerAdapter } from "./explorerAdapter"; -import type { Logger } from "./logger"; -import { type Result, err, ok } from "./types"; -import * as https from "https"; -import type { IncomingMessage } from "http"; -import { - CLI_CMD_GENERATE, - CLI_FLAG_OUTPUT, - CLI_FLAG_OUTPUT_DIR, - CLI_OUTPUT_JSON, - CLI_PARSE_FAILED_PREFIX, - CLI_SPAWN_FAILED_PREFIX, - CLI_SUBCMD_OPENAPI, - CONFIG_CLI_PATH, - CONFIG_SECTION, - DEFAULT_CLI_PATH, - HTTP_STATUS_CLIENT_ERROR_MIN, - HTTP_STATUS_REDIRECT_MIN, - LOG_MSG_OPENAPI_IMPORT, - NAPLIST_EXTENSION, - NAP_EXTENSION, - OPENAPI_AI_CHOICE_BASIC, - OPENAPI_AI_CHOICE_ENHANCED, - OPENAPI_AI_CHOICE_TITLE, - OPENAPI_AI_COPILOT_FAMILY, - OPENAPI_AI_ENRICHING_ASSERTIONS, - OPENAPI_AI_ENRICHING_TEST_DATA, - OPENAPI_AI_NO_COPILOT, - OPENAPI_AI_PROGRESS_TITLE, - OPENAPI_AI_REORDERING_PLAYLIST, - OPENAPI_DOWNLOADING, - OPENAPI_DOWNLOAD_FAILED_PREFIX, - OPENAPI_ERROR_PREFIX, - OPENAPI_FILE_EXTENSIONS, - OPENAPI_FILTER_LABEL, - OPENAPI_PICK_FILE, - OPENAPI_PICK_FOLDER, - OPENAPI_SUCCESS_PREFIX, - OPENAPI_SUCCESS_SUFFIX, - OPENAPI_URL_PLACEHOLDER, - OPENAPI_URL_PROMPT, - SECTION_REQUEST_BODY, -} from "./constants"; -import { - type GeneratedFile, - type OperationSummary, - applyAssertionEnrichments, - applyTestDataEnrichments, - buildAssertionPrompt, - buildPlaylistOrderPrompt, - buildTestDataPrompt, - getAssertionSystemPrompt, - getPlaylistSystemPrompt, - getTestDataSystemPrompt, - parseAssertionResponse, - parsePlaylistOrderResponse, - parseTestDataResponse, - reorderPlaylistSteps, -} from "./openApiAiEnhancer"; - -// ─── CLI generate types 
───────────────────────────────────── - -interface GenerateResult { - readonly files: number; - readonly playlist: string; -} - -interface PickedPaths { - readonly specFile: vscode.Uri; - readonly outFolder: vscode.Uri; -} - -interface ImportContext { - readonly explorer: ExplorerAdapter; - readonly logger: Logger; -} - -interface LmRequestParams { - readonly model: vscode.LanguageModelChat; - readonly systemPrompt: string; - readonly userPrompt: string; - readonly token: vscode.CancellationToken; -} - -interface EnrichStepParams { - readonly lm: LmRequestParams; - readonly operations: readonly OperationSummary[]; - readonly files: readonly GeneratedFile[]; -} - -interface EnrichmentContext { - readonly progress: vscode.Progress<{ message?: string }>; - readonly baseParams: LmRequestParams; - readonly outDir: string; - readonly logger: Logger; -} - -const MAX_PREVIEW_LENGTH = 200, - NAME_PREFIX = "name = ", - BODY_PREFIX = "body.", - EXISTS_SUFFIX = " exists", - -// ─── CLI integration ──────────────────────────────────────── - - resolveCliPath = (): string => { - const configured = vscode.workspace - .getConfiguration(CONFIG_SECTION) - .get<string>(CONFIG_CLI_PATH, ""); - return configured.length > 0 ? configured : DEFAULT_CLI_PATH; -}, - - pickSpecFile = (): Thenable<readonly vscode.Uri[] | undefined> => - vscode.window.showOpenDialog({ - canSelectFiles: true, - canSelectFolders: false, - canSelectMany: false, - filters: { [OPENAPI_FILTER_LABEL]: [...OPENAPI_FILE_EXTENSIONS] }, - title: OPENAPI_PICK_FILE, - }), - - defaultWorkspaceUri = (): { readonly defaultUri: vscode.Uri } | Record<string, never> => { - const uri = vscode.workspace.workspaceFolders?.[0]?.uri; - return uri !== undefined ? 
{ defaultUri: uri } : {}; -}, - - pickOutputFolder = (): Thenable<readonly vscode.Uri[] | undefined> => - vscode.window.showOpenDialog({ - canSelectFiles: false, - canSelectFolders: true, - canSelectMany: false, - title: OPENAPI_PICK_FOLDER, - ...defaultWorkspaceUri(), - }), - - pickPaths = async (): Promise<PickedPaths | undefined> => { - const specFiles = await pickSpecFile(), - specFile = specFiles?.[0]; - if (specFile === undefined) { return undefined; } - const outputFolder = await pickOutputFolder(), - outFolder = outputFolder?.[0]; - if (outFolder === undefined) { return undefined; } - return { specFile, outFolder }; -}, - - buildGenerateArgs = ( - specPath: string, - outDir: string -): readonly string[] => [ - CLI_CMD_GENERATE, CLI_SUBCMD_OPENAPI, specPath, - CLI_FLAG_OUTPUT_DIR, outDir, CLI_FLAG_OUTPUT, CLI_OUTPUT_JSON, -], - - parseGenerateOutput = ( - stdout: string -): Result<GenerateResult, string> => { - try { - const parsed = JSON.parse(stdout) as GenerateResult; - return ok(parsed); - } catch { - return err(`${CLI_PARSE_FAILED_PREFIX}${stdout.slice(0, MAX_PREVIEW_LENGTH)}`); - } -}, - - callCliGenerate = async ( - specPath: string, - outDir: string -): Promise<Result<GenerateResult, string>> => - new Promise((resolve) => { - const cliPath = resolveCliPath(); - execFile( - cliPath, [...buildGenerateArgs(specPath, outDir)], - { timeout: 30_000, env: { ...process.env } }, - (error, stdout, stderr) => { - if (error !== null && stdout.length === 0) { - const msg = stderr.length > 0 ? 
` — ${stderr}` : ""; - resolve(err(`${CLI_SPAWN_FAILED_PREFIX}${cliPath}${msg}`)); - return; - } - resolve(parseGenerateOutput(stdout)); - } - ); - }), - - handleSuccess = async ( - outDir: string, - generated: GenerateResult, - ctx: ImportContext -): Promise<void> => { - ctx.logger.info(`${LOG_MSG_OPENAPI_IMPORT} ${generated.files}`); - ctx.explorer.refresh(); - const doc = await vscode.workspace.openTextDocument(path.join(outDir, generated.playlist)); - await vscode.window.showTextDocument(doc); - void vscode.window.showInformationMessage( - `${OPENAPI_SUCCESS_PREFIX}${generated.files}${OPENAPI_SUCCESS_SUFFIX}` - ); -}, - -// ─── AI choice ────────────────────────────────────────────── - - askAiChoice = async (): Promise<string | undefined> => { - const picked = await vscode.window.showQuickPick( - [{ label: OPENAPI_AI_CHOICE_BASIC }, { label: OPENAPI_AI_CHOICE_ENHANCED }], - { title: OPENAPI_AI_CHOICE_TITLE, placeHolder: OPENAPI_AI_CHOICE_TITLE } - ); - return picked?.label; -}, - -// ─── Language model helpers ───────────────────────────────── - - selectCopilotModel = async (): Promise<vscode.LanguageModelChat | undefined> => { - const models = await vscode.lm.selectChatModels({ family: OPENAPI_AI_COPILOT_FAMILY }); - return models[0]; -}, - - sendLmRequest = async ( - params: LmRequestParams -): Promise<string> => { - const messages = [ - vscode.LanguageModelChatMessage.User(`${params.systemPrompt}\n\n${params.userPrompt}`), - ], - response = await params.model.sendRequest(messages, {}, params.token), - parts: string[] = []; - for await (const chunk of response.text) { parts.push(chunk); } - return parts.join(""); -}, - -// ─── File reading helpers ─────────────────────────────────── - - collectNapFiles = ( - dir: string, - baseDir: string, - out: GeneratedFile[] -): void => { - for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { - const full = path.join(dir, entry.name); - if (entry.isDirectory()) { collectNapFiles(full, baseDir, out); } - 
else if (entry.name.endsWith(NAP_EXTENSION)) { - out.push({ fileName: path.relative(baseDir, full), content: fs.readFileSync(full, "utf-8") }); - } - } -}, - - readGeneratedFiles = (outDir: string): readonly GeneratedFile[] => { - const files: GeneratedFile[] = []; - collectNapFiles(outDir, outDir, files); - return files; -}, - -// ─── Operation extraction ─────────────────────────────────── - - HTTP_METHOD_PREFIXES = ["GET ", "POST ", "PUT ", "PATCH ", "DELETE ", "HEAD ", "OPTIONS "] as const, - - isRequestLine = (line: string): boolean => - HTTP_METHOD_PREFIXES.some((prefix) => line.startsWith(prefix)), - - extractSummary = (file: GeneratedFile): OperationSummary => { - const lines = file.content.split("\n"), - nameLine = lines.find((l) => l.startsWith(NAME_PREFIX)), - requestLine = lines.find(isRequestLine), - name = nameLine?.slice(NAME_PREFIX.length) ?? file.fileName; - return { - operationId: name, - method: requestLine?.split(" ")[0] ?? "GET", - path: requestLine?.split(" ")[1] ?? 
"", - summary: name, - responseFields: lines - .filter((l) => l.startsWith(BODY_PREFIX) && l.includes(EXISTS_SUFFIX)) - .map((l) => l.slice(BODY_PREFIX.length, l.indexOf(EXISTS_SUFFIX))), - hasRequestBody: file.content.includes(SECTION_REQUEST_BODY), - }; -}, - -// ─── Enrichment steps ─────────────────────────────────────── - - enrichAssertionStep = async ( - step: EnrichStepParams, - logger: Logger -): Promise<readonly GeneratedFile[]> => { - const response = await sendLmRequest({ - ...step.lm, systemPrompt: getAssertionSystemPrompt(), - userPrompt: buildAssertionPrompt(step.operations), - }), - result = parseAssertionResponse(response); - if (!result.ok) { logger.info(result.error); return step.files; } - return applyAssertionEnrichments(step.files, result.value); -}, - - enrichTestDataStep = async ( - step: EnrichStepParams, - logger: Logger -): Promise<readonly GeneratedFile[]> => { - const prompt = buildTestDataPrompt(step.operations); - if (prompt.length === 0) { return step.files; } - const response = await sendLmRequest({ - ...step.lm, systemPrompt: getTestDataSystemPrompt(), userPrompt: prompt, - }), - result = parseTestDataResponse(response); - if (!result.ok) { logger.info(result.error); return step.files; } - return applyTestDataEnrichments(step.files, result.value); -}, - - reorderPlaylistStep = async ( - params: LmRequestParams, - outDir: string, - fileNames: readonly string[] -): Promise<void> => { - const naplists = fs.readdirSync(outDir).filter((f) => f.endsWith(NAPLIST_EXTENSION)), - [first] = naplists; - if (first === undefined) { return; } - const playlistPath = path.join(outDir, first), - response = await sendLmRequest({ - ...params, systemPrompt: getPlaylistSystemPrompt(), - userPrompt: buildPlaylistOrderPrompt(fileNames), - }), - result = parsePlaylistOrderResponse(response); - if (!result.ok) { return; } - fs.writeFileSync(playlistPath, reorderPlaylistSteps( - fs.readFileSync(playlistPath, "utf-8"), result.value - ), "utf-8"); -}, - - 
writeEnrichedFiles = ( - outDir: string, - files: readonly GeneratedFile[] -): void => { - for (const file of files) { - fs.writeFileSync(path.join(outDir, file.fileName), file.content, "utf-8"); - } -}, - -// ─── AI enrichment orchestrator ───────────────────────────── - - executeEnrichmentSteps = async ( - ctx: EnrichmentContext -): Promise<void> => { - const files = readGeneratedFiles(ctx.outDir), - operations = files.map(extractSummary); - - ctx.progress.report({ message: OPENAPI_AI_ENRICHING_ASSERTIONS }); - let enriched = await enrichAssertionStep({ lm: ctx.baseParams, operations, files }, ctx.logger); - - ctx.progress.report({ message: OPENAPI_AI_ENRICHING_TEST_DATA }); - enriched = await enrichTestDataStep({ lm: ctx.baseParams, operations, files: enriched }, ctx.logger); - - ctx.progress.report({ message: OPENAPI_AI_REORDERING_PLAYLIST }); - await reorderPlaylistStep(ctx.baseParams, ctx.outDir, enriched.map((f) => f.fileName)); - - writeEnrichedFiles(ctx.outDir, enriched); -}; - -export const runAiEnrichment = async ( - outDir: string, - logger: Logger -): Promise<void> => { - const model = await selectCopilotModel(); - if (model === undefined) { - await vscode.window.showWarningMessage(OPENAPI_AI_NO_COPILOT); - return; - } - await vscode.window.withProgress( - { location: vscode.ProgressLocation.Notification, title: OPENAPI_AI_PROGRESS_TITLE, cancellable: true }, - async (progress, token) => { - const baseParams: LmRequestParams = { model, systemPrompt: "", userPrompt: "", token }; - await executeEnrichmentSteps({ progress, baseParams, outDir, logger }); - } - ); -}; - -// ─── URL download ─────────────────────────────────────────── - -const isRedirect = (code: number): boolean => - code >= HTTP_STATUS_REDIRECT_MIN && code < HTTP_STATUS_CLIENT_ERROR_MIN, - - isClientError = (code: number): boolean => - code >= HTTP_STATUS_CLIENT_ERROR_MIN, - - collectBody = ( - res: IncomingMessage, - resolve: (r: Result<string, string>) => void -): void => { - const 
chunks: Buffer[] = []; - res.on("data", (chunk: Buffer) => { chunks.push(chunk); }); - res.on("end", () => { resolve(ok(Buffer.concat(chunks).toString("utf-8"))); }); - res.on("error", (e) => { resolve(err(`${OPENAPI_DOWNLOAD_FAILED_PREFIX}${e.message}`)); }); -}; - -// Use function declaration for hoisting (recursive redirect) -export async function downloadSpec(url: string): Promise<Result<string, string>> { - return new Promise((resolve) => { - https.get(url, (res) => { - const status = res.statusCode ?? 0; - if (isRedirect(status) && res.headers.location !== undefined) { - downloadSpec(res.headers.location).then(resolve).catch(() => { - resolve(err(`${OPENAPI_DOWNLOAD_FAILED_PREFIX}redirect`)); - }); - return; - } - if (isClientError(status)) { resolve(err(`${OPENAPI_DOWNLOAD_FAILED_PREFIX}HTTP ${status}`)); return; } - collectBody(res, resolve); - }).on("error", (e) => { resolve(err(`${OPENAPI_DOWNLOAD_FAILED_PREFIX}${e.message}`)); }); - }); -} - -const askForUrl = async (): Promise<string | undefined> => - vscode.window.showInputBox({ - prompt: OPENAPI_URL_PROMPT, - placeHolder: OPENAPI_URL_PLACEHOLDER, - ignoreFocusOut: true, - }); - -export const saveTempSpec = (content: string, outDir: string): string => { - const specPath = path.join(outDir, ".openapi-spec.json"); - fs.writeFileSync(specPath, content, "utf-8"); - return specPath; -}; - -// ─── Shared generate + enrich flow ────────────────────────── - -const generateAndEnrich = async ( - specPath: string, - outDir: string, - ctx: ImportContext -): Promise<void> => { - const choice = await askAiChoice(); - if (choice === undefined) { return; } - const result = await callCliGenerate(specPath, outDir); - if (!result.ok) { - await vscode.window.showErrorMessage(`${OPENAPI_ERROR_PREFIX}${result.error}`); - return; - } - if (choice === OPENAPI_AI_CHOICE_ENHANCED) { - await runAiEnrichment(outDir, ctx.logger); - } - await handleSuccess(outDir, result.value, ctx); -}; - -// ─── Main entry points 
────────────────────────────────────── - -export const importOpenApiFromUrl = async ( - explorer: ExplorerAdapter, - logger: Logger -): Promise<void> => { - const url = await askForUrl(); - if (url === undefined || url.length === 0) { return; } - const outFolder = await pickOutputFolder(), - outDir = outFolder?.[0]?.fsPath; - if (outDir === undefined) { return; } - const specResult = await vscode.window.withProgress( - { location: vscode.ProgressLocation.Notification, title: OPENAPI_DOWNLOADING, cancellable: false }, - async () => downloadSpec(url) - ); - if (!specResult.ok) { - await vscode.window.showErrorMessage(`${OPENAPI_ERROR_PREFIX}${specResult.error}`); - return; - } - const specPath = saveTempSpec(specResult.value, outDir); - await generateAndEnrich(specPath, outDir, { explorer, logger }); -}; - -export const importOpenApiFromFile = async ( - explorer: ExplorerAdapter, - logger: Logger -): Promise<void> => { - const paths = await pickPaths(); - if (paths === undefined) { return; } - await generateAndEnrich(paths.specFile.fsPath, paths.outFolder.fsPath, { explorer, logger }); -}; diff --git a/src/Nap.VsCode/src/reportGenerator.ts b/src/Nap.VsCode/src/reportGenerator.ts deleted file mode 100644 index 0cae1f5..0000000 --- a/src/Nap.VsCode/src/reportGenerator.ts +++ /dev/null @@ -1,373 +0,0 @@ -// Standalone HTML report generator for playlist results -// Pure function — no VS Code SDK dependency -// Generates a beautiful, self-contained HTML file - -import * as path from "path"; -import type { RunResult } from "./types"; -import { escapeHtml, formatBodyHtml } from "./htmlUtils"; -import { REPORT_STYLES } from "./reportStyles"; -import { - NAPPER_URL, - NIMBLESITE_URL, - PERCENTAGE_MULTIPLIER, - REPORT_FOOTER_GENERATED_BY, - REPORT_FOOTER_MADE_BY, - SECTION_LABEL_REQUEST, - SECTION_LABEL_REQUEST_BODY, - SECTION_LABEL_REQUEST_HEADERS, - SECTION_LABEL_RESPONSE, - SECTION_LABEL_RESPONSE_HEADERS, -} from "./constants"; - -const buildReportAssertionRow = (a: { - 
readonly passed: boolean; - readonly target: string; - readonly expected: string; - readonly actual: string; -}): string => { - const cls = a.passed ? "pass" : "fail", - icon = a.passed ? "\u2713" : "\u2717", - detail = a.passed - ? "" - : `<span class="assertion-detail">expected: ${escapeHtml(a.expected)} | actual: ${escapeHtml(a.actual)}</span>`; - return `<div class="assertion-row ${cls}"> - <span class="assertion-icon">${icon}</span> - <span class="assertion-target">${escapeHtml(a.target)}</span> - ${detail} - </div>`; -}, - - buildReportAssertions = (result: RunResult): string => { - if (result.assertions.length === 0) {return "";} - - const rows = result.assertions - .map((a) => buildReportAssertionRow(a)) - .join("\n"); - - return `<div class="detail-section"> - <div class="detail-section-title">Assertions</div> - <div class="assertions-list">${rows}</div> - </div>`; -}, - - buildReportHeadersTable = ( - headers: Readonly<Record<string, string>> | undefined -): string => { - if (!headers) {return "";} - - return Object.entries(headers) - .map( - ([k, v]) => - `<tr><td class="h-key">${escapeHtml(k)}</td><td class="h-val">${escapeHtml(v)}</td></tr>` - ) - .join("\n"); -}, - - buildReportHeadersSection = ( - title: string, - headers: Readonly<Record<string, string>> | undefined -): string => { - const rows = buildReportHeadersTable(headers); - if (!rows) {return "";} - - return `<div class="detail-section"> - <div class="detail-section-title">${title}</div> - <table class="headers-table"> - <thead><tr><th>Header</th><th>Value</th></tr></thead> - <tbody>${rows}</tbody> - </table> - </div>`; -}, - - buildReportLog = (log: readonly string[] | undefined): string => { - if (!log || log.length === 0) {return "";} - - const lines = log.map((line) => escapeHtml(line)).join("\n"); - - return `<div class="detail-section"> - <div class="detail-section-title">Output</div> - <pre class="log-output">${lines}</pre> - </div>`; -}, - - buildReportBody = (body: string | 
undefined): string => { - if (body === undefined || body === "") {return "";} - - return `<div class="detail-section"> - <div class="detail-section-title">Response Body</div> - <pre class="code-block">${formatBodyHtml(body)}</pre> - </div>`; -}, - - buildReportRequestUrl = (result: RunResult): string => - result.requestUrl !== undefined && result.requestUrl !== "" - ? `<div class="request-url-line"><span class="request-method-tag">${escapeHtml(result.requestMethod ?? "")}</span> ${escapeHtml(result.requestUrl)}</div>` - : "", - - buildReportRequestBody = (result: RunResult): string => { - if (result.requestBody === undefined || result.requestBody === "") {return "";} - const formatted = formatBodyHtml(result.requestBody), - contentTypeHint = result.requestBodyContentType !== undefined && result.requestBodyContentType !== "" - ? `<div class="content-type-hint">${escapeHtml(result.requestBodyContentType)}</div>` - : ""; - return `<div class="detail-section"> - <div class="detail-section-title">${SECTION_LABEL_REQUEST_BODY}</div> - ${contentTypeHint} - <pre class="code-block">${formatted}</pre> - </div>`; -}, - - buildReportCollapsibleGroup = ({ - title, - content, - open, -}: { - readonly title: string; - readonly content: string; - readonly open: boolean; -}): string => - `<details class="report-group"${open ? " open" : ""}> - <summary class="report-group-summary"><span class="report-group-title">${title}</span><span class="report-group-chevron">▶</span></summary> - <div class="report-group-content">${content}</div> - </details>`, - - buildReportRequestGroup = (result: RunResult): string => { - const urlHtml = buildReportRequestUrl(result), - headersHtml = buildReportHeadersSection(SECTION_LABEL_REQUEST_HEADERS, result.requestHeaders), - bodyHtml = buildReportRequestBody(result), - content = `${urlHtml}${headersHtml}${bodyHtml}`; - - return buildReportCollapsibleGroup({ - title: SECTION_LABEL_REQUEST, - content: content !== "" ? 
content : '<span class="empty-hint">No request details</span>', - open: false, - }); -}, - - buildReportResponseGroup = (result: RunResult): string => { - const parts: string[] = []; - - if (result.assertions.length > 0) { - parts.push(buildReportAssertions(result)); - } - - const headersHtml = buildReportHeadersSection(SECTION_LABEL_RESPONSE_HEADERS, result.headers); - if (headersHtml !== "") { - parts.push(headersHtml); - } - - const bodyHtml = buildReportBody(result.body); - if (bodyHtml !== "") { - parts.push(bodyHtml); - } - - if (parts.length === 0) {return "";} - - return buildReportCollapsibleGroup({ - title: SECTION_LABEL_RESPONSE, - content: parts.join("\n"), - open: true, - }); -}, - - buildStepCardBadges = (result: RunResult, cls: string, duration: string): string => { - const httpBadge = - result.statusCode !== undefined - ? `<span class="badge http">${result.statusCode}</span>` - : "", - - durationBadge = duration !== "" - ? `<span class="badge duration">${duration}</span>` - : "", - - statusBadge = `<span class="badge status-${cls}">${result.passed ? "PASSED" : "FAILED"}</span>`; - - return `${httpBadge} - ${durationBadge} - ${statusBadge}`; -}, - - buildStepCardErrorHtml = (error: string | undefined): string => - error !== undefined && error !== "" - ? `<div class="detail-section"><div class="detail-section-title">Error</div><pre class="error-box">${escapeHtml(error)}</pre></div>` - : "", - - buildStepCardMetaHtml = (assertionText: string): string => - assertionText !== "" ? 
`<span class="step-meta-item">${assertionText}</span>` : "", - - buildStepCardHeader = (opts: { - readonly result: RunResult; - readonly index: number; - readonly cls: string; - readonly icon: string; - readonly fileName: string; - readonly assertionText: string; - readonly duration: string; -}): string => ` - <div class="step-header" onclick="toggleStep(${opts.index})"> - <div class="step-indicator ${opts.cls}">${opts.icon}</div> - <div class="step-info"> - <div class="step-name">${escapeHtml(opts.fileName)}</div> - <div class="step-meta">${buildStepCardMetaHtml(opts.assertionText)}</div> - </div> - <div class="step-badges"> - ${buildStepCardBadges(opts.result, opts.cls, opts.duration)} - </div> - <span class="step-chevron">▶</span> - </div>`, - - buildStepCardProps = (result: RunResult): { - readonly cls: string; - readonly icon: string; - readonly fileName: string; - readonly duration: string; - readonly assertionText: string; -} => { - const passedAssertions = result.assertions.filter((a) => a.passed).length, - totalAssertions = result.assertions.length; - return { - cls: result.passed ? "pass" : "fail", - icon: result.passed ? "\u2713" : "\u2717", - fileName: path.basename(result.file), - duration: result.duration !== undefined ? `${result.duration.toFixed(0)}ms` : "", - assertionText: totalAssertions > 0 ? 
`${passedAssertions}/${totalAssertions} assertions` : "", - }; -}, - - buildStepCardDetail = (result: RunResult): string => - `${buildStepCardErrorHtml(result.error)} - ${buildReportLog(result.log)} - ${buildReportRequestGroup(result)} - ${buildReportResponseGroup(result)}`, - - buildStepCard = (result: RunResult, index: number): string => { - const props = buildStepCardProps(result), - header = buildStepCardHeader({ result, index, ...props }); - return `<div class="step-card" data-index="${index}"> - ${header} - <div class="step-detail">${buildStepCardDetail(result)}</div> - </div>`; -}, - - computeReportStats = (results: readonly RunResult[]): { - readonly totalCount: number; - readonly passedCount: number; - readonly failedCount: number; - readonly totalDuration: number; - readonly allPassed: boolean; - readonly passRate: string; -} => { - const totalCount = results.length, - passedCount = results.filter((r) => r.passed).length, - failedCount = totalCount - passedCount, - totalDuration = results.reduce( - (acc, r) => acc + (r.duration ?? 0), - 0 - ), - allPassed = totalCount > 0 && failedCount === 0, - passRate = - totalCount > 0 ? 
((passedCount / totalCount) * PERCENTAGE_MULTIPLIER).toFixed(0) : "0"; - return { totalCount, passedCount, failedCount, totalDuration, allPassed, passRate }; -}, - - buildReportStatusSection = (stats: { - readonly allPassed: boolean; - readonly statusCls: string; - readonly statusText: string; - readonly statusIcon: string; -}): string => ` - <div class="status-banner ${stats.statusCls}"> - <div class="status-icon">${stats.statusIcon}</div> - <span>${stats.statusText}</span> - </div>`, - - buildStatCard = (opts: { - readonly label: string; - readonly valueCls: string; - readonly value: string; - readonly sub: string; -}): string => - `<div class="stat-card"><div class="stat-label">${opts.label}</div><div class="stat-value ${opts.valueCls}">${opts.value}</div><div class="stat-sub">${opts.sub}</div></div>`, - - buildReportStatsGrid = (stats: ReturnType<typeof computeReportStats>): string => { - const passRateCard = buildStatCard({ label: "Pass Rate", valueCls: stats.allPassed ? "pass" : "fail", value: `${stats.passRate}%`, sub: `${stats.passedCount} of ${stats.totalCount} steps` }), - passedCard = buildStatCard({ label: "Passed", valueCls: "pass", value: `${stats.passedCount}`, sub: "steps succeeded" }), - failedCls = stats.failedCount > 0 ? "fail" : "neutral", - failedCard = buildStatCard({ label: "Failed", valueCls: failedCls, value: `${stats.failedCount}`, sub: "steps failed" }), - durationVal = `${stats.totalDuration.toFixed(0)}<span style="font-size: 16px; font-weight: 400;">ms</span>`, - durationCard = buildStatCard({ label: "Duration", valueCls: "neutral", value: durationVal, sub: "total execution time" }); - return `<div class="stats-grid">${passRateCard}${passedCard}${failedCard}${durationCard}</div>`; -}, - - buildReportProgressBar = (passRate: string, allPassed: boolean): string => ` - <div class="progress-container"> - <div class="progress-bar-bg"> - <div class="progress-bar-fill ${allPassed ? 
"pass" : "mixed"}" style="width: ${passRate}%; --pass-pct: ${passRate}%;"></div> - </div> - </div>`, - - buildReportDashboard = (stats: ReturnType<typeof computeReportStats>, stepsHtml: string): string => { - const statusCls = stats.allPassed ? "passed" : "failed", - statusText = stats.allPassed ? "All Steps Passed" : "Some Steps Failed", - statusIcon = stats.allPassed ? "\u2713" : "\u2717"; - - return `<div class="dashboard"> - ${buildReportStatusSection({ allPassed: stats.allPassed, statusCls, statusText, statusIcon })} - ${buildReportStatsGrid(stats)} - ${buildReportProgressBar(stats.passRate, stats.allPassed)} - <div class="section-title">Steps (${stats.totalCount})</div> - <div class="steps-list"> - ${stepsHtml} - </div> - </div>`; -}, - - buildReportFooter = (): string => ` - <div class="footer"> - ${REPORT_FOOTER_GENERATED_BY} <a href="${NAPPER_URL}">Napper</a> · ${REPORT_FOOTER_MADE_BY} <a href="${NIMBLESITE_URL}">Nimblesite</a> - </div>`, - - buildReportHeroHtml = (playlistName: string, timestamp: string): string => ` - <div class="hero"> - <div class="hero-content"> - <div class="hero-label">Playlist Report</div> - <h1>${escapeHtml(playlistName)}</h1> - <div class="hero-timestamp">${escapeHtml(timestamp)}</div> - </div> - </div>`, - - buildReportToggleScript = (): string => ` - <script> - function toggleStep(index) { - var card = document.querySelector('.step-card[data-index="' + index + '"]'); - if (!card) return; - card.classList.toggle('open'); - } - </script>`, - - buildReportHead = (playlistName: string): string => `<head> -<meta charset="UTF-8"/> -<meta name="viewport" content="width=device-width, initial-scale=1.0"/> -<title>Napper Report — ${escapeHtml(playlistName)} - -`; - -export const generatePlaylistReport = ( - playlistName: string, - results: readonly RunResult[] -): string => { - const stats = computeReportStats(results), - stepsHtml = results.map((result, index) => buildStepCard(result, index)).join("\n"), - hero = 
buildReportHeroHtml(playlistName, new Date().toLocaleString()), - dashboard = buildReportDashboard(stats, stepsHtml); - return ` - -${buildReportHead(playlistName)} - - ${hero} - ${dashboard} - ${buildReportFooter()} - ${buildReportToggleScript()} - -`; -}; diff --git a/src/Nap.VsCode/src/test/e2e/activation.e2e.test.ts b/src/Nap.VsCode/src/test/e2e/activation.e2e.test.ts deleted file mode 100644 index 4d68def..0000000 --- a/src/Nap.VsCode/src/test/e2e/activation.e2e.test.ts +++ /dev/null @@ -1,177 +0,0 @@ -import * as assert from "assert"; -import * as fs from "fs"; -import { - activateExtension, - getExtensionPath, - getRegisteredCommands, - sleep, -} from "../helpers/helpers"; -import { - CMD_COPY_CURL, - CMD_NEW_PLAYLIST, - CMD_NEW_REQUEST, - CMD_OPEN_RESPONSE, - CMD_RUN_ALL, - CMD_RUN_FILE, - CMD_SWITCH_ENV, - NAPENV_EXTENSION, - NAPLIST_EXTENSION, - NAP_EXTENSION, - VIEW_EXPLORER, -} from "../../constants"; - -suite("Extension Activation", () => { - suiteSetup(async function () { - this.timeout(30000); - await activateExtension(); - await sleep(3000); - }); - - test("extension activates successfully", async () => { - const ctx = await activateExtension(); - assert.strictEqual( - ctx.extension.isActive, - true, - "Extension should be active" - ); - }); - - test("all commands are registered", async () => { - const commands = await getRegisteredCommands(), - - expectedCommands = [ - CMD_RUN_FILE, - CMD_RUN_ALL, - CMD_NEW_REQUEST, - CMD_NEW_PLAYLIST, - CMD_SWITCH_ENV, - CMD_COPY_CURL, - CMD_OPEN_RESPONSE, - ]; - - for (const cmd of expectedCommands) { - assert.ok( - commands.includes(cmd), - `Command ${cmd} should be registered` - ); - } - }); - - test("package.json declares all views in napper-panel container", () => { - const packageJsonPath = getExtensionPath("package.json"), - raw = fs.readFileSync(packageJsonPath, "utf-8"), - packageJson = JSON.parse(raw) as { - contributes: { - views: Record; - }; - }, - - napperPanelViews = 
packageJson.contributes.views["napper-panel"]; - assert.ok( - Array.isArray(napperPanelViews), - "napper-panel view container should exist" - ); - - const viewIds = napperPanelViews.map((v) => v.id); - assert.ok( - viewIds.includes(VIEW_EXPLORER), - "napperExplorer view should be declared" - ); - }); - - test("package.json registers all three languages", () => { - const packageJsonPath = getExtensionPath("package.json"), - raw = fs.readFileSync(packageJsonPath, "utf-8"), - packageJson = JSON.parse(raw) as { - contributes: { - languages: { id: string; extensions: string[] }[]; - }; - }, - - {languages} = packageJson.contributes, - langIds = languages.map((l) => l.id); - - assert.ok(langIds.includes("nap"), "nap language should be registered"); - assert.ok( - langIds.includes("naplist"), - "naplist language should be registered" - ); - assert.ok( - langIds.includes("napenv"), - "napenv language should be registered" - ); - - const napLang = languages.find((l) => l.id === "nap"); - assert.ok(napLang !== undefined, "nap language must be registered"); - assert.ok( - napLang.extensions.includes(NAP_EXTENSION), - ".nap extension should be associated" - ); - - const naplistLang = languages.find((l) => l.id === "naplist"); - assert.ok(naplistLang !== undefined, "naplist language must be registered"); - assert.ok( - naplistLang.extensions.includes(NAPLIST_EXTENSION), - ".naplist extension should be associated" - ); - - const napenvLang = languages.find((l) => l.id === "napenv"); - assert.ok(napenvLang !== undefined, "napenv language must be registered"); - assert.ok( - napenvLang.extensions.includes(NAPENV_EXTENSION), - ".napenv extension should be associated" - ); - }); - - test("package.json declares all configuration properties", () => { - const packageJsonPath = getExtensionPath("package.json"), - raw = fs.readFileSync(packageJsonPath, "utf-8"), - packageJson = JSON.parse(raw) as { - contributes: { - configuration: { - properties: Record; - }; - }; - }, - - props = 
packageJson.contributes.configuration.properties, - expectedKeys = [ - "napper.defaultEnvironment", - "napper.autoRunOnSave", - "napper.splitEditorLayout", - "napper.maskSecretsInPreview", - "napper.cliPath", - ]; - - for (const key of expectedKeys) { - assert.ok( - key in props, - `Configuration property ${key} should be declared` - ); - } - }); - - test("package.json declares context menu for napperExplorer", () => { - const packageJsonPath = getExtensionPath("package.json"), - raw = fs.readFileSync(packageJsonPath, "utf-8"), - packageJson = JSON.parse(raw) as { - contributes: { - menus: { - "view/item/context": { - command: string; - when: string; - }[]; - }; - }; - }, - - contextMenus = packageJson.contributes.menus["view/item/context"], - runFileMenu = contextMenus.find( - (m) => m.command === CMD_RUN_FILE - ); - assert.ok( - runFileMenu, - "runFile context menu should exist for explorer items" - ); - }); -}); diff --git a/src/Nap.VsCode/src/test/e2e/codelens.e2e.test.ts b/src/Nap.VsCode/src/test/e2e/codelens.e2e.test.ts deleted file mode 100644 index 9a36bfa..0000000 --- a/src/Nap.VsCode/src/test/e2e/codelens.e2e.test.ts +++ /dev/null @@ -1,141 +0,0 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; -import { - activateExtension, - closeAllEditors, - openDocument, - sleep, -} from "../helpers/helpers"; -import { - CMD_COPY_CURL, - CMD_RUN_FILE, -} from "../../constants"; - -suite("CodeLens", () => { - suiteSetup(async function () { - this.timeout(30000); - await activateExtension(); - await sleep(3000); - }); - - suiteTeardown(async () => { - await closeAllEditors(); - }); - - test("CodeLens appears for shorthand .nap file", async function () { - this.timeout(15000); - const doc = await openDocument("get-httpbin.nap"); - await sleep(3000); - - const lenses = await vscode.commands.executeCommand( - "vscode.executeCodeLensProvider", - doc.uri - ); - - assert.ok( - lenses.length > 0, - "Should have at least one CodeLens for shorthand .nap 
file" - ); - - const runLens = lenses.find( - (l) => l.command?.command === CMD_RUN_FILE - ); - assert.ok( - runLens, - "Should have a Run CodeLens" - ); - - const curlLens = lenses.find( - (l) => l.command?.command === CMD_COPY_CURL - ); - assert.ok( - curlLens, - "Should have a Copy as curl CodeLens" - ); - }); - - test("CodeLens appears for .nap file with [request] section", async function () { - this.timeout(15000); - const doc = await openDocument("petstore/list-pets.nap"); - await sleep(3000); - - const lenses = await vscode.commands.executeCommand( - "vscode.executeCodeLensProvider", - doc.uri - ); - - assert.ok( - lenses.length > 0, - "Should have CodeLens for [request] section" - ); - - const runLens = lenses.find( - (l) => l.command?.command === CMD_RUN_FILE - ); - assert.ok(runLens, "Run lens should exist on [request] section"); - }); - - test("CodeLens appears for POST .nap file", async function () { - this.timeout(15000); - const doc = await openDocument("post-jsonplaceholder.nap"); - await sleep(3000); - - const lenses = await vscode.commands.executeCommand( - "vscode.executeCodeLensProvider", - doc.uri - ); - - assert.ok( - lenses.length > 0, - "Should have CodeLens for POST .nap file" - ); - }); - - test("CodeLens appears for .naplist file", async function () { - this.timeout(15000); - const doc = await openDocument("petstore/smoke.naplist"); - await sleep(3000); - - const lenses = await vscode.commands.executeCommand( - "vscode.executeCodeLensProvider", - doc.uri - ); - - assert.ok( - lenses.length > 0, - "Should have CodeLens for .naplist file with [meta] section" - ); - - const runPlaylistLens = lenses.find( - (l) => l.command?.command === CMD_RUN_FILE - ); - assert.ok( - runPlaylistLens, - "Should have Run Playlist CodeLens" - ); - }); - - test("CodeLens Run lens passes document URI as argument", async function () { - this.timeout(15000); - const doc = await openDocument("get-httpbin.nap"); - await sleep(3000); - - const lenses = await 
vscode.commands.executeCommand( - "vscode.executeCodeLensProvider", - doc.uri - ), - - runLens = lenses.find( - (l) => l.command?.command === CMD_RUN_FILE - ); - assert.ok(runLens, "Run lens should exist"); - assert.ok( - runLens.command?.arguments, - "Run lens should have arguments" - ); - assert.ok( - runLens.command.arguments.length > 0, - "Run lens should pass at least one argument (the URI)" - ); - }); -}); diff --git a/src/Nap.VsCode/src/test/e2e/configuration.e2e.test.ts b/src/Nap.VsCode/src/test/e2e/configuration.e2e.test.ts deleted file mode 100644 index d538f12..0000000 --- a/src/Nap.VsCode/src/test/e2e/configuration.e2e.test.ts +++ /dev/null @@ -1,92 +0,0 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; -import { - activateExtension, - sleep, -} from "../helpers/helpers"; -import { - CONFIG_AUTO_RUN, - CONFIG_CLI_PATH, - CONFIG_DEFAULT_ENV, - CONFIG_MASK_SECRETS, - CONFIG_SECTION, - CONFIG_SPLIT_LAYOUT, - DEFAULT_CLI_PATH, -} from "../../constants"; - -suite("Configuration", () => { - suiteSetup(async function () { - this.timeout(30000); - await activateExtension(); - await sleep(3000); - }); - - test("napper configuration section exists", () => { - const config = vscode.workspace.getConfiguration(CONFIG_SECTION); - assert.notStrictEqual(config, undefined, "napper configuration section should exist"); - }); - - test("autoRunOnSave defaults to false", () => { - const config = vscode.workspace.getConfiguration(CONFIG_SECTION), - autoRun = config.get(CONFIG_AUTO_RUN); - assert.strictEqual( - autoRun, - false, - "autoRunOnSave should default to false" - ); - }); - - test("splitEditorLayout defaults to beside", () => { - const config = vscode.workspace.getConfiguration(CONFIG_SECTION), - layout = config.get(CONFIG_SPLIT_LAYOUT); - assert.strictEqual( - layout, - "beside", - "splitEditorLayout should default to 'beside'" - ); - }); - - test("maskSecretsInPreview defaults to true", () => { - const config = 
vscode.workspace.getConfiguration(CONFIG_SECTION), - mask = config.get(CONFIG_MASK_SECRETS); - assert.strictEqual( - mask, - true, - "maskSecretsInPreview should default to true" - ); - }); - - test("cliPath has a default value", () => { - const config = vscode.workspace.getConfiguration(CONFIG_SECTION), - cliPath = config.get(CONFIG_CLI_PATH); - assert.strictEqual( - cliPath, - DEFAULT_CLI_PATH, - `cliPath should default to ${DEFAULT_CLI_PATH}` - ); - }); - - test("defaultEnvironment defaults to empty string", () => { - const config = vscode.workspace.getConfiguration(CONFIG_SECTION), - env = config.get(CONFIG_DEFAULT_ENV); - assert.strictEqual( - env, - "", - "defaultEnvironment should default to empty string" - ); - }); - - test("splitEditorLayout only accepts valid values", () => { - const config = vscode.workspace.getConfiguration(CONFIG_SECTION), - inspected = config.inspect(CONFIG_SPLIT_LAYOUT); - assert.ok( - inspected, - "splitEditorLayout should be inspectable" - ); - assert.strictEqual( - inspected.defaultValue, - "beside", - "Default should be 'beside'" - ); - }); -}); diff --git a/src/Nap.VsCode/src/test/e2e/copycurl.e2e.test.ts b/src/Nap.VsCode/src/test/e2e/copycurl.e2e.test.ts deleted file mode 100644 index b158665..0000000 --- a/src/Nap.VsCode/src/test/e2e/copycurl.e2e.test.ts +++ /dev/null @@ -1,87 +0,0 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; -import { - activateExtension, - closeAllEditors, - executeCommand, - openDocument, - sleep, -} from "../helpers/helpers"; -import { CMD_COPY_CURL } from "../../constants"; - -suite("Copy as Curl", () => { - suiteSetup(async function () { - this.timeout(30000); - await activateExtension(); - await sleep(3000); - }); - - suiteTeardown(async () => { - await closeAllEditors(); - }); - - test("copy curl for shorthand GET request", async function () { - this.timeout(15000); - const doc = await openDocument("get-httpbin.nap"); - await sleep(1000); - - await 
executeCommand(CMD_COPY_CURL, doc.uri); - await sleep(1000); - - const clipboard = await vscode.env.clipboard.readText(); - assert.ok( - clipboard.includes("curl"), - "Clipboard should contain curl command" - ); - assert.ok( - clipboard.includes("httpbin.org/get"), - "Clipboard should contain the request URL" - ); - assert.ok( - clipboard.includes("GET"), - "Clipboard should contain GET method" - ); - }); - - test("copy curl for POST request with [request] section", async function () { - this.timeout(15000); - const doc = await openDocument("post-jsonplaceholder.nap"); - await sleep(1000); - - await executeCommand(CMD_COPY_CURL, doc.uri); - await sleep(1000); - - const clipboard = await vscode.env.clipboard.readText(); - assert.ok( - clipboard.includes("curl"), - "Clipboard should contain curl" - ); - assert.ok( - clipboard.includes("POST"), - "Clipboard should contain POST method" - ); - assert.ok( - clipboard.includes("jsonplaceholder.typicode.com"), - "Clipboard should contain the URL" - ); - }); - - test("copy curl for GET with [request] section", async function () { - this.timeout(15000); - const doc = await openDocument("petstore/list-pets.nap"); - await sleep(1000); - - await executeCommand(CMD_COPY_CURL, doc.uri); - await sleep(1000); - - const clipboard = await vscode.env.clipboard.readText(); - assert.ok( - clipboard.includes("curl"), - "Clipboard should contain curl" - ); - assert.ok( - clipboard.includes("petstore.swagger.io"), - "Clipboard should contain petstore URL" - ); - }); -}); diff --git a/src/Nap.VsCode/src/test/e2e/csx-scripts.e2e.test.ts b/src/Nap.VsCode/src/test/e2e/csx-scripts.e2e.test.ts deleted file mode 100644 index 8cd0123..0000000 --- a/src/Nap.VsCode/src/test/e2e/csx-scripts.e2e.test.ts +++ /dev/null @@ -1,570 +0,0 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; -import * as fs from "fs"; -import * as path from "path"; -import { - activateExtension, - closeAllEditors, - executeCommand, - extractStepLines, - 
getFixturePath, - openDocument, - sleep, - waitForCondition, -} from "../helpers/helpers"; -import { - CMD_RUN_FILE, - CMD_SAVE_REPORT, - PLAYLIST_PANEL_TITLE, - REPORT_FILE_EXTENSION, - REPORT_FILE_SUFFIX, - RESPONSE_PANEL_TITLE, -} from "../../constants"; - -const findTabByLabel = (label: string): vscode.Tab | undefined => - vscode.window.tabGroups.all - .flatMap((g) => g.tabs) - .find((tab) => tab.label.includes(label)), - - countTabsByLabel = (label: string): number => - vscode.window.tabGroups.all - .flatMap((g) => g.tabs) - .filter((t) => t.label.includes(label)).length; - -suite("CSX Script Edge Cases — Real Execution", () => { - suiteSetup(async function () { - this.timeout(30000); - await activateExtension(); - await sleep(3000); - }); - - suiteTeardown(async () => { - await closeAllEditors(); - }); - - // ── CSX-only playlist (no .nap requests at all) ────────────────────── - - test("csx-only playlist opens panel and completes successfully", async function () { - this.timeout(60000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("petstore/csx-only.naplist"); - assert.strictEqual( - doc.languageId, - "naplist", - "csx-only.naplist must have naplist language mode" - ); - - const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); - - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - playlistTab, - `Tab '${PLAYLIST_PANEL_TITLE}' must open for a playlist containing only .csx scripts` - ); - - const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTab, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear — .csx-only playlist uses playlist panel` - ); - - await runPromise; - - const panelAfter = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - panelAfter, - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after csx-only playlist completes` - ); - }); - - 
test("csx-only playlist contains no .nap steps and all scripts exist", () => { - const playlistPath = getFixturePath("petstore/csx-only.naplist"), - content = fs.readFileSync(playlistPath, "utf-8"); - - assert.ok(content.includes("[meta]"), "Must have [meta] section"); - assert.ok(content.includes("[steps]"), "Must have [steps] section"); - assert.ok( - content.includes("echo.csx"), - "Must reference echo.csx" - ); - assert.ok( - content.includes("multi-output.csx"), - "Must reference multi-output.csx" - ); - - const scriptsDir = getFixturePath("scripts"); - assert.ok( - fs.existsSync(path.join(scriptsDir, "echo.csx")), - "echo.csx must exist" - ); - assert.ok( - fs.existsSync(path.join(scriptsDir, "multi-output.csx")), - "multi-output.csx must exist" - ); - }); - - // ── Failing script — extension must not crash ──────────────────────── - - test("playlist with failing csx script opens panel and completes without crashing", async function () { - this.timeout(60000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("petstore/csx-fail.naplist"); - assert.strictEqual( - doc.languageId, - "naplist", - "csx-fail.naplist must have naplist language mode" - ); - - const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); - - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - assert.ok( - findTabByLabel(PLAYLIST_PANEL_TITLE), - `Tab '${PLAYLIST_PANEL_TITLE}' must open even when playlist contains a failing .csx script` - ); - - // The run must resolve — a failing script must not hang the extension - await runPromise; - - assert.ok( - findTabByLabel(PLAYLIST_PANEL_TITLE), - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after playlist with failing script completes` - ); - - assert.strictEqual( - findTabByLabel(RESPONSE_PANEL_TITLE), - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear — even failed playlists use playlist panel` - ); - }); - - test("csx-fail.naplist fixture has failing 
script and valid steps", () => { - const playlistPath = getFixturePath("petstore/csx-fail.naplist"), - content = fs.readFileSync(playlistPath, "utf-8"); - - assert.ok(content.includes("fail.csx"), "Must reference fail.csx"); - assert.ok(content.includes("echo.csx"), "Must reference echo.csx"); - assert.ok(content.includes("list-pets.nap"), "Must reference list-pets.nap"); - - const scriptsDir = getFixturePath("scripts"); - assert.ok( - fs.existsSync(path.join(scriptsDir, "fail.csx")), - "fail.csx fixture must exist" - ); - - const failContent = fs.readFileSync( - path.join(scriptsDir, "fail.csx"), - "utf-8" - ); - assert.ok( - failContent.includes("Environment.Exit(1)"), - "fail.csx must exit with non-zero code" - ); - }); - - // ── Compilation error — extension must handle gracefully ───────────── - - test("playlist with compilation-error csx opens panel and completes without crashing", async function () { - this.timeout(60000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("petstore/csx-compile-error.naplist"); - assert.strictEqual( - doc.languageId, - "naplist", - "csx-compile-error.naplist must have naplist language mode" - ); - - const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); - - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - assert.ok( - findTabByLabel(PLAYLIST_PANEL_TITLE), - `Tab '${PLAYLIST_PANEL_TITLE}' must open even when playlist contains a .csx with compilation errors` - ); - - // Must not hang — compilation errors should produce a failed result, not block forever - await runPromise; - - assert.ok( - findTabByLabel(PLAYLIST_PANEL_TITLE), - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after playlist with compilation-error script` - ); - }); - - test("csx-compile-error.naplist fixture has script with type error", () => { - const playlistPath = getFixturePath("petstore/csx-compile-error.naplist"), - content = fs.readFileSync(playlistPath, "utf-8"); - - 
assert.ok( - content.includes("compile-error.csx"), - "Must reference compile-error.csx" - ); - - const scriptsDir = getFixturePath("scripts"), - scriptContent = fs.readFileSync( - path.join(scriptsDir, "compile-error.csx"), - "utf-8" - ); - - // The script assigns a string to an int — guaranteed compilation failure - assert.ok( - scriptContent.includes("int x"), - "compile-error.csx must declare an int variable" - ); - }); - - // ── Multiple CSX scripts interleaved with .nap requests ────────────── - - test("playlist with multiple csx scripts interleaved with requests completes", async function () { - this.timeout(90000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("petstore/csx-multi.naplist"); - assert.strictEqual( - doc.languageId, - "naplist", - "csx-multi.naplist must have naplist language mode" - ); - - const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); - - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - assert.ok( - findTabByLabel(PLAYLIST_PANEL_TITLE), - `Tab '${PLAYLIST_PANEL_TITLE}' must open for multi-script interleaved playlist` - ); - - await runPromise; - - assert.ok( - findTabByLabel(PLAYLIST_PANEL_TITLE), - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after multi-script interleaved playlist` - ); - - assert.strictEqual( - findTabByLabel(RESPONSE_PANEL_TITLE), - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear for interleaved playlist` - ); - }); - - test("csx-multi.naplist has 5 steps mixing scripts and requests", () => { - const playlistPath = getFixturePath("petstore/csx-multi.naplist"), - content = fs.readFileSync(playlistPath, "utf-8"), - lines = content.split("\n"); - - let inSteps = false; - const steps: string[] = []; - for (const line of lines) { - const trimmed = line.trim(); - if (trimmed === "[steps]") { - inSteps = true; - continue; - } - if (trimmed.startsWith("[") && trimmed.endsWith("]")) { - inSteps = false; - continue; - } - 
if (inSteps && trimmed.length > 0) { - steps.push(trimmed); - } - } - - assert.strictEqual(steps.length, 5, "csx-multi must have exactly 5 steps"); - - const csxSteps = steps.filter((s) => s.endsWith(".csx")), - napSteps = steps.filter((s) => s.endsWith(".nap")); - assert.strictEqual(csxSteps.length, 3, "Must have 3 .csx script steps"); - assert.strictEqual(napSteps.length, 2, "Must have 2 .nap request steps"); - }); - - // ── Slow script — panel opens before script finishes ───────────────── - - test("slow csx script: panel opens immediately, run eventually completes", async function () { - this.timeout(90000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("petstore/csx-slow.naplist"), - - runPromise = executeCommand(CMD_RUN_FILE, doc.uri); - - // Panel must appear within 2s — the slow script takes 3s+ - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 2000 - ); - - assert.ok( - findTabByLabel(PLAYLIST_PANEL_TITLE), - `Tab '${PLAYLIST_PANEL_TITLE}' must open BEFORE slow .csx script finishes` - ); - - // Now wait for the full run to complete - await runPromise; - - assert.ok( - findTabByLabel(PLAYLIST_PANEL_TITLE), - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after slow script completes` - ); - }); - - // ── Re-run csx-only playlist reuses panel ──────────────────────────── - - test("re-running csx-only playlist reuses panel, no duplicates", async function () { - this.timeout(120000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("petstore/csx-only.naplist"); - - // First run - await executeCommand(CMD_RUN_FILE, doc.uri); - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - assert.ok( - findTabByLabel(PLAYLIST_PANEL_TITLE), - "Playlist panel must exist after first csx-only run" - ); - - // Second run - const secondRunPromise = executeCommand(CMD_RUN_FILE, doc.uri); - await waitForCondition( - () => 
findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 2000 - ); - - assert.strictEqual( - countTabsByLabel(PLAYLIST_PANEL_TITLE), - 1, - "Only ONE playlist panel tab must exist during re-run — panel must be reused" - ); - - await secondRunPromise; - - assert.strictEqual( - countTabsByLabel(PLAYLIST_PANEL_TITLE), - 1, - "Only ONE playlist panel tab must exist after re-run completes" - ); - }); - - // ── Save report after failed playlist ──────────────────────────────── - - test("save report works after playlist with failing csx script", async function () { - this.timeout(60000); - await closeAllEditors(); - await sleep(500); - - const playlistPath = getFixturePath("petstore/csx-fail.naplist"), - expectedReportPath = path.join( - path.dirname(playlistPath), - `csx-fail${REPORT_FILE_SUFFIX}${REPORT_FILE_EXTENSION}` - ); - - if (fs.existsSync(expectedReportPath)) { - fs.unlinkSync(expectedReportPath); - } - - const doc = await openDocument("petstore/csx-fail.naplist"); - await executeCommand(CMD_RUN_FILE, doc.uri); - - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - // Wait for run to fully complete before saving report - await sleep(15000); - - await executeCommand(CMD_SAVE_REPORT); - - // Report file must be created even when playlist contains failures - await waitForCondition( - () => fs.existsSync(expectedReportPath), - 5000 - ); - - assert.ok( - fs.existsSync(expectedReportPath), - `Report must be created at ${expectedReportPath} even when playlist has failing scripts` - ); - - const reportContent = fs.readFileSync(expectedReportPath, "utf-8"); - assert.ok( - reportContent.includes(" findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - // Wait for run to complete - await sleep(15000); - - await executeCommand(CMD_SAVE_REPORT); - - await waitForCondition( - () => fs.existsSync(expectedReportPath), - 5000 - ); - - assert.ok( - fs.existsSync(expectedReportPath), - `Report must be created for csx-only 
playlist` - ); - - const reportContent = fs.readFileSync(expectedReportPath, "utf-8"); - assert.ok( - reportContent.includes(" { - const scriptsDir = getFixturePath("scripts"), - - expectedScripts = [ - "echo.csx", - "fail.csx", - "compile-error.csx", - "multi-output.csx", - "slow.csx", - ]; - - for (const script of expectedScripts) { - const scriptPath = path.join(scriptsDir, script); - assert.ok( - fs.existsSync(scriptPath), - `Fixture script ${script} must exist` - ); - - const content = fs.readFileSync(scriptPath, "utf-8"); - assert.ok( - content.trim().length > 0, - `Fixture script ${script} must not be empty` - ); - } - }); - - test("all csx edge-case naplist fixtures exist and have valid structure", () => { - const petstoreDir = getFixturePath("petstore"), - - expectedPlaylists = [ - "csx-only.naplist", - "csx-fail.naplist", - "csx-compile-error.naplist", - "csx-multi.naplist", - "csx-slow.naplist", - ]; - - for (const playlist of expectedPlaylists) { - const playlistPath = path.join(petstoreDir, playlist); - assert.ok( - fs.existsSync(playlistPath), - `Fixture playlist ${playlist} must exist` - ); - - const content = fs.readFileSync(playlistPath, "utf-8"); - assert.ok( - content.includes("[meta]"), - `${playlist} must have [meta] section` - ); - assert.ok( - content.includes("[steps]"), - `${playlist} must have [steps] section` - ); - } - }); - - test("all naplist step file references resolve to existing files", () => { - const petstoreDir = getFixturePath("petstore"), - - playlists = [ - "csx-only.naplist", - "csx-fail.naplist", - "csx-compile-error.naplist", - "csx-multi.naplist", - "csx-slow.naplist", - ]; - - for (const playlist of playlists) { - const playlistPath = path.join(petstoreDir, playlist), - content = fs.readFileSync(playlistPath, "utf-8"), - stepLines = extractStepLines(content); - - for (const step of stepLines) { - const resolved = path.resolve(petstoreDir, step); - assert.ok( - fs.existsSync(resolved), - `Step '${step}' in ${playlist} 
must resolve to existing file: ${resolved}` - ); - } - } - }); -}); diff --git a/src/Nap.VsCode/src/test/e2e/environment.e2e.test.ts b/src/Nap.VsCode/src/test/e2e/environment.e2e.test.ts deleted file mode 100644 index 7495669..0000000 --- a/src/Nap.VsCode/src/test/e2e/environment.e2e.test.ts +++ /dev/null @@ -1,90 +0,0 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; -import * as fs from "fs"; -import { - activateExtension, - getExtensionPath, - getFixturePath, - sleep, -} from "../helpers/helpers"; -import { - CMD_SWITCH_ENV, - CONFIG_DEFAULT_ENV, - CONFIG_SECTION, - NAPENV_EXTENSION, -} from "../../constants"; - -suite("Environment Switching", () => { - suiteSetup(async function () { - this.timeout(30000); - await activateExtension(); - await sleep(3000); - }); - - test(".napenv file exists in test workspace", () => { - const envPath = getFixturePath(`petstore/${NAPENV_EXTENSION}`); - assert.ok( - fs.existsSync(envPath), - ".napenv file should exist in petstore fixture" - ); - }); - - test(".napenv.staging file exists for multi-env testing", () => { - const envPath = getFixturePath("petstore/.napenv.staging"); - assert.ok( - fs.existsSync(envPath), - ".napenv.staging file should exist" - ); - }); - - test(".napenv file contains environment variables", () => { - const envPath = getFixturePath(`petstore/${NAPENV_EXTENSION}`), - content = fs.readFileSync(envPath, "utf-8"); - assert.ok( - content.includes("baseUrl"), - ".napenv should define baseUrl variable" - ); - assert.ok( - content.includes("petId"), - ".napenv should define petId variable" - ); - }); - - test("configuration property for defaultEnvironment is readable", () => { - const config = vscode.workspace.getConfiguration(CONFIG_SECTION), - envValue = config.get(CONFIG_DEFAULT_ENV); - assert.ok( - envValue !== undefined, - "defaultEnvironment config should be readable (may be empty string)" - ); - }); - - test("switchEnvironment command is registered", async () => { - const commands 
= await vscode.commands.getCommands(true); - assert.ok( - commands.includes(CMD_SWITCH_ENV), - "switchEnvironment command should be registered" - ); - }); - - test("package.json declares defaultEnvironment configuration", () => { - const packageJsonPath = getExtensionPath("package.json"), - raw = fs.readFileSync(packageJsonPath, "utf-8"), - packageJson = JSON.parse(raw) as { - contributes: { - configuration: { - properties: Record; - }; - }; - }, - - envProp = - packageJson.contributes.configuration.properties["napper.defaultEnvironment"]; - assert.ok(envProp, "defaultEnvironment property should exist"); - assert.strictEqual( - envProp.type, - "string", - "defaultEnvironment should be a string type" - ); - }); -}); diff --git a/src/Nap.VsCode/src/test/e2e/explorer.e2e.test.ts b/src/Nap.VsCode/src/test/e2e/explorer.e2e.test.ts deleted file mode 100644 index 94d13f4..0000000 --- a/src/Nap.VsCode/src/test/e2e/explorer.e2e.test.ts +++ /dev/null @@ -1,255 +0,0 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; -import * as fs from "fs"; -import { - activateExtension, - closeAllEditors, - deleteFixtureFile, - getFixturePath, - openDocument, - sleep, - writeFixtureFile, -} from "../helpers/helpers"; -import type { ExtensionApi } from "../../extension"; -import type { TreeNode } from "../../explorerProvider"; -import { - CONTEXT_PLAYLIST, - CONTEXT_PLAYLIST_SECTION, - CONTEXT_REQUEST_FILE, -} from "../../constants"; - -const EXTENSION_ID = "nimblesite.napper", - - getExplorerProvider = (): ExtensionApi["explorerProvider"] => { - const ext = vscode.extensions.getExtension(EXTENSION_ID); - if (!ext) {throw new Error(`Extension ${EXTENSION_ID} not found`);} - return ext.exports.explorerProvider; -}, - - findNodeByLabel = ( - nodes: readonly TreeNode[], - label: string -): TreeNode | undefined => - nodes.find((n: TreeNode) => n.label === label); - -suite("Explorer Tree View", () => { - suiteSetup(async function () { - this.timeout(30000); - await 
activateExtension(); - await sleep(3000); - }); - - suiteTeardown(async () => { - await closeAllEditors(); - }); - - test("workspace contains .nap fixture files", () => { - const httpbinPath = getFixturePath("get-httpbin.nap"); - assert.ok( - fs.existsSync(httpbinPath), - "get-httpbin.nap fixture should exist in workspace" - ); - - const postPath = getFixturePath("post-jsonplaceholder.nap"); - assert.ok( - fs.existsSync(postPath), - "post-jsonplaceholder.nap fixture should exist" - ); - }); - - test("workspace contains petstore subfolder with .nap files", () => { - const listPetsPath = getFixturePath("petstore/list-pets.nap"); - assert.ok( - fs.existsSync(listPetsPath), - "petstore/list-pets.nap should exist" - ); - - const getPetPath = getFixturePath("petstore/get-pet.nap"); - assert.ok( - fs.existsSync(getPetPath), - "petstore/get-pet.nap should exist" - ); - }); - - test("workspace contains .naplist file", () => { - const playlistPath = getFixturePath("petstore/smoke.naplist"); - assert.ok( - fs.existsSync(playlistPath), - "petstore/smoke.naplist should exist" - ); - - const content = fs.readFileSync(playlistPath, "utf-8"); - assert.ok( - content.includes("[steps]"), - "Playlist should have [steps] section" - ); - assert.ok( - content.includes("list-pets.nap"), - "Playlist should reference list-pets.nap" - ); - }); - - test("opening a .nap file sets correct language mode", async function () { - this.timeout(10000); - const doc = await openDocument("get-httpbin.nap"); - assert.strictEqual( - doc.languageId, - "nap", - "Language should be nap for .nap files" - ); - }); - - test("opening a .naplist file sets correct language mode", async function () { - this.timeout(10000); - const doc = await openDocument("petstore/smoke.naplist"); - assert.strictEqual( - doc.languageId, - "naplist", - "Language should be naplist for .naplist files" - ); - }); - - test("file watcher detects new .nap file creation", async function () { - this.timeout(15000); - const testFileName = 
"temp-watcher-test.nap"; - - writeFixtureFile(testFileName, "GET https://httpbin.org/status/200\n"); - await sleep(2000); - - const filePath = getFixturePath(testFileName); - assert.ok( - fs.existsSync(filePath), - "Newly created .nap file should exist" - ); - - deleteFixtureFile(testFileName); - await sleep(1000); - }); - - test(".nap file content is readable and valid", async function () { - this.timeout(10000); - const doc = await openDocument("post-jsonplaceholder.nap"), - text = doc.getText(); - - assert.ok( - text.includes("[request]"), - "Should have [request] section" - ); - assert.ok( - text.includes("[assert]"), - "Should have [assert] section" - ); - assert.ok( - text.includes("jsonplaceholder.typicode.com"), - "Should contain the API URL" - ); - }); - - test("nested playlist in tree view expands to show its own children", function () { - this.timeout(10000); - const provider = getExplorerProvider(), - rootNodes = provider.getChildren(), - - // Find the Playlists section - playlistSection = rootNodes.find( - (n) => n.contextValue === CONTEXT_PLAYLIST_SECTION - ); - assert.ok( - playlistSection, - "Tree must have a Playlists section" - ); - assert.ok( - playlistSection.children && playlistSection.children.length > 0, - "Playlists section must have children" - ); - - // Find full.naplist — it references smoke.naplist (nested) and get-pet.nap - const fullPlaylist = findNodeByLabel(playlistSection.children, "full"); - assert.ok( - fullPlaylist, - "Playlists section must contain 'full' playlist (from full.naplist)" - ); - assert.strictEqual( - fullPlaylist.contextValue, - CONTEXT_PLAYLIST, - "full playlist must have playlist context" - ); - assert.ok( - fullPlaylist.children && fullPlaylist.children.length > 0, - "full playlist must have children (its steps)" - ); - - // The nested smoke.naplist step must itself be a playlist with children - const smokeChild = findNodeByLabel(fullPlaylist.children, "smoke"); - assert.ok( - smokeChild, - "full playlist must 
contain 'smoke' as a child (the nested .naplist)" - ); - assert.strictEqual( - smokeChild.contextValue, - CONTEXT_PLAYLIST, - "Nested smoke.naplist must have playlist context, not requestFile" - ); - assert.ok( - smokeChild.children && smokeChild.children.length > 0, - "Nested smoke.naplist MUST have its own children — it must be expandable" - ); - - // Verify smoke's children are the actual .nap step files - const smokeChildLabels = smokeChild.children.map((c) => c.label); - assert.ok( - smokeChildLabels.includes("list-pets"), - "Nested smoke playlist must contain list-pets step" - ); - assert.ok( - smokeChildLabels.includes("get-pet"), - "Nested smoke playlist must contain get-pet step" - ); - - // The get-pet.nap direct child of full.naplist is a leaf (not a playlist) - const getPetChild = findNodeByLabel(fullPlaylist.children, "get-pet"); - assert.ok( - getPetChild, - "full playlist must also contain 'get-pet' as a direct step" - ); - assert.strictEqual( - getPetChild.contextValue, - CONTEXT_REQUEST_FILE, - "get-pet.nap must be a requestFile (leaf node)" - ); - }); - - test("nested playlist in file tree also expands with children", function () { - this.timeout(10000); - const provider = getExplorerProvider(), - rootNodes = provider.getChildren(), - - // Find the petstore folder in the file tree - petstoreFolder = findNodeByLabel(rootNodes, "petstore"); - assert.ok(petstoreFolder, "File tree must contain petstore folder"); - - const petstoreChildren = provider.getChildren(petstoreFolder), - - // Find full.naplist in the petstore folder - fullNode = findNodeByLabel(petstoreChildren, "full"); - assert.ok( - fullNode, - "petstore folder must contain 'full' playlist node" - ); - assert.ok( - fullNode.children && fullNode.children.length > 0, - "full playlist in file tree must have expandable children" - ); - - // The nested smoke.naplist must be a playlist with its own children - const smokeInFileTree = findNodeByLabel(fullNode.children, "smoke"); - assert.ok( - 
smokeInFileTree, - "full playlist in file tree must contain nested 'smoke' playlist" - ); - assert.ok( - smokeInFileTree.children && smokeInFileTree.children.length > 0, - "Nested smoke.naplist in file tree MUST expand to show its own children" - ); - }); -}); diff --git a/src/Nap.VsCode/src/test/e2e/openApiImport.e2e.test.ts b/src/Nap.VsCode/src/test/e2e/openApiImport.e2e.test.ts deleted file mode 100644 index 158d0b1..0000000 --- a/src/Nap.VsCode/src/test/e2e/openApiImport.e2e.test.ts +++ /dev/null @@ -1,262 +0,0 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; -import * as fs from "fs"; -import * as path from "path"; -import * as os from "os"; -import { execFile } from "child_process"; -import { - activateExtension, - getRegisteredCommands, - readFixtureFile, -} from "../helpers/helpers"; -import { downloadSpec, saveTempSpec } from "../../openApiImport"; -import { - BASE_URL_KEY, - CLI_CMD_GENERATE, - CLI_FLAG_OUTPUT, - CLI_FLAG_OUTPUT_DIR, - CLI_OUTPUT_JSON, - CLI_SPAWN_FAILED_PREFIX, - CLI_SUBCMD_OPENAPI, - CMD_IMPORT_OPENAPI_FILE, - CMD_IMPORT_OPENAPI_URL, - CONFIG_CLI_PATH, - CONFIG_SECTION, - DEFAULT_CLI_PATH, - ENCODING_UTF8, - NAPENV_EXTENSION, - NAP_EXTENSION, - OPENAPI_DOWNLOAD_FAILED_PREFIX, - OPENAPI_URL_PLACEHOLDER, - SECTION_ASSERT, - SECTION_META, - SECTION_REQUEST, - SECTION_STEPS, -} from "../../constants"; - -const PETSTORE_URL = OPENAPI_URL_PLACEHOLDER, - NONEXISTENT_URL = "https://httpbin.org/status/404", - TEMP_SPEC_FILENAME = ".openapi-spec.json"; - -suite("OpenAPI Import", () => { - suiteSetup(async function () { - this.timeout(30_000); - await activateExtension(); - }); - - test("import URL command is registered", async () => { - const commands = await getRegisteredCommands(); - assert.ok( - commands.includes(CMD_IMPORT_OPENAPI_URL), - `Command ${CMD_IMPORT_OPENAPI_URL} should be registered` - ); - }); - - test("import file command is registered", async () => { - const commands = await getRegisteredCommands(); - 
assert.ok( - commands.includes(CMD_IMPORT_OPENAPI_FILE), - `Command ${CMD_IMPORT_OPENAPI_FILE} should be registered` - ); - }); - - test("downloadSpec fetches valid OpenAPI from petstore URL", async function () { - this.timeout(30_000); - const result = await downloadSpec(PETSTORE_URL); - assert.ok(result.ok, "Download should succeed"); - const parsed: unknown = JSON.parse(result.value), - spec = parsed as { openapi?: string; paths?: Record }; - assert.ok( - spec.openapi !== undefined, - "Downloaded spec must have an openapi version field" - ); - assert.ok( - spec.paths !== undefined, - "Downloaded spec must have paths" - ); - assert.ok( - Object.keys(spec.paths ?? {}).length > 0, - "Downloaded spec must have at least one path" - ); - }); - - test("downloadSpec returns error for 404 URL", async function () { - this.timeout(15_000); - const result = await downloadSpec(NONEXISTENT_URL); - assert.ok(!result.ok, "Download should fail for 404"); - assert.ok( - result.error.startsWith(OPENAPI_DOWNLOAD_FAILED_PREFIX), - `Error should start with download failed prefix, got: ${result.error}` - ); - }); - - test("downloadSpec follows redirects", async function () { - this.timeout(15_000); - const redirectUrl = "https://httpbin.org/redirect-to?url=https%3A%2F%2Fpetstore3.swagger.io%2Fapi%2Fv3%2Fopenapi.json&status_code=302", - result = await downloadSpec(redirectUrl); - assert.ok(result.ok, "Download should succeed after redirect"); - const parsed: unknown = JSON.parse(result.value), - spec = parsed as { openapi?: string }; - assert.ok( - spec.openapi !== undefined, - "Redirected spec must have openapi version field" - ); - }); - - test("saveTempSpec writes file and returns path", () => { - const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "napper-test-")), - content = '{"openapi":"3.0.0","paths":{}}', - specPath = saveTempSpec(content, tmpDir); - assert.ok( - fs.existsSync(specPath), - "Temp spec file must exist after save" - ); - assert.ok( - 
specPath.endsWith(TEMP_SPEC_FILENAME), - `Spec path must end with ${TEMP_SPEC_FILENAME}` - ); - const written = fs.readFileSync(specPath, "utf-8"); - assert.strictEqual( - written, - content, - "Written content must match input" - ); - // Cleanup - fs.rmSync(tmpDir, { recursive: true }); - }); - - test("saveTempSpec overwrites existing file", () => { - const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "napper-test-")), - first = '{"openapi":"3.0.0"}', - second = '{"openapi":"3.1.0","paths":{"/pets":{}}}'; - saveTempSpec(first, tmpDir); - const specPath = saveTempSpec(second, tmpDir), - written = fs.readFileSync(specPath, "utf-8"); - assert.strictEqual( - written, - second, - "Second write must overwrite the first" - ); - fs.rmSync(tmpDir, { recursive: true }); - }); -}); - -// ─── CLI generate openapi E2E ──────────────────────────────── - -const ECOMMERCE_SPEC_FIXTURE = "ecommerce-spec.json", - EXPECTED_ENDPOINT_COUNT = 11, - - resolveCliPath = (): string => { - const configured = vscode.workspace - .getConfiguration(CONFIG_SECTION) - .get(CONFIG_CLI_PATH, ""); - return configured.length > 0 ? 
configured : DEFAULT_CLI_PATH; -}, - - runCliGenerate = async ( - specPath: string, - outDir: string -): Promise => - new Promise((resolve, reject) => { - execFile( - resolveCliPath(), - [ - CLI_CMD_GENERATE, CLI_SUBCMD_OPENAPI, specPath, - CLI_FLAG_OUTPUT_DIR, outDir, - CLI_FLAG_OUTPUT, CLI_OUTPUT_JSON, - ], - { timeout: 30_000 }, - (error: Error | null, stdout: string, stderr: string) => { - if (error !== null && stdout.length === 0) { - reject(new Error(`${CLI_SPAWN_FAILED_PREFIX}${stderr}`)); - return; - } - resolve(stdout); - } - ); - }), - - collectNapFiles = (dir: string): string[] => - fs.readdirSync(dir) - .filter((f: string) => f.endsWith(NAP_EXTENSION)) - .map((f: string) => path.join(dir, f)); - -suite("OpenAPI CLI Generate", () => { - suiteSetup(async function () { - this.timeout(30_000); - await activateExtension(); - }); - - test("CLI generates .nap files from ecommerce spec", async function () { - this.timeout(30_000); - const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "napper-generate-")); - - try { - const specContent = readFixtureFile(ECOMMERCE_SPEC_FIXTURE), - specPath = path.join(tmpDir, ECOMMERCE_SPEC_FIXTURE); - fs.writeFileSync(specPath, specContent, ENCODING_UTF8); - - const stdout = await runCliGenerate(specPath, tmpDir), - generated = JSON.parse(stdout) as { files: number; playlist: string }; - - assert.strictEqual( - generated.files, - EXPECTED_ENDPOINT_COUNT, - `CLI must generate exactly ${EXPECTED_ENDPOINT_COUNT} .nap files` - ); - - const playlistPath = path.join(tmpDir, generated.playlist); - assert.ok( - fs.existsSync(playlistPath), - `Playlist file must exist at ${generated.playlist}` - ); - - const playlistContent = fs.readFileSync(playlistPath, ENCODING_UTF8); - assert.ok( - playlistContent.includes(SECTION_META), - "Playlist must have [meta] section" - ); - assert.ok( - playlistContent.includes(SECTION_STEPS), - "Playlist must have [steps] section" - ); - - const napenvPath = path.join(tmpDir, NAPENV_EXTENSION); - assert.ok( 
- fs.existsSync(napenvPath), - ".napenv file must exist with base URL" - ); - - const envContent = fs.readFileSync(napenvPath, ENCODING_UTF8); - assert.ok( - envContent.includes(BASE_URL_KEY), - ".napenv must contain baseUrl key" - ); - - const napFiles = collectNapFiles(tmpDir); - assert.strictEqual( - napFiles.length, - EXPECTED_ENDPOINT_COUNT, - `Must find exactly ${EXPECTED_ENDPOINT_COUNT} .nap files on disk` - ); - - for (const napFile of napFiles) { - const content = fs.readFileSync(napFile, ENCODING_UTF8); - assert.ok( - content.includes(SECTION_META), - `${path.basename(napFile)} must have [meta] section` - ); - assert.ok( - content.includes(SECTION_REQUEST), - `${path.basename(napFile)} must have [request] section` - ); - assert.ok( - content.includes(SECTION_ASSERT), - `${path.basename(napFile)} must have [assert] section` - ); - } - } finally { - fs.rmSync(tmpDir, { recursive: true }); - } - }); -}); diff --git a/src/Nap.VsCode/src/test/e2e/playlist.e2e.test.ts b/src/Nap.VsCode/src/test/e2e/playlist.e2e.test.ts deleted file mode 100644 index ab3135b..0000000 --- a/src/Nap.VsCode/src/test/e2e/playlist.e2e.test.ts +++ /dev/null @@ -1,616 +0,0 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; -import * as fs from "fs"; -import { - activateExtension, - closeAllEditors, - executeCommand, - extractStepLines, - getFixturePath, - openDocument, - sleep, - waitForCondition, -} from "../helpers/helpers"; -import * as path from "path"; -import { - CMD_RUN_FILE, - CMD_SAVE_REPORT, - CONFIG_CLI_PATH, - CONFIG_SECTION, - PLAYLIST_PANEL_TITLE, - REPORT_FILE_EXTENSION, - REPORT_FILE_SUFFIX, - RESPONSE_PANEL_TITLE, -} from "../../constants"; - -const findTabByLabel = (label: string): vscode.Tab | undefined => - vscode.window.tabGroups.all - .flatMap((g) => g.tabs) - .find((tab) => tab.label.includes(label)); - -suite("Playlist Panel — Real API Calls", () => { - suiteSetup(async function () { - this.timeout(30000); - await activateExtension(); - 
await sleep(3000); - }); - - suiteTeardown(async () => { - await closeAllEditors(); - }); - - test("playlist panel opens IMMEDIATELY when run starts, before API calls complete", async function () { - this.timeout(45000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("petstore/smoke.naplist"), - - // Fire the command but do NOT await — we want to check the panel - // Appears while API calls are still in flight - runPromise = executeCommand(CMD_RUN_FILE, doc.uri); - - // Panel must appear within 2 seconds — API calls take much longer - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 2000 - ); - - const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - playlistTab, - `Tab '${PLAYLIST_PANEL_TITLE}' must open IMMEDIATELY when playlist starts, not after all API calls finish` - ); - - const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTab, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT exist after running a .naplist — playlist panel should open instead` - ); - - // Now wait for actual completion — panel must persist - await runPromise; - - const panelAfterCompletion = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - panelAfterCompletion, - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after all API calls complete` - ); - - const responseTabAfterRun = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTabAfterRun, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear even after playlist completes — only playlist panel is used` - ); - }); - - test("running a playlist via filePath object opens panel immediately", async function () { - this.timeout(45000); - await closeAllEditors(); - await sleep(500); - - const playlistPath = getFixturePath("petstore/smoke.naplist"), - - // Fire without await to test immediate opening - runPromise = executeCommand(CMD_RUN_FILE, { filePath: playlistPath }); - - // Panel must 
appear within 2 seconds — proves immediate opening - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 2000 - ); - - const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - playlistTab, - `Tab '${PLAYLIST_PANEL_TITLE}' must open IMMEDIATELY via filePath object (tree view click path)` - ); - - const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTab, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT exist — the tree view play button must open the playlist panel` - ); - - await runPromise; - - const panelAfterCompletion = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - panelAfterCompletion, - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after completion via filePath path` - ); - - const responseTabAfterFilePath = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTabAfterFilePath, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear after playlist completion via filePath` - ); - }); - - test("running a single .nap file opens response panel, not playlist panel", async function () { - this.timeout(45000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("get-httpbin.nap"); - assert.strictEqual( - doc.languageId, - "nap", - "get-httpbin.nap should have nap language mode" - ); - - await executeCommand(CMD_RUN_FILE, doc.uri); - - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); - - const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.ok( - responseTab, - `Tab '${RESPONSE_PANEL_TITLE}' must exist after running a single .nap file` - ); - assert.notStrictEqual( - responseTab.group, - undefined, - "Response tab should be visible in a tab group" - ); - - const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.strictEqual( - playlistTab, - undefined, - `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single .nap file` - ); - }); - - 
test("playlist file has correct structure", () => { - const playlistPath = getFixturePath("petstore/smoke.naplist"), - content = fs.readFileSync(playlistPath, "utf-8"); - - assert.ok(content.includes("[meta]"), "Should have [meta] section"); - assert.ok(content.includes("[steps]"), "Should have [steps] section"); - assert.ok( - content.includes("list-pets.nap"), - "Should reference list-pets step" - ); - assert.ok( - content.includes("get-pet.nap"), - "Should reference get-pet step" - ); - }); - - test("playlist steps reference files that exist", () => { - const playlistPath = getFixturePath("petstore/smoke.naplist"), - content = fs.readFileSync(playlistPath, "utf-8"), - stepPaths = extractStepLines(content); - - assert.ok( - stepPaths.length > 0, - "Playlist should have at least one step" - ); - - const basePath = getFixturePath("petstore"); - for (const stepRelative of stepPaths) { - const stepFull = `${basePath}/${stepRelative.replace("./", "")}`; - assert.ok( - fs.existsSync(stepFull), - `Step file should exist: ${stepRelative}` - ); - } - }); - - test("playlist with script step opens panel and completes without error", async function () { - this.timeout(60000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("petstore/with-script.naplist"); - assert.strictEqual( - doc.languageId, - "naplist", - "with-script.naplist should have naplist language mode" - ); - - const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); - - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - playlistTab, - `Tab '${PLAYLIST_PANEL_TITLE}' must open when running playlist that includes .fsx script steps` - ); - - const responseTabDuringRun = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTabDuringRun, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT exist — playlist with scripts should use playlist panel, 
not response panel` - ); - - await runPromise; - - const panelAfterCompletion = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - panelAfterCompletion, - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after playlist with scripts completes` - ); - - const responseTabAfterCompletion = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTabAfterCompletion, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear even after playlist with scripts completes` - ); - }); - - test("with-script.naplist fixture references existing files", () => { - const playlistPath = getFixturePath("petstore/with-script.naplist"), - content = fs.readFileSync(playlistPath, "utf-8"); - - assert.ok(content.includes("[meta]"), "Should have [meta] section"); - assert.ok(content.includes("[steps]"), "Should have [steps] section"); - assert.ok( - content.includes("echo.fsx"), - "Should reference echo.fsx script step" - ); - assert.ok( - content.includes("list-pets.nap"), - "Should reference list-pets.nap API step" - ); - - const scriptsDir = getFixturePath("scripts"); - assert.ok( - fs.existsSync(`${scriptsDir}/echo.fsx`), - "echo.fsx fixture script must exist" - ); - - const echoContent = fs.readFileSync(`${scriptsDir}/echo.fsx`, "utf-8"); - assert.ok( - echoContent.includes("printfn"), - "echo.fsx must contain printfn to produce output" - ); - }); - - test("re-running a playlist resets state and opens fresh running panel", async function () { - this.timeout(90000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("petstore/smoke.naplist"); - - // First run — wait for full completion so results are stored - await executeCommand(CMD_RUN_FILE, doc.uri); - - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 10000 - ); - - const panelAfterFirstRun = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - panelAfterFirstRun, - `Tab '${PLAYLIST_PANEL_TITLE}' must exist after first playlist run completes` - ); - - 
// Second run — fire WITHOUT await to test immediate state reset - const secondRunPromise = executeCommand(CMD_RUN_FILE, doc.uri); - - // Panel must still exist immediately (reused, not recreated) - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 2000 - ); - - const panelDuringSecondRun = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - panelDuringSecondRun, - `Tab '${PLAYLIST_PANEL_TITLE}' must be reused for second run — not closed and reopened` - ); - - // Only ONE playlist tab should exist (proves reuse, not duplication) - const playlistTabs = vscode.window.tabGroups.all - .flatMap((g) => g.tabs) - .filter((t) => t.label.includes(PLAYLIST_PANEL_TITLE)); - assert.strictEqual( - playlistTabs.length, - 1, - "Only one playlist panel tab should exist during re-run — panel must be reused" - ); - - // Response panel must NOT appear during re-run - const responseTabDuringRerun = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTabDuringRerun, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear during playlist re-run` - ); - - // Wait for second run to complete - await secondRunPromise; - - // Panel must persist after second run - const panelAfterSecondRun = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - panelAfterSecondRun, - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after second playlist run completes` - ); - }); - - test("opening .naplist sets naplist language mode", async function () { - this.timeout(10000); - const doc = await openDocument("petstore/smoke.naplist"); - assert.strictEqual( - doc.languageId, - "naplist", - "Language should be naplist" - ); - }); - - test("save report command creates HTML report file after playlist completes", async function () { - this.timeout(60000); - await closeAllEditors(); - await sleep(500); - - const playlistPath = getFixturePath("petstore/smoke.naplist"), - expectedReportPath = path.join( - path.dirname(playlistPath), - 
`smoke${REPORT_FILE_SUFFIX}${REPORT_FILE_EXTENSION}` - ); - - // Clean up any leftover report from previous runs - if (fs.existsSync(expectedReportPath)) { - fs.unlinkSync(expectedReportPath); - } - - const doc = await openDocument("petstore/smoke.naplist"); - await executeCommand(CMD_RUN_FILE, doc.uri); - - // Wait for panel to appear and run to complete - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - // Report must NOT exist before the save command is invoked - assert.strictEqual( - fs.existsSync(expectedReportPath), - false, - "Report file must not exist before Save Report is triggered" - ); - - // Trigger save report — same as clicking the Save Report button - await executeCommand(CMD_SAVE_REPORT); - - // Report file must now exist at the expected path - assert.ok( - fs.existsSync(expectedReportPath), - `Report file must be created at ${expectedReportPath} after Save Report command` - ); - - const reportContent = fs.readFileSync(expectedReportPath, "utf-8"); - - assert.ok( - reportContent.includes(" findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - playlistTab, - `Tab '${PLAYLIST_PANEL_TITLE}' must open when running playlist that includes .csx script steps` - ); - - const responseTabDuringRun = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTabDuringRun, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT exist — playlist with C# scripts should use playlist panel` - ); - - await runPromise; - - const panelAfterCompletion = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - panelAfterCompletion, - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after playlist with C# scripts completes` - ); - - const responseTabAfterCompletion = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTabAfterCompletion, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear after playlist 
with C# scripts completes` - ); - }); - - test("with-csx-script.naplist fixture references existing files", () => { - const playlistPath = getFixturePath("petstore/with-csx-script.naplist"), - content = fs.readFileSync(playlistPath, "utf-8"); - - assert.ok(content.includes("[meta]"), "Should have [meta] section"); - assert.ok(content.includes("[steps]"), "Should have [steps] section"); - assert.ok( - content.includes("echo.csx"), - "Should reference echo.csx script step" - ); - assert.ok( - content.includes("list-pets.nap"), - "Should reference list-pets.nap API step" - ); - - const scriptsDir = getFixturePath("scripts"); - assert.ok( - fs.existsSync(`${scriptsDir}/echo.csx`), - "echo.csx fixture script must exist" - ); - - const echoContent = fs.readFileSync(`${scriptsDir}/echo.csx`, "utf-8"); - assert.ok( - echoContent.includes("Console.WriteLine"), - "echo.csx must contain Console.WriteLine to produce output" - ); - }); - - test("playlist with mixed FSX and CSX scripts opens panel and completes without error", async function () { - this.timeout(90000); - await closeAllEditors(); - await sleep(500); - - const doc = await openDocument("petstore/with-mixed-scripts.naplist"); - assert.strictEqual( - doc.languageId, - "naplist", - "with-mixed-scripts.naplist should have naplist language mode" - ); - - const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); - - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - playlistTab, - `Tab '${PLAYLIST_PANEL_TITLE}' must open when running playlist with mixed F# and C# scripts` - ); - - const responseTabDuringRun = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTabDuringRun, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT exist — mixed-script playlist should use playlist panel` - ); - - await runPromise; - - const panelAfterCompletion = findTabByLabel(PLAYLIST_PANEL_TITLE); - 
assert.ok( - panelAfterCompletion, - `Tab '${PLAYLIST_PANEL_TITLE}' must persist after mixed F#/C# playlist completes` - ); - - const responseTabAfterCompletion = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.strictEqual( - responseTabAfterCompletion, - undefined, - `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear after mixed F#/C# playlist completes` - ); - }); - - test("with-mixed-scripts.naplist fixture references both FSX and CSX files", () => { - const playlistPath = getFixturePath("petstore/with-mixed-scripts.naplist"), - content = fs.readFileSync(playlistPath, "utf-8"); - - assert.ok(content.includes("[meta]"), "Should have [meta] section"); - assert.ok(content.includes("[steps]"), "Should have [steps] section"); - assert.ok( - content.includes("echo.fsx"), - "Should reference echo.fsx F# script step" - ); - assert.ok( - content.includes("echo.csx"), - "Should reference echo.csx C# script step" - ); - assert.ok( - content.includes("list-pets.nap"), - "Should reference list-pets.nap API step" - ); - assert.ok( - content.includes("get-pet.nap"), - "Should reference get-pet.nap API step" - ); - - const scriptsDir = getFixturePath("scripts"); - assert.ok( - fs.existsSync(`${scriptsDir}/echo.fsx`), - "echo.fsx must exist for mixed playlist" - ); - assert.ok( - fs.existsSync(`${scriptsDir}/echo.csx`), - "echo.csx must exist for mixed playlist" - ); - }); - - test("playlist with missing CLI shows error in panel, never PASSED", async function () { - this.timeout(30000); - await closeAllEditors(); - await sleep(500); - - const config = vscode.workspace.getConfiguration(CONFIG_SECTION), - originalPath = config.get(CONFIG_CLI_PATH); - - // Point to a nonexistent CLI binary - await config.update( - CONFIG_CLI_PATH, - "/nonexistent/napper-fake-binary", - vscode.ConfigurationTarget.Workspace - ); - - try { - const doc = await openDocument("petstore/smoke.naplist"), - - // Fire command — don't await since it may resolve quickly - runPromise = executeCommand(CMD_RUN_FILE, 
doc.uri); - - // Panel must open even when CLI fails (showRunning fires before CLI) - await waitForCondition( - () => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, - 5000 - ); - - const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); - assert.ok( - playlistTab, - `Tab '${PLAYLIST_PANEL_TITLE}' must open even when CLI fails — error must be shown in the panel, not silently ignored` - ); - - await runPromise; - } finally { - // Restore original CLI path - await config.update( - CONFIG_CLI_PATH, - originalPath, - vscode.ConfigurationTarget.Workspace - ); - } - }); -}); diff --git a/src/Nap.VsCode/src/test/unit/explorerProvider.test.ts b/src/Nap.VsCode/src/test/unit/explorerProvider.test.ts deleted file mode 100644 index 427166c..0000000 --- a/src/Nap.VsCode/src/test/unit/explorerProvider.test.ts +++ /dev/null @@ -1,197 +0,0 @@ -import * as assert from "assert"; -import { createFileNode, createFolderNode } from "../../explorerProvider"; -import { type RunResult, RunState } from "../../types"; -import { - CONTEXT_FOLDER, - CONTEXT_PLAYLIST, - CONTEXT_REQUEST_FILE, -} from "../../constants"; - -const FAKE_NAP_PATH = "/workspace/test.nap", - FAKE_NAPLIST_PATH = "/workspace/smoke.naplist", - FAKE_FOLDER_PATH = "/workspace/petstore", - - GET_CONTENT = "[request]\nmethod = GET\nurl = https://example.com\n", - POST_CONTENT = "[request]\nmethod = POST\nurl = https://example.com\n", - SHORTHAND_GET_CONTENT = "GET https://example.com\n", - SHORTHAND_DELETE_CONTENT = "DELETE https://example.com/1\n", - NO_METHOD_CONTENT = "[request]\nurl = https://example.com\n", - - makePassedResult = (file: string): RunResult => ({ - file, - passed: true, - statusCode: 200, - duration: 42, - assertions: [{ target: "status", passed: true, expected: "200", actual: "200" }], -}), - - makeFailedResult = (file: string): RunResult => ({ - file, - passed: false, - statusCode: 404, - duration: 31, - assertions: [{ target: "status", passed: false, expected: "200", actual: "404" }], -}), - - 
makeErrorResult = (file: string): RunResult => ({ - file, - passed: false, - error: "Connection refused", - assertions: [], -}); - -suite("explorerProvider — createFileNode", () => { - test("idle state when no results exist", () => { - const emptyResults = new Map(), - node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, emptyResults); - - assert.strictEqual(node.runState, RunState.Idle, "should be Idle with no results"); - assert.strictEqual(node.isDirectory, false); - assert.strictEqual(node.contextValue, CONTEXT_REQUEST_FILE); - }); - - test("passed state with green icon when result.passed is true", () => { - const results = new Map(); - results.set(FAKE_NAP_PATH, makePassedResult(FAKE_NAP_PATH)); - const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); - - assert.strictEqual(node.runState, RunState.Passed, "should be Passed when result.passed is true"); - }); - - test("failed state with red icon when result.passed is false", () => { - const results = new Map(); - results.set(FAKE_NAP_PATH, makeFailedResult(FAKE_NAP_PATH)); - const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); - - assert.strictEqual(node.runState, RunState.Failed, "should be Failed when result.passed is false"); - }); - - test("error state when result has error string", () => { - const results = new Map(); - results.set(FAKE_NAP_PATH, makeErrorResult(FAKE_NAP_PATH)); - const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); - - assert.strictEqual(node.runState, RunState.Error, "should be Error when result.error is set"); - }); - - test("result for different file does not affect this node", () => { - const otherPath = "/workspace/other.nap", - results = new Map(); - results.set(otherPath, makePassedResult(otherPath)); - const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); - - assert.strictEqual(node.runState, RunState.Idle, "should be Idle when result is for different file"); - }); - - test("extracts GET method from key-value format", () => { - const 
node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, new Map()); - assert.strictEqual(node.httpMethod, "GET"); - }); - - test("extracts POST method from key-value format", () => { - const node = createFileNode(FAKE_NAP_PATH, POST_CONTENT, new Map()); - assert.strictEqual(node.httpMethod, "POST"); - }); - - test("extracts GET method from shorthand format", () => { - const node = createFileNode(FAKE_NAP_PATH, SHORTHAND_GET_CONTENT, new Map()); - assert.strictEqual(node.httpMethod, "GET"); - }); - - test("extracts DELETE method from shorthand format", () => { - const node = createFileNode(FAKE_NAP_PATH, SHORTHAND_DELETE_CONTENT, new Map()); - assert.strictEqual(node.httpMethod, "DELETE"); - }); - - test("no method extracted when content has no method line", () => { - const node = createFileNode(FAKE_NAP_PATH, NO_METHOD_CONTENT, new Map()); - assert.strictEqual(node.httpMethod, undefined); - }); - - test("naplist files get playlist context value", () => { - const node = createFileNode(FAKE_NAPLIST_PATH, "[meta]\nname = smoke\n", new Map()); - assert.strictEqual(node.contextValue, CONTEXT_PLAYLIST); - }); - - test("naplist files do not extract http method", () => { - const node = createFileNode(FAKE_NAPLIST_PATH, "GET https://example.com\n", new Map()); - assert.strictEqual(node.httpMethod, undefined); - }); - - test("label is filename without extension", () => { - const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, new Map()); - assert.strictEqual(node.label, "test"); - }); - - test("passed result stays passed even with multiple assertions", () => { - const results = new Map(); - results.set(FAKE_NAP_PATH, { - file: FAKE_NAP_PATH, - passed: true, - statusCode: 200, - duration: 50, - assertions: [ - { target: "status", passed: true, expected: "200", actual: "200" }, - { target: "body.id", passed: true, expected: "exists", actual: "1" }, - { target: "body.title", passed: true, expected: "Test", actual: "Test" }, - ], - }); - const node = createFileNode(FAKE_NAP_PATH, 
GET_CONTENT, results); - - assert.strictEqual(node.runState, RunState.Passed); - }); - - test("failed result even when some assertions pass", () => { - const results = new Map(); - results.set(FAKE_NAP_PATH, { - file: FAKE_NAP_PATH, - passed: false, - statusCode: 200, - duration: 50, - assertions: [ - { target: "status", passed: true, expected: "200", actual: "200" }, - { target: "body.name", passed: false, expected: "Alice", actual: "Bob" }, - ], - }); - const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); - - assert.strictEqual(node.runState, RunState.Failed, "should be Failed when passed is false"); - }); - - test("error takes priority over passed field", () => { - const results = new Map(); - results.set(FAKE_NAP_PATH, { - file: FAKE_NAP_PATH, - passed: false, - error: "timeout", - assertions: [], - }); - const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); - - assert.strictEqual(node.runState, RunState.Error, "error field should produce Error state, not Failed"); - }); -}); - -suite("explorerProvider — createFolderNode", () => { - test("folder node is always idle", () => { - const child = createFileNode(FAKE_NAP_PATH, GET_CONTENT, new Map()), - folder = createFolderNode(FAKE_FOLDER_PATH, [child]); - - assert.strictEqual(folder.runState, RunState.Idle); - assert.strictEqual(folder.isDirectory, true); - assert.strictEqual(folder.contextValue, CONTEXT_FOLDER); - }); - - test("folder label is directory basename", () => { - const folder = createFolderNode(FAKE_FOLDER_PATH, []); - assert.strictEqual(folder.label, "petstore"); - }); - - test("folder children are preserved", () => { - const child1 = createFileNode(FAKE_NAP_PATH, GET_CONTENT, new Map()), - child2 = createFileNode("/workspace/other.nap", POST_CONTENT, new Map()), - folder = createFolderNode(FAKE_FOLDER_PATH, [child1, child2]); - - assert.strictEqual(folder.children?.length, 2); - }); -}); diff --git a/src/Nap.VsCode/src/test/unit/htmlUtils.test.ts 
b/src/Nap.VsCode/src/test/unit/htmlUtils.test.ts deleted file mode 100644 index 0e112e8..0000000 --- a/src/Nap.VsCode/src/test/unit/htmlUtils.test.ts +++ /dev/null @@ -1,907 +0,0 @@ -import * as assert from "assert"; -import { - buildResultDetailHtml, - buildRequestGroupHtml, - buildResponseGroupHtml, - buildErrorHtml, - buildLogHtml, - buildCollapsibleSection, - buildHeadersTableRows, - escapeHtml, -} from "../../htmlUtils"; -import type { RunResult } from "../../types"; -import { - NO_REQUEST_HEADERS, - SECTION_LABEL_ASSERTIONS, - SECTION_LABEL_BODY, - SECTION_LABEL_ERROR, - SECTION_LABEL_OUTPUT, - SECTION_LABEL_REQUEST, - SECTION_LABEL_REQUEST_BODY, - SECTION_LABEL_REQUEST_HEADERS, - SECTION_LABEL_RESPONSE, - SECTION_LABEL_RESPONSE_HEADERS, -} from "../../constants"; - -const MOCK_FULL_RESULT: RunResult = { - file: "/workspace/api/get-users.nap", - passed: true, - statusCode: 200, - duration: 150, - requestMethod: "GET", - requestUrl: "https://api.example.com/users", - requestHeaders: { "Authorization": "Bearer tok123", "Accept": "application/json" }, - headers: { "content-type": "application/json", "x-request-id": "abc-def" }, - body: '{"users":[{"id":1}]}', - assertions: [ - { target: "status", passed: true, expected: "200", actual: "200" }, - { target: "headers.Content-Type", passed: true, expected: "application/json", actual: "application/json" }, - ], -}, - - MOCK_FAILED_RESULT: RunResult = { - file: "/workspace/api/delete-user.nap", - passed: false, - statusCode: 403, - duration: 42, - requestMethod: "DELETE", - requestUrl: "https://api.example.com/users/99", - requestHeaders: {}, - headers: { "content-type": "text/plain" }, - body: "Forbidden", - error: "Access denied: insufficient permissions", - assertions: [ - { target: "status", passed: false, expected: "200", actual: "403" }, - ], -}, - - MOCK_MINIMAL_RESULT: RunResult = { - file: "/workspace/api/health.nap", - passed: true, - assertions: [], -}, - - MOCK_SCRIPT_RESULT: RunResult = { - file: 
"/workspace/scripts/setup.fsx", - passed: true, - duration: 500, - log: ["Seeding database...", "Created 10 records", "Done"], - assertions: [], -}, - - MOCK_NO_URL_RESULT: RunResult = { - file: "/workspace/api/check.nap", - passed: true, - statusCode: 200, - requestHeaders: { "Accept": "text/html" }, - headers: { "content-type": "text/html" }, - body: "", - assertions: [ - { target: "status", passed: true, expected: "200", actual: "200" }, - ], -}, - - MOCK_XSS_RESULT: RunResult = { - file: "/workspace/api/xss.nap", - passed: false, - statusCode: 200, - requestMethod: "POST", - requestUrl: "https://api.example.com/search?q=", - requestHeaders: { "X-Evil": "" }, - headers: { "x-injected": "val\"onmouseover=alert(1)" }, - body: '{"msg":""}', - error: "Error: bold injection", - log: ["Log line with "], - assertions: [ - { - target: "body.', - requestBodyContentType: '', - headers: {}, - assertions: [], -}; - -suite("Result Detail HTML — Request/Response grouping", () => { - test("output has a Request details section that is NOT open", () => { - const html = buildResultDetailHtml(MOCK_FULL_RESULT); - - assert.ok( - html.includes(`
`), - "Request section must be a
element WITHOUT the open attribute" - ); - assert.ok( - html.includes(SECTION_LABEL_REQUEST), - "Request section must have the Request title" - ); - }); - - test("output has a Response details section that IS open", () => { - const html = buildResultDetailHtml(MOCK_FULL_RESULT); - - assert.ok( - html.includes(`
`), - "Response section must be a
element WITH the open attribute" - ); - assert.ok( - html.includes(SECTION_LABEL_RESPONSE), - "Response section must have the Response title" - ); - }); - - test("Request section appears before Response section", () => { - const html = buildResultDetailHtml(MOCK_FULL_RESULT), - requestIdx = html.indexOf(SECTION_LABEL_REQUEST), - responseIdx = html.indexOf(SECTION_LABEL_RESPONSE); - - assert.ok(requestIdx > -1, "Request section must exist"); - assert.ok(responseIdx > -1, "Response section must exist"); - assert.ok( - requestIdx < responseIdx, - "Request section must appear before Response section in the DOM" - ); - }); - - test("Request section contains the request URL and method", () => { - const html = buildRequestGroupHtml(MOCK_FULL_RESULT); - - assert.ok( - html.includes("https://api.example.com/users"), - "Request section must contain the request URL" - ); - assert.ok( - html.includes("GET"), - "Request section must contain the HTTP method" - ); - assert.ok( - html.includes("request-url"), - "Request URL must use the request-url CSS class" - ); - assert.ok( - html.includes("request-method"), - "HTTP method must use the request-method CSS class" - ); - }); - - test("Request section contains request headers", () => { - const html = buildRequestGroupHtml(MOCK_FULL_RESULT); - - assert.ok( - html.includes(SECTION_LABEL_REQUEST_HEADERS), - "Request section must have a Request Headers subsection" - ); - assert.ok( - html.includes("Authorization"), - "Request headers must include the Authorization header key" - ); - assert.ok( - html.includes("Bearer tok123"), - "Request headers must include the Authorization header value" - ); - assert.ok( - html.includes("Accept"), - "Request headers must include the Accept header key" - ); - }); - - test("Response section contains assertions", () => { - const html = buildResponseGroupHtml(MOCK_FULL_RESULT); - - assert.ok( - html.includes(SECTION_LABEL_ASSERTIONS), - "Response section must have an Assertions subsection" - ); - 
assert.ok( - html.includes("status"), - "Assertions must include the status assertion target" - ); - assert.ok( - html.includes("headers.Content-Type"), - "Assertions must include the Content-Type assertion target" - ); - }); - - test("Response section contains response headers", () => { - const html = buildResponseGroupHtml(MOCK_FULL_RESULT); - - assert.ok( - html.includes(SECTION_LABEL_RESPONSE_HEADERS), - "Response section must have a Response Headers subsection" - ); - assert.ok( - html.includes("content-type"), - "Response headers must include the content-type key" - ); - assert.ok( - html.includes("x-request-id"), - "Response headers must include the x-request-id key" - ); - assert.ok( - html.includes("abc-def"), - "Response headers must include the x-request-id value" - ); - }); - - test("Response section contains response body", () => { - const html = buildResponseGroupHtml(MOCK_FULL_RESULT); - - assert.ok( - html.includes(SECTION_LABEL_BODY), - "Response section must have a Body subsection" - ); - assert.ok( - html.includes("users"), - "Body must contain the JSON key from the response" - ); - }); -}); - -suite("Result Detail HTML — Error and Log sections", () => { - test("error section is open and appears before request/response groups", () => { - const html = buildResultDetailHtml(MOCK_FAILED_RESULT), - errorIdx = html.indexOf(SECTION_LABEL_ERROR), - requestIdx = html.indexOf(SECTION_LABEL_REQUEST); - - assert.ok(errorIdx > -1, "Error section must exist for failed results"); - assert.ok( - errorIdx < requestIdx, - "Error section must appear before the Request group" - ); - assert.ok( - html.includes("Access denied: insufficient permissions"), - "Error section must show the error message" - ); - }); - - test("error section uses open details element", () => { - const html = buildErrorHtml("Something went wrong"); - const detailsMatch = html.indexOf('
'); - - assert.ok( - detailsMatch > -1, - "Error section must be an open
element" - ); - }); - - test("no error section when error is undefined", () => { - const html = buildErrorHtml(undefined); - assert.strictEqual(html, "", "Error HTML must be empty when error is undefined"); - }); - - test("no error section when error is empty string", () => { - const html = buildErrorHtml(""); - assert.strictEqual(html, "", "Error HTML must be empty when error is empty string"); - }); - - test("log section appears and shows all log lines", () => { - const html = buildResultDetailHtml(MOCK_SCRIPT_RESULT); - - assert.ok( - html.includes(SECTION_LABEL_OUTPUT), - "Output section must exist for results with log lines" - ); - assert.ok( - html.includes("Seeding database..."), - "Log must show first log line" - ); - assert.ok( - html.includes("Created 10 records"), - "Log must show second log line" - ); - assert.ok( - html.includes("Done"), - "Log must show last log line" - ); - }); - - test("no log section when log is undefined", () => { - const html = buildLogHtml(undefined); - assert.strictEqual(html, "", "Log HTML must be empty when log is undefined"); - }); - - test("no log section when log is empty array", () => { - const html = buildLogHtml([]); - assert.strictEqual(html, "", "Log HTML must be empty when log is empty array"); - }); - - test("log section appears before request/response groups", () => { - const html = buildResultDetailHtml(MOCK_SCRIPT_RESULT), - logIdx = html.indexOf(SECTION_LABEL_OUTPUT), - requestIdx = html.indexOf(SECTION_LABEL_REQUEST); - - assert.ok( - logIdx < requestIdx, - "Log section must appear before the Request group" - ); - }); -}); - -suite("Result Detail HTML — Minimal and edge-case results", () => { - test("minimal result still produces Request group", () => { - const html = buildResultDetailHtml(MOCK_MINIMAL_RESULT); - - assert.ok( - html.includes(SECTION_LABEL_REQUEST), - "Even a minimal result must have a Request section" - ); - }); - - test("minimal result with no assertions/headers/body produces no Response 
group", () => { - const html = buildResponseGroupHtml(MOCK_MINIMAL_RESULT); - - assert.strictEqual( - html, - "", - "Response group must be empty when there are no assertions, headers, or body" - ); - }); - - test("request group without URL shows no request-url div", () => { - const html = buildRequestGroupHtml(MOCK_MINIMAL_RESULT); - - assert.ok( - !html.includes("request-url"), - "Request group must not contain request-url div when URL is undefined" - ); - }); - - test("request group without request headers shows empty hint", () => { - const html = buildRequestGroupHtml(MOCK_MINIMAL_RESULT); - - assert.ok( - html.includes(NO_REQUEST_HEADERS), - "Request group must show empty-hint text when no request headers exist" - ); - }); - - test("result with empty body produces no Body subsection", () => { - const html = buildResponseGroupHtml(MOCK_EMPTY_BODY_RESULT); - - assert.ok( - !html.includes(SECTION_LABEL_BODY), - "Response group must not contain a Body subsection when body is empty string" - ); - }); - - test("result with empty headers object produces no Response Headers subsection", () => { - const html = buildResponseGroupHtml(MOCK_EMPTY_BODY_RESULT); - - assert.ok( - !html.includes(SECTION_LABEL_RESPONSE_HEADERS), - "Response group must not contain a Response Headers subsection when headers is empty" - ); - }); - - test("result without URL but with request headers still shows headers", () => { - const html = buildRequestGroupHtml(MOCK_NO_URL_RESULT); - - assert.ok( - !html.includes("request-url"), - "Request group must not show request-url when URL is undefined" - ); - assert.ok( - html.includes("Accept"), - "Request group must still show request headers when present" - ); - assert.ok( - html.includes("text/html"), - "Request group must show request header values" - ); - }); - - test("non-JSON body is rendered as escaped plain text", () => { - const html = buildResponseGroupHtml(MOCK_INVALID_JSON_BODY); - - assert.ok( - html.includes("this is not json"), - 
"Non-JSON body text must appear in the output" - ); - assert.ok( - !html.includes("json-key"), - "Non-JSON body must not have JSON syntax highlighting classes" - ); - }); -}); - -suite("Result Detail HTML — Failed assertion details", () => { - test("failed assertions show expected and actual values", () => { - const html = buildResponseGroupHtml(MOCK_FAILED_RESULT); - - assert.ok( - html.includes("expected"), - "Failed assertion must show 'expected' label" - ); - assert.ok( - html.includes("actual"), - "Failed assertion must show 'actual' label" - ); - assert.ok( - html.includes("200"), - "Failed assertion must show the expected value" - ); - assert.ok( - html.includes("403"), - "Failed assertion must show the actual value" - ); - }); - - test("failed assertions use the fail CSS class", () => { - const html = buildResponseGroupHtml(MOCK_FAILED_RESULT); - - assert.ok( - html.includes('class="assert-row fail"'), - "Failed assertion row must have the 'fail' CSS class" - ); - }); - - test("passed assertions use the pass CSS class", () => { - const html = buildResponseGroupHtml(MOCK_FULL_RESULT); - - assert.ok( - html.includes('class="assert-row pass"'), - "Passed assertion row must have the 'pass' CSS class" - ); - }); - - test("passed assertions do NOT show expected/actual detail", () => { - const html = buildResponseGroupHtml(MOCK_FULL_RESULT); - - assert.ok( - !html.includes("assert-detail"), - "Passed assertions must not show the expected/actual detail div" - ); - }); -}); - -suite("Result Detail HTML — XSS prevention", () => { - test("HTML in request URL is escaped", () => { - const html = buildRequestGroupHtml(MOCK_XSS_RESULT); - - assert.ok( - !html.includes(""), - "Raw script tags in URL must be escaped" - ); - assert.ok( - html.includes("<script>"), - "Script tags in URL must be HTML-escaped" - ); - }); - - test("HTML in request header values is escaped", () => { - const html = buildRequestGroupHtml(MOCK_XSS_RESULT); - - assert.ok( - !html.includes(""), - "Raw 
HTML in request header values must be escaped" - ); - assert.ok( - html.includes("<img onerror=alert(1)>"), - "HTML in request header values must be escaped" - ); - }); - - test("HTML in response header values is escaped", () => { - const html = buildResponseGroupHtml(MOCK_XSS_RESULT); - - assert.ok( - !html.includes('val"onmouseover=alert(1)'), - "Raw quotes in response header values must be escaped" - ); - assert.ok( - html.includes(""onmouseover"), - "Quotes in response header values must be HTML-escaped" - ); - }); - - test("HTML in error message is escaped", () => { - const html = buildErrorHtml(MOCK_XSS_RESULT.error); - - assert.ok( - !html.includes("bold injection"), - "Raw HTML in error must be escaped" - ); - assert.ok( - html.includes("<b>bold injection</b>"), - "HTML tags in error must be escaped" - ); - }); - - test("HTML in log lines is escaped", () => { - const html = buildLogHtml(MOCK_XSS_RESULT.log); - - assert.ok( - !html.includes(""), - "Raw script tags in log lines must be escaped" - ); - assert.ok( - html.includes("<script>"), - "Script tags in log lines must be escaped" - ); - }); - - test("HTML in assertion targets is escaped", () => { - const html = buildResponseGroupHtml(MOCK_XSS_RESULT); - - assert.ok( - !html.includes("body.'), - "Raw script tags in request body must be escaped" - ); - assert.ok( - html.includes("<script>"), - "Script tags in request body must be HTML-escaped" - ); - }); - - test("HTML in request body content type is escaped", () => { - const html = buildRequestGroupHtml(MOCK_XSS_REQUEST_BODY); - - assert.ok( - !html.includes(''), - "Raw HTML in content type hint must be escaped" - ); - assert.ok( - html.includes("<img"), - "HTML in content type hint must be escaped" - ); - }); -}); - -suite("escapeHtml", () => { - test("escapes ampersands", () => { - assert.strictEqual(escapeHtml("a&b"), "a&b"); - }); - - test("escapes angle brackets", () => { - assert.strictEqual(escapeHtml("
"), "<div>"); - }); - - test("escapes double quotes", () => { - assert.strictEqual(escapeHtml('a"b'), "a"b"); - }); - - test("handles string with all special chars", () => { - assert.strictEqual( - escapeHtml('&'), - "<a href="x">&" - ); - }); - - test("returns empty string unchanged", () => { - assert.strictEqual(escapeHtml(""), ""); - }); - - test("returns plain text unchanged", () => { - assert.strictEqual(escapeHtml("hello world"), "hello world"); - }); -}); diff --git a/src/Nap.VsCode/src/test/unit/reportGenerator.test.ts b/src/Nap.VsCode/src/test/unit/reportGenerator.test.ts deleted file mode 100644 index 8a9e796..0000000 --- a/src/Nap.VsCode/src/test/unit/reportGenerator.test.ts +++ /dev/null @@ -1,431 +0,0 @@ -import * as assert from "assert"; -import * as fs from "fs"; -import * as os from "os"; -import * as path from "path"; -import { generatePlaylistReport } from "../../reportGenerator"; -import type { RunResult } from "../../types"; -import { - REPORT_FILE_EXTENSION, - REPORT_FILE_SUFFIX, - SECTION_LABEL_REQUEST, - SECTION_LABEL_REQUEST_BODY, - SECTION_LABEL_REQUEST_HEADERS, - SECTION_LABEL_RESPONSE, - SECTION_LABEL_RESPONSE_HEADERS, -} from "../../constants"; - -const MOCK_PASSED_STEP: RunResult = { - file: "/workspace/petstore/list-pets.nap", - passed: true, - statusCode: 200, - duration: 142, - body: '{"pets":[]}', - headers: { "content-type": "application/json" }, - assertions: [ - { target: "status", passed: true, expected: "200", actual: "200" }, - ], -}, - - MOCK_FAILED_STEP: RunResult = { - file: "/workspace/petstore/get-pet.nap", - passed: false, - statusCode: 404, - duration: 87, - error: "Not Found", - body: '{"message":"not found"}', - headers: { "content-type": "application/json" }, - assertions: [ - { target: "status", passed: false, expected: "200", actual: "404" }, - ], -}, - - MOCK_SCRIPT_STEP: RunResult = { - file: "/workspace/scripts/echo.fsx", - passed: true, - duration: 320, - log: ["Hello from script", "Done"], - assertions: [], 
-}, - - MOCK_POST_STEP: RunResult = { - file: "/workspace/petstore/create-pet.nap", - passed: true, - statusCode: 201, - duration: 95, - requestMethod: "POST", - requestUrl: "https://api.petstore.io/v1/pets", - requestHeaders: { "Content-Type": "application/json", "Authorization": "Bearer xyz" }, - requestBody: '{"name":"Fido","species":"dog"}', - requestBodyContentType: "application/json", - headers: { "content-type": "application/json", "location": "/v1/pets/42" }, - body: '{"id":42,"name":"Fido"}', - assertions: [ - { target: "status", passed: true, expected: "201", actual: "201" }, - ], -}; - -suite("Report Generator", () => { - test("produces valid HTML document with playlist name", () => { - const html = generatePlaylistReport("smoke", [MOCK_PASSED_STEP]); - - assert.ok( - html.includes(""), - "Report must be a valid HTML document" - ); - assert.ok( - html.includes("smoke"), - "Report must contain the playlist name in the hero" - ); - assert.ok( - html.includes(""), - "Report must have an HTML title element" - ); - }); - - test("shows all step file names and HTTP status codes", () => { - const html = generatePlaylistReport("smoke", [ - MOCK_PASSED_STEP, - MOCK_FAILED_STEP, - ]); - - assert.ok( - html.includes("list-pets.nap"), - "Report must contain passed step file name" - ); - assert.ok( - html.includes("get-pet.nap"), - "Report must contain failed step file name" - ); - assert.ok( - html.includes("200"), - "Report must show 200 status code" - ); - assert.ok( - html.includes("404"), - "Report must show 404 status code" - ); - }); - - test("shows PASSED and FAILED badges on individual steps", () => { - const html = generatePlaylistReport("smoke", [ - MOCK_PASSED_STEP, - MOCK_FAILED_STEP, - ]); - - assert.ok( - html.includes("PASSED"), - "Report must show PASSED badge" - ); - assert.ok( - html.includes("FAILED"), - "Report must show FAILED badge" - ); - }); - - test("shows step durations", () => { - const html = generatePlaylistReport("smoke", [ - 
MOCK_PASSED_STEP, - MOCK_FAILED_STEP, - ]); - - assert.ok(html.includes("142ms"), "Report must show 142ms duration"); - assert.ok(html.includes("87ms"), "Report must show 87ms duration"); - }); - - test("shows error details for failed steps", () => { - const html = generatePlaylistReport("smoke", [MOCK_FAILED_STEP]); - - assert.ok( - html.includes("Not Found"), - "Report must show error message for failed step" - ); - assert.ok( - html.includes("error-box"), - "Report must render error in styled error box" - ); - }); - - test("includes response headers inside Response group", () => { - const html = generatePlaylistReport("smoke", [MOCK_PASSED_STEP]); - - assert.ok( - html.includes(SECTION_LABEL_RESPONSE), - "Report must have Response group" - ); - assert.ok( - html.includes(SECTION_LABEL_RESPONSE_HEADERS), - "Report must have response headers section title inside Response group" - ); - assert.ok( - html.includes("content-type"), - "Report must show header key" - ); - assert.ok( - html.includes("application/json"), - "Report must show header value" - ); - }); - - test("includes response body with JSON content inside Response group", () => { - const html = generatePlaylistReport("smoke", [MOCK_PASSED_STEP]); - - assert.ok( - html.includes("Response Body"), - "Report must have response body section title" - ); - assert.ok( - html.includes("pets"), - "Report must show JSON content from response body" - ); - }); - - test("shows assertions with pass/fail indicators", () => { - const html = generatePlaylistReport("smoke", [ - MOCK_PASSED_STEP, - MOCK_FAILED_STEP, - ]); - - assert.ok( - html.includes("Assertions"), - "Report must have assertions section" - ); - assert.ok( - html.includes("status"), - "Report must show assertion target name" - ); - assert.ok( - html.includes("expected"), - "Report must show expected vs actual for failures" - ); - }); - - test("calculates correct pass rate for mixed results", () => { - const html = generatePlaylistReport("smoke", [ - 
MOCK_PASSED_STEP, - MOCK_FAILED_STEP, - ]); - - assert.ok( - html.includes("50%"), - "Report must show 50% pass rate for 1 of 2 passing" - ); - assert.ok( - html.includes("Pass Rate"), - "Report must have pass rate stat card" - ); - }); - - test("shows 100% pass rate when all steps pass", () => { - const html = generatePlaylistReport("smoke", [MOCK_PASSED_STEP]); - - assert.ok( - html.includes("100%"), - "Report must show 100% pass rate when all pass" - ); - assert.ok( - html.includes("All Steps Passed"), - "Report must show all-passed status banner" - ); - }); - - test("shows summary stats: passed, failed, duration", () => { - const html = generatePlaylistReport("smoke", [ - MOCK_PASSED_STEP, - MOCK_FAILED_STEP, - ]); - - assert.ok(html.includes("Duration"), "Report must show duration stat"); - assert.ok(html.includes("Passed"), "Report must show passed stat label"); - assert.ok(html.includes("Failed"), "Report must show failed stat label"); - }); - - test("renders script step output/log section", () => { - const html = generatePlaylistReport("scripts", [MOCK_SCRIPT_STEP]); - - assert.ok( - html.includes("echo.fsx"), - "Report must show script step file name" - ); - assert.ok( - html.includes("Hello from script"), - "Report must show script log output" - ); - assert.ok( - html.includes("Output"), - "Report must have output section title for script logs" - ); - }); - - test("has interactive expand/collapse for step details", () => { - const html = generatePlaylistReport("smoke", [MOCK_PASSED_STEP]); - - assert.ok( - html.includes("toggleStep"), - "Report must have toggleStep function for expand/collapse" - ); - assert.ok( - html.includes("step-chevron"), - "Report must have chevron indicators" - ); - }); - - test("zero results produces FAILED status, never PASSED", () => { - const html = generatePlaylistReport("empty-run", []); - - assert.ok( - html.includes("Some Steps Failed"), - "Zero results must show failure status banner — playlist must NEVER pass by default" 
- ); - assert.ok( - !html.includes("All Steps Passed"), - "Zero results must NOT show 'All Steps Passed' — 0 steps executed is a failure" - ); - assert.ok( - html.includes("0%"), - "Zero results must show 0% pass rate" - ); - }); - - test("step detail has collapsible Request group (closed by default)", () => { - const html = generatePlaylistReport("smoke", [MOCK_POST_STEP]); - - assert.ok( - html.includes("report-group"), - "Report must use report-group class for collapsible groups" - ); - assert.ok( - html.includes(SECTION_LABEL_REQUEST), - "Report must have a Request group" - ); - const requestGroupMatch = html.indexOf(`>${SECTION_LABEL_REQUEST}<`), - responseGroupMatch = html.indexOf(`>${SECTION_LABEL_RESPONSE}<`); - assert.ok(requestGroupMatch > -1, "Request group title must exist"); - assert.ok(responseGroupMatch > -1, "Response group title must exist"); - assert.ok( - requestGroupMatch < responseGroupMatch, - "Request group must appear before Response group" - ); - }); - - test("step detail has collapsible Response group (open by default)", () => { - const html = generatePlaylistReport("smoke", [MOCK_POST_STEP]); - - assert.ok( - html.includes('<details class="report-group" open>'), - "Response group must have the open attribute" - ); - }); - - test("Request group shows request URL and method", () => { - const html = generatePlaylistReport("smoke", [MOCK_POST_STEP]); - - assert.ok( - html.includes("https://api.petstore.io/v1/pets"), - "Report must show the request URL" - ); - assert.ok( - html.includes("POST"), - "Report must show the request method" - ); - assert.ok( - html.includes("request-method-tag"), - "Request method must use the styled tag class" - ); - }); - - test("Request group shows request headers", () => { - const html = generatePlaylistReport("smoke", [MOCK_POST_STEP]); - - assert.ok( - html.includes(SECTION_LABEL_REQUEST_HEADERS), - "Report must have Request Headers subsection" - ); - assert.ok( - html.includes("Authorization"), - "Request 
headers must show Authorization key" - ); - assert.ok( - html.includes("Bearer xyz"), - "Request headers must show Authorization value" - ); - }); - - test("Request group shows request body with content type", () => { - const html = generatePlaylistReport("smoke", [MOCK_POST_STEP]); - - assert.ok( - html.includes(SECTION_LABEL_REQUEST_BODY), - "Report must have Request Body subsection" - ); - assert.ok( - html.includes("Fido"), - "Request body must show JSON content" - ); - assert.ok( - html.includes("content-type-hint"), - "Request body must show content type hint" - ); - }); - - test("Response group contains assertions, headers, and body", () => { - const html = generatePlaylistReport("smoke", [MOCK_POST_STEP]); - - assert.ok( - html.includes(SECTION_LABEL_RESPONSE), - "Report must have Response group" - ); - assert.ok( - html.includes("Assertions"), - "Response group must contain assertions" - ); - assert.ok( - html.includes(SECTION_LABEL_RESPONSE_HEADERS), - "Response group must contain response headers" - ); - assert.ok( - html.includes("location"), - "Response headers must show location key" - ); - assert.ok( - html.includes("Response Body"), - "Response group must contain response body" - ); - }); - - test("Request group without URL/body still renders (no request details hint)", () => { - const html = generatePlaylistReport("smoke", [MOCK_PASSED_STEP]); - - assert.ok( - html.includes(SECTION_LABEL_REQUEST), - "Report must have Request group even without URL" - ); - }); - - test("report file can be written to and read from disk", () => { - const tmpDir = os.tmpdir(), - reportPath = path.join( - tmpDir, - `test-playlist${REPORT_FILE_SUFFIX}${REPORT_FILE_EXTENSION}` - ), - - html = generatePlaylistReport("test-playlist", [MOCK_PASSED_STEP]); - fs.writeFileSync(reportPath, html, "utf-8"); - - assert.ok( - fs.existsSync(reportPath), - "Report file must exist on disk after write" - ); - - const content = fs.readFileSync(reportPath, "utf-8"); - assert.ok( - 
content.includes("<!DOCTYPE html>"), - "Read-back content must be valid HTML" - ); - assert.ok( - content.includes("test-playlist"), - "Read-back content must contain playlist name" - ); - - fs.unlinkSync(reportPath); - }); -}); diff --git a/src/Napper.Cli/Napper.Cli.fsproj b/src/Napper.Cli/Napper.Cli.fsproj new file mode 100644 index 0000000..ee27a55 --- /dev/null +++ b/src/Napper.Cli/Napper.Cli.fsproj @@ -0,0 +1,24 @@ +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <OutputType>Exe</OutputType> + <AssemblyName>napper</AssemblyName> + <PackAsTool>true</PackAsTool> + <ToolCommandName>napper</ToolCommandName> + <PackageId>napper</PackageId> + <PackageOutputPath>./nupkg</PackageOutputPath> + <Description>CLI-first, test-oriented HTTP API testing tool</Description> + <PackageTags>http;api;testing;cli;rest;fsharp;dotnet-tool</PackageTags> + </PropertyGroup> + + <ItemGroup> + <Compile Include="Program.fs" /> + </ItemGroup> + + <ItemGroup> + <ProjectReference Include="..\Napper.Core\Napper.Core.fsproj" /> + <ProjectReference Include="..\DotHttp\DotHttp.fsproj" /> + </ItemGroup> + + +</Project> diff --git a/src/Napper.Cli/Program.fs b/src/Napper.Cli/Program.fs new file mode 100644 index 0000000..17739d7 --- /dev/null +++ b/src/Napper.Cli/Program.fs @@ -0,0 +1,536 @@ +// Specs: cli-run, cli-check, cli-generate, cli-convert, cli-env, cli-var, cli-output, cli-output-dir, cli-verbose, cli-exit-codes +open System +open System.IO +open Napper.Core + +/// Parse CLI arguments into a structured form +type CliArgs = + { Command: string // "run", "check", "generate", "convert", "help" + SubCommand: string option // e.g. 
"openapi" for "generate openapi", "http" for "convert http" + File: string option + Env: string option + EnvFile: string option // --env-file for convert command + Vars: Map<string, string> + Output: string // "pretty", "junit", "json", "ndjson" + OutputDir: string option // --output-dir for generate/convert command + DryRun: bool // --dry-run for convert command + Verbose: bool } + +let parseArgs (argv: string array) : CliArgs = + let mutable command = "help" + let mutable subCommand = None + let mutable file = None + let mutable env = None + let mutable envFile = None + let mutable vars = Map.empty + let mutable output = "pretty" + let mutable outputDir = None + let mutable dryRun = false + let mutable verbose = false + let mutable i = 0 + + if argv.Length > 0 then + command <- argv[0] + i <- 1 + + // For "generate openapi" or "convert http", consume the subcommand + if + (command = "generate" || command = "convert") + && i < argv.Length + && not (argv[i].StartsWith "--") + then + subCommand <- Some argv[i] + i <- i + 1 + + while i < argv.Length do + match argv[i] with + | "--env" when i + 1 < argv.Length -> + env <- Some argv[i + 1] + i <- i + 2 + | "--var" when i + 1 < argv.Length -> + let parts = argv[i + 1].Split([| '=' |], 2) + + if parts.Length = 2 then + vars <- vars |> Map.add (parts[0].Trim()) (parts[1].Trim()) + + i <- i + 2 + | "--output" when i + 1 < argv.Length -> + output <- argv[i + 1] + i <- i + 2 + | "--output-dir" when i + 1 < argv.Length -> + outputDir <- Some argv[i + 1] + i <- i + 2 + | "--env-file" when i + 1 < argv.Length -> + envFile <- Some argv[i + 1] + i <- i + 2 + | "--dry-run" -> + dryRun <- true + i <- i + 1 + | "--verbose" -> + verbose <- true + i <- i + 1 + | arg when not (arg.StartsWith "--") && file.IsNone -> + file <- Some arg + i <- i + 1 + | _ -> i <- i + 1 + + { Command = command + SubCommand = subCommand + File = file + Env = env + EnvFile = envFile + Vars = vars + Output = output + OutputDir = outputDir + DryRun = dryRun + 
Verbose = verbose } + +let printHelp () = + printfn "Nap — API testing tool" + printfn "" + printfn "Usage:" + printfn " nap run <file|folder> Run a .nap file, .naplist playlist, or folder" + printfn " nap check <file> Validate a .nap or .naplist file" + printfn " nap generate openapi <spec> --output-dir <dir> Generate .nap files from OpenAPI spec" + printfn " nap convert http <file|dir> --output-dir <dir> Convert .http files to .nap format" + printfn " nap help Show this help" + printfn "" + printfn "Options:" + printfn " --env <name> Environment name (loads .napenv.<name>)" + printfn " --env-file <path> Path to http-client.env.json (for convert)" + printfn " --var <key=value> Variable override (repeatable)" + printfn " --output <format> Output: pretty (default), junit, json, ndjson" + printfn " --output-dir <dir> Output directory for generate/convert commands" + printfn " --dry-run Preview without writing files (convert command)" + printfn " --verbose Enable debug-level logging" + +/// Print result as ndjson and return whether it passed +let private printNdjson (r: NapResult) : bool = + printfn "%s" (Output.formatJson r) + Console.Out.Flush() + r.Passed + +/// Format and print results, return exit code +let private formatAndExit (output: string) (results: NapResult list) : int = + match output with + | "junit" -> printf "%s" (Output.formatJUnit results) + | "json" -> printf "%s" (Output.formatJsonArray results) + | _ -> + for r in results do + printf "%s" (Output.formatPretty r) + + printf "%s" (Output.formatSummary results) + + if results |> List.forall (fun r -> r.Passed) then 0 else 1 + +/// Run all .nap files in a directory +let private runDirectory (args: CliArgs) (dirPath: string) : int = + let files = Directory.GetFiles(dirPath, "*.nap") |> Array.sort + + if files.Length = 0 then + eprintfn "No .nap files found in %s" dirPath + 2 + elif args.Output = "ndjson" then + let passed = + files + |> Array.forall (fun f -> Runner.runNapFile f args.Vars args.Env |> 
Async.RunSynchronously |> printNdjson) + + if passed then 0 else 1 + else + files + |> Array.map (fun f -> Runner.runNapFile f args.Vars args.Env |> Async.RunSynchronously) + |> Array.toList + |> formatAndExit args.Output + +/// Merge playlist vars with CLI overrides +let private mergeVars (playlist: NapPlaylist) (cliVars: Map<string, string>) : Map<string, string> = + let mutable v = playlist.Vars + + for kv in cliVars do + v <- v |> Map.add kv.Key kv.Value + + v + +/// Collect results from playlist steps recursively +let rec private collectSteps + (steps: PlaylistStep list) + (vars: Map<string, string>) + (baseDir: string) + (env: string option) + : NapResult list = + steps + |> List.collect (fun step -> + let full p = + Path.GetFullPath(Path.Combine(baseDir, p)) + + match step with + | NapFileStep p -> [ Runner.runNapFile (full p) vars env |> Async.RunSynchronously ] + | FolderRef p -> + Directory.GetFiles(full p, "*.nap") + |> Array.sort + |> Array.map (fun f -> Runner.runNapFile f vars env |> Async.RunSynchronously) + |> Array.toList + | PlaylistRef p -> + let fp = full p + + match File.ReadAllText(fp) |> Parser.parseNapList with + | Result.Ok nested -> collectSteps nested.Steps vars (Path.GetDirectoryName fp) env + | Result.Error _ -> [] + | ScriptStep p -> [ Runner.runScript (full p) |> Async.RunSynchronously ]) + +/// Stream playlist steps as ndjson, return whether all passed +let rec private streamSteps + (steps: PlaylistStep list) + (vars: Map<string, string>) + (baseDir: string) + (env: string option) + : bool = + steps + |> List.forall (fun step -> + let full p = + Path.GetFullPath(Path.Combine(baseDir, p)) + + match step with + | NapFileStep p -> Runner.runNapFile (full p) vars env |> Async.RunSynchronously |> printNdjson + | FolderRef p -> + Directory.GetFiles(full p, "*.nap") + |> Array.sort + |> Array.forall (fun f -> Runner.runNapFile f vars env |> Async.RunSynchronously |> printNdjson) + | PlaylistRef p -> + let fp = full p + + match 
File.ReadAllText(fp) |> Parser.parseNapList with + | Result.Ok nested -> streamSteps nested.Steps vars (Path.GetDirectoryName fp) env + | Result.Error _ -> false + | ScriptStep p -> Runner.runScript (full p) |> Async.RunSynchronously |> printNdjson) + +/// Run a .naplist playlist +let private runPlaylist (args: CliArgs) (filePath: string) : int = + let content = File.ReadAllText(filePath) + + match Parser.parseNapList content with + | Result.Error msg -> + Logger.error $"Playlist parse error: {msg}" + eprintfn "Error parsing playlist: %s" msg + 2 + | Result.Ok playlist -> + Logger.info $"Playlist loaded: {playlist.Steps.Length} steps" + let dir = Path.GetDirectoryName(filePath) + let env = playlist.Env |> Option.orElse args.Env + let vars = mergeVars playlist args.Vars + + match args.Output with + | "ndjson" -> if streamSteps playlist.Steps vars dir env then 0 else 1 + | _ -> collectSteps playlist.Steps vars dir env |> formatAndExit args.Output + +/// Run a single .nap file +let private runSingleNap (args: CliArgs) (filePath: string) : int = + let result = Runner.runNapFile filePath args.Vars args.Env |> Async.RunSynchronously + + match args.Output with + | "junit" -> printf "%s" (Output.formatJUnit [ result ]) + | "json" + | "ndjson" -> printf "%s" (Output.formatJson result) + | _ -> printf "%s" (Output.formatPretty result) + + if result.Passed then 0 else 1 + +let runFile (args: CliArgs) : int = + match args.File with + | None -> + eprintfn "Error: no file specified" + printHelp () + 2 + | Some f -> + let filePath = Path.GetFullPath(f) + Logger.info $"Processing: {filePath}" + + if not (File.Exists filePath) && not (Directory.Exists filePath) then + Logger.error $"File not found: {filePath}" + eprintfn "Error: %s not found" filePath + 2 + elif Directory.Exists filePath then + runDirectory args filePath + elif filePath.EndsWith ".naplist" then + runPlaylist args filePath + else + runSingleNap args filePath + +let private writeGenerated (outDir: string) (result: 
OpenApiGenerator.GenerationResult) : unit = + let writeFile (f: OpenApiGenerator.GeneratedFile) = + let fullPath = Path.Combine(outDir, f.FileName) + let dir = Path.GetDirectoryName(fullPath) + + if not (Directory.Exists dir) then + Directory.CreateDirectory(dir) |> ignore + + File.WriteAllText(fullPath, f.Content) + + writeFile result.Environment + + for nap in result.NapFiles do + writeFile nap + + writeFile result.Playlist + +/// Display generation results +let private displayGenerated (output: string) (generated: OpenApiGenerator.GenerationResult) (outDir: string) : unit = + match output with + | "json" -> printfn "{\"files\":%d,\"playlist\":\"%s\"}" generated.NapFiles.Length generated.Playlist.FileName + | _ -> + printfn "Generated %d .nap files from OpenAPI spec" generated.NapFiles.Length + printfn " Playlist: %s" generated.Playlist.FileName + printfn " Environment: %s" generated.Environment.FileName + printfn " Output: %s" outDir + +let generateOpenApi (args: CliArgs) : int = + match args.File with + | None -> + eprintfn "Error: no spec file specified" + eprintfn "Usage: nap generate openapi <spec.json> --output-dir <dir>" + 2 + | Some specFile -> + let specPath = Path.GetFullPath(specFile) + + if not (File.Exists specPath) then + eprintfn "Error: %s not found" specPath + 2 + else + let outDir = + args.OutputDir + |> Option.map Path.GetFullPath + |> Option.defaultWith (fun () -> Path.GetDirectoryName(specPath)) + + match File.ReadAllText(specPath) |> OpenApiGenerator.generate with + | Error msg -> + eprintfn "Error: %s" msg + 1 + | Ok generated -> + if not (Directory.Exists outDir) then + Directory.CreateDirectory(outDir) |> ignore + + writeGenerated outDir generated + displayGenerated args.Output generated outDir + 0 + +let checkFile (args: CliArgs) : int = + match args.File with + | None -> + eprintfn "Error: no file specified" + 2 + | Some file -> + let filePath = Path.GetFullPath(file) + + if not (File.Exists filePath) then + eprintfn "Error: %s not 
found" filePath + 2 + else + let content = File.ReadAllText(filePath) + + let result = + if filePath.EndsWith ".naplist" then + Parser.parseNapList content |> Result.map ignore + else + Parser.parseNapFile content |> Result.map ignore + + match result with + | Result.Ok _ -> + printfn "\x1b[32m✓\x1b[0m %s is valid" (Path.GetFileName filePath) + 0 + | Result.Error msg -> + eprintfn "\x1b[31m✗\x1b[0m %s" (Path.GetFileName filePath) + eprintfn " %s" msg + 1 + +/// Write a convert result to disk +let private writeConvertResult (outDir: string) (result: HttpToNapConverter.ConvertResult) : unit = + for fileName: string, content: string in result.GeneratedFiles do + let fullPath = Path.Combine(outDir, fileName) + let dir = Path.GetDirectoryName fullPath + + if not (String.IsNullOrEmpty dir) && not (Directory.Exists dir) then + Directory.CreateDirectory dir |> ignore + + File.WriteAllText(fullPath, content) + Logger.info $"Wrote: {fullPath}" + +/// Convert .http files to .nap format +let convertHttp (args: CliArgs) : int = + match args.File with + | None -> + eprintfn "Error: no file or directory specified" + eprintfn "Usage: nap convert http <file|dir> --output-dir <dir>" + 2 + | Some inputPath -> + let fullInput = Path.GetFullPath(inputPath) + Logger.info $"Converting: {fullInput}" + + if not (File.Exists fullInput) && not (Directory.Exists fullInput) then + eprintfn "Error: %s not found" fullInput + 2 + else + let httpFiles = + if Directory.Exists fullInput then + Directory.GetFiles(fullInput, "*.http") + |> Array.append (Directory.GetFiles(fullInput, "*.rest")) + |> Array.sort + |> Array.toList + else + [ fullInput ] + + if List.isEmpty httpFiles then + eprintfn "No .http or .rest files found in %s" fullInput + 2 + else + let outDir = + args.OutputDir + |> Option.map Path.GetFullPath + |> Option.defaultWith (fun () -> + if Directory.Exists fullInput then + fullInput + else + Path.GetDirectoryName(fullInput)) + + let mutable totalFiles = 0 + let mutable allWarnings = [] 
+ + for httpPath in httpFiles do + let content = File.ReadAllText(httpPath) + + match DotHttp.Parser.parse content with + | Error msg -> eprintfn "Error parsing %s: %s" (Path.GetFileName httpPath) msg + | Ok(httpFile: DotHttp.HttpFile) -> + Logger.info + $"Parsed {httpPath}: {httpFile.Requests.Length} requests, dialect={httpFile.Dialect}" + + // Convert env files if present + match args.EnvFile with + | Some envFilePath -> + let envPath = Path.GetFullPath(envFilePath) + + if File.Exists envPath then + let envJson = File.ReadAllText(envPath) + let isPrivate = envPath.Contains("private") + + match HttpToNapConverter.convertEnvJson envJson isPrivate with + | Ok envFiles -> + if not args.DryRun then + for fn: string, c: string in envFiles do + let fp = Path.Combine(outDir, fn) + File.WriteAllText(fp, c) + Logger.info $"Wrote env: {fp}" + else + for fn: string, _ in envFiles do + printfn " [dry-run] Would write: %s" fn + | Error msg -> eprintfn "Warning: %s" msg + | None -> + // Auto-detect JetBrains env files next to input + let inputDir = + if Directory.Exists fullInput then + fullInput + else + Path.GetDirectoryName(fullInput) + + let jbEnvPath = Path.Combine(inputDir, "http-client.env.json") + let jbPrivatePath = Path.Combine(inputDir, "http-client.private.env.json") + + for envPath, isPrivate in [ jbEnvPath, false; jbPrivatePath, true ] do + if File.Exists envPath then + Logger.info $"Auto-detected env file: {envPath}" + let envJson = File.ReadAllText(envPath) + + match HttpToNapConverter.convertEnvJson envJson isPrivate with + | Ok envFiles -> + if not args.DryRun then + for (fn, c) in envFiles do + let fp = Path.Combine(outDir, fn) + File.WriteAllText(fp, c) + Logger.info $"Wrote env: {fp}" + else + for (fn, _) in envFiles do + printfn " [dry-run] Would write: %s" fn + | Error msg -> eprintfn "Warning: %s" msg + + let result = HttpToNapConverter.convert httpFile + + if args.DryRun then + printfn "Dry run for %s:" (Path.GetFileName httpPath) + + for fn: string, _ 
in result.GeneratedFiles do + printfn " Would write: %s" fn + else + if not (Directory.Exists outDir) then + Directory.CreateDirectory(outDir) |> ignore + + writeConvertResult outDir result + + totalFiles <- totalFiles + result.GeneratedFiles.Length + allWarnings <- allWarnings @ result.Warnings + + for w in allWarnings do + let prefix = + match w.RequestName with + | Some n -> sprintf "[%s] " n + | None -> "" + + eprintfn "Warning: %s%s" prefix w.Message + + match args.Output with + | "json" -> printfn "{\"files\":%d,\"warnings\":%d}" totalFiles allWarnings.Length + | _ -> + printfn "Converted %d requests to .nap files" totalFiles + printfn " Output: %s" outDir + + if not (List.isEmpty allWarnings) then + printfn " Warnings: %d" allWarnings.Length + + 0 + +[<EntryPoint>] +let main argv = + let args = parseArgs argv + Logger.init args.Verbose + let joinedArgs = argv |> String.concat " " + Logger.info $"CLI started: args={joinedArgs} cwd={Directory.GetCurrentDirectory()}" + + let exitCode = + match args.Command with + | "run" -> runFile args + | "check" -> checkFile args + | "generate" -> + match args.SubCommand with + | Some "openapi" -> generateOpenApi args + | Some other -> + eprintfn "Unknown generate target: %s" other + 2 + | None -> + eprintfn "Usage: nap generate openapi <spec.json> --output-dir <dir>" + 2 + | "convert" -> + match args.SubCommand with + | Some "http" -> convertHttp args + | Some other -> + eprintfn "Unknown convert target: %s" other + 2 + | None -> + eprintfn "Usage: nap convert http <file|dir> --output-dir <dir>" + 2 + | "version" + | "--version" -> + let v = Reflection.Assembly.GetExecutingAssembly().GetName().Version + printfn "%d.%d.%d" v.Major v.Minor v.Build + 0 + | "help" + | "--help" + | "-h" -> + printHelp () + 0 + | other -> + eprintfn "Unknown command: %s" other + printHelp () + 2 + + Logger.info $"CLI exiting with code {exitCode}" + Logger.close () + Environment.Exit(exitCode) + exitCode diff --git 
a/src/Napper.Cli/nupkg/napper.1.0.0.nupkg b/src/Napper.Cli/nupkg/napper.1.0.0.nupkg new file mode 100644 index 0000000..0998ff5 Binary files /dev/null and b/src/Napper.Cli/nupkg/napper.1.0.0.nupkg differ diff --git a/src/Napper.Core.Tests/.spec-cache/petstore.json b/src/Napper.Core.Tests/.spec-cache/petstore.json new file mode 100644 index 0000000..a878192 --- /dev/null +++ b/src/Napper.Core.Tests/.spec-cache/petstore.json @@ -0,0 +1 @@ +{"openapi":"3.0.4","info":{"title":"Swagger Petstore - OpenAPI 3.0","description":"This is a sample Pet Store Server based on the OpenAPI 3.0 specification. You can find out more about\nSwagger at [https://swagger.io](https://swagger.io). In the third iteration of the pet store, we've switched to the design first approach!\nYou can now help us improve the API whether it's by making changes to the definition itself or to the code.\nThat way, with time, we can improve the API in general, and expose some of the new features in OAS3.\n\nSome useful links:\n- [The Pet Store repository](https://github.com/swagger-api/swagger-petstore)\n- [The source API definition for the Pet Store](https://github.com/swagger-api/swagger-petstore/blob/master/src/main/resources/openapi.yaml)","termsOfService":"https://swagger.io/terms/","contact":{"email":"apiteam@swagger.io"},"license":{"name":"Apache 2.0","url":"https://www.apache.org/licenses/LICENSE-2.0.html"},"version":"1.0.27"},"externalDocs":{"description":"Find out more about Swagger","url":"https://swagger.io"},"servers":[{"url":"/api/v3"}],"tags":[{"name":"pet","description":"Everything about your Pets","externalDocs":{"description":"Find out more","url":"https://swagger.io"}},{"name":"store","description":"Access to Petstore orders","externalDocs":{"description":"Find out more about our store","url":"https://swagger.io"}},{"name":"user","description":"Operations about user"}],"paths":{"/pet":{"put":{"tags":["pet"],"summary":"Update an existing pet.","description":"Update an existing pet by 
Id.","operationId":"updatePet","requestBody":{"description":"Update an existent pet in the store","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Pet"}},"application/xml":{"schema":{"$ref":"#/components/schemas/Pet"}},"application/x-www-form-urlencoded":{"schema":{"$ref":"#/components/schemas/Pet"}}},"required":true},"responses":{"200":{"description":"Successful operation","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Pet"}},"application/xml":{"schema":{"$ref":"#/components/schemas/Pet"}}}},"400":{"description":"Invalid ID supplied"},"404":{"description":"Pet not found"},"422":{"description":"Validation exception"},"default":{"description":"Unexpected error"}},"security":[{"petstore_auth":["write:pets","read:pets"]}]},"post":{"tags":["pet"],"summary":"Add a new pet to the store.","description":"Add a new pet to the store.","operationId":"addPet","requestBody":{"description":"Create a new pet in the store","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Pet"}},"application/xml":{"schema":{"$ref":"#/components/schemas/Pet"}},"application/x-www-form-urlencoded":{"schema":{"$ref":"#/components/schemas/Pet"}}},"required":true},"responses":{"200":{"description":"Successful operation","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Pet"}},"application/xml":{"schema":{"$ref":"#/components/schemas/Pet"}}}},"400":{"description":"Invalid input"},"422":{"description":"Validation exception"},"default":{"description":"Unexpected error"}},"security":[{"petstore_auth":["write:pets","read:pets"]}]}},"/pet/findByStatus":{"get":{"tags":["pet"],"summary":"Finds Pets by status.","description":"Multiple status values can be provided with comma separated strings.","operationId":"findPetsByStatus","parameters":[{"name":"status","in":"query","description":"Status values that need to be considered for 
filter","required":true,"explode":true,"schema":{"type":"string","default":"available","enum":["available","pending","sold"]}}],"responses":{"200":{"description":"successful operation","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/Pet"}}},"application/xml":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/Pet"}}}}},"400":{"description":"Invalid status value"},"default":{"description":"Unexpected error"}},"security":[{"petstore_auth":["write:pets","read:pets"]}]}},"/pet/findByTags":{"get":{"tags":["pet"],"summary":"Finds Pets by tags.","description":"Multiple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing.","operationId":"findPetsByTags","parameters":[{"name":"tags","in":"query","description":"Tags to filter by","required":true,"explode":true,"schema":{"type":"array","items":{"type":"string"}}}],"responses":{"200":{"description":"successful operation","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/Pet"}}},"application/xml":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/Pet"}}}}},"400":{"description":"Invalid tag value"},"default":{"description":"Unexpected error"}},"security":[{"petstore_auth":["write:pets","read:pets"]}]}},"/pet/{petId}":{"get":{"tags":["pet"],"summary":"Find pet by ID.","description":"Returns a single pet.","operationId":"getPetById","parameters":[{"name":"petId","in":"path","description":"ID of pet to return","required":true,"schema":{"type":"integer","format":"int64"}}],"responses":{"200":{"description":"successful operation","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Pet"}},"application/xml":{"schema":{"$ref":"#/components/schemas/Pet"}}}},"400":{"description":"Invalid ID supplied"},"404":{"description":"Pet not found"},"default":{"description":"Unexpected 
error"}},"security":[{"api_key":[]},{"petstore_auth":["write:pets","read:pets"]}]},"post":{"tags":["pet"],"summary":"Updates a pet in the store with form data.","description":"Updates a pet resource based on the form data.","operationId":"updatePetWithForm","parameters":[{"name":"petId","in":"path","description":"ID of pet that needs to be updated","required":true,"schema":{"type":"integer","format":"int64"}},{"name":"name","in":"query","description":"Name of pet that needs to be updated","schema":{"type":"string"}},{"name":"status","in":"query","description":"Status of pet that needs to be updated","schema":{"type":"string"}}],"responses":{"200":{"description":"successful operation","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Pet"}},"application/xml":{"schema":{"$ref":"#/components/schemas/Pet"}}}},"400":{"description":"Invalid input"},"default":{"description":"Unexpected error"}},"security":[{"petstore_auth":["write:pets","read:pets"]}]},"delete":{"tags":["pet"],"summary":"Deletes a pet.","description":"Delete a pet.","operationId":"deletePet","parameters":[{"name":"api_key","in":"header","description":"","required":false,"schema":{"type":"string"}},{"name":"petId","in":"path","description":"Pet id to delete","required":true,"schema":{"type":"integer","format":"int64"}}],"responses":{"200":{"description":"Pet deleted"},"400":{"description":"Invalid pet value"},"default":{"description":"Unexpected error"}},"security":[{"petstore_auth":["write:pets","read:pets"]}]}},"/pet/{petId}/uploadImage":{"post":{"tags":["pet"],"summary":"Uploads an image.","description":"Upload image of the pet.","operationId":"uploadFile","parameters":[{"name":"petId","in":"path","description":"ID of pet to update","required":true,"schema":{"type":"integer","format":"int64"}},{"name":"additionalMetadata","in":"query","description":"Additional 
Metadata","required":false,"schema":{"type":"string"}}],"requestBody":{"content":{"application/octet-stream":{"schema":{"type":"string","format":"binary"}}}},"responses":{"200":{"description":"successful operation","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ApiResponse"}}}},"400":{"description":"No file uploaded"},"404":{"description":"Pet not found"},"default":{"description":"Unexpected error"}},"security":[{"petstore_auth":["write:pets","read:pets"]}]}},"/store/inventory":{"get":{"tags":["store"],"summary":"Returns pet inventories by status.","description":"Returns a map of status codes to quantities.","operationId":"getInventory","responses":{"200":{"description":"successful operation","content":{"application/json":{"schema":{"type":"object","additionalProperties":{"type":"integer","format":"int32"}}}}},"default":{"description":"Unexpected error"}},"security":[{"api_key":[]}]}},"/store/order":{"post":{"tags":["store"],"summary":"Place an order for a pet.","description":"Place a new order in the store.","operationId":"placeOrder","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/Order"}},"application/xml":{"schema":{"$ref":"#/components/schemas/Order"}},"application/x-www-form-urlencoded":{"schema":{"$ref":"#/components/schemas/Order"}}}},"responses":{"200":{"description":"successful operation","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Order"}}}},"400":{"description":"Invalid input"},"422":{"description":"Validation exception"},"default":{"description":"Unexpected error"}}}},"/store/order/{orderId}":{"get":{"tags":["store"],"summary":"Find purchase order by ID.","description":"For valid response try integer IDs with value <= 5 or > 10. 
Other values will generate exceptions.","operationId":"getOrderById","parameters":[{"name":"orderId","in":"path","description":"ID of order that needs to be fetched","required":true,"schema":{"type":"integer","format":"int64"}}],"responses":{"200":{"description":"successful operation","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Order"}},"application/xml":{"schema":{"$ref":"#/components/schemas/Order"}}}},"400":{"description":"Invalid ID supplied"},"404":{"description":"Order not found"},"default":{"description":"Unexpected error"}}},"delete":{"tags":["store"],"summary":"Delete purchase order by identifier.","description":"For valid response try integer IDs with value < 1000. Anything above 1000 or non-integers will generate API errors.","operationId":"deleteOrder","parameters":[{"name":"orderId","in":"path","description":"ID of the order that needs to be deleted","required":true,"schema":{"type":"integer","format":"int64"}}],"responses":{"200":{"description":"order deleted"},"400":{"description":"Invalid ID supplied"},"404":{"description":"Order not found"},"default":{"description":"Unexpected error"}}}},"/user":{"post":{"tags":["user"],"summary":"Create user.","description":"This can only be done by the logged in user.","operationId":"createUser","requestBody":{"description":"Created user object","content":{"application/json":{"schema":{"$ref":"#/components/schemas/User"}},"application/xml":{"schema":{"$ref":"#/components/schemas/User"}},"application/x-www-form-urlencoded":{"schema":{"$ref":"#/components/schemas/User"}}}},"responses":{"200":{"description":"successful operation","content":{"application/json":{"schema":{"$ref":"#/components/schemas/User"}},"application/xml":{"schema":{"$ref":"#/components/schemas/User"}}}},"default":{"description":"Unexpected error"}}}},"/user/createWithList":{"post":{"tags":["user"],"summary":"Creates list of users with given input array.","description":"Creates list of users with given input 
array.","operationId":"createUsersWithListInput","requestBody":{"content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/User"}}}}},"responses":{"200":{"description":"Successful operation","content":{"application/json":{"schema":{"$ref":"#/components/schemas/User"}},"application/xml":{"schema":{"$ref":"#/components/schemas/User"}}}},"default":{"description":"Unexpected error"}}}},"/user/login":{"get":{"tags":["user"],"summary":"Logs user into the system.","description":"Log into the system.","operationId":"loginUser","parameters":[{"name":"username","in":"query","description":"The user name for login","required":false,"schema":{"type":"string"}},{"name":"password","in":"query","description":"The password for login in clear text","required":false,"schema":{"type":"string"}}],"responses":{"200":{"description":"successful operation","headers":{"X-Rate-Limit":{"description":"calls per hour allowed by the user","schema":{"type":"integer","format":"int32"}},"X-Expires-After":{"description":"date in UTC when token expires","schema":{"type":"string","format":"date-time"}}},"content":{"application/xml":{"schema":{"type":"string"}},"application/json":{"schema":{"type":"string"}}}},"400":{"description":"Invalid username/password supplied"},"default":{"description":"Unexpected error"}}}},"/user/logout":{"get":{"tags":["user"],"summary":"Logs out current logged in user session.","description":"Log user out of the system.","operationId":"logoutUser","parameters":[],"responses":{"200":{"description":"successful operation"},"default":{"description":"Unexpected error"}}}},"/user/{username}":{"get":{"tags":["user"],"summary":"Get user by user name.","description":"Get user detail based on username.","operationId":"getUserByName","parameters":[{"name":"username","in":"path","description":"The name that needs to be fetched. 
Use user1 for testing","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"successful operation","content":{"application/json":{"schema":{"$ref":"#/components/schemas/User"}},"application/xml":{"schema":{"$ref":"#/components/schemas/User"}}}},"400":{"description":"Invalid username supplied"},"404":{"description":"User not found"},"default":{"description":"Unexpected error"}}},"put":{"tags":["user"],"summary":"Update user resource.","description":"This can only be done by the logged in user.","operationId":"updateUser","parameters":[{"name":"username","in":"path","description":"name that need to be deleted","required":true,"schema":{"type":"string"}}],"requestBody":{"description":"Update an existent user in the store","content":{"application/json":{"schema":{"$ref":"#/components/schemas/User"}},"application/xml":{"schema":{"$ref":"#/components/schemas/User"}},"application/x-www-form-urlencoded":{"schema":{"$ref":"#/components/schemas/User"}}}},"responses":{"200":{"description":"successful operation"},"400":{"description":"bad request"},"404":{"description":"user not found"},"default":{"description":"Unexpected error"}}},"delete":{"tags":["user"],"summary":"Delete user resource.","description":"This can only be done by the logged in user.","operationId":"deleteUser","parameters":[{"name":"username","in":"path","description":"The name that needs to be deleted","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"User deleted"},"400":{"description":"Invalid username supplied"},"404":{"description":"User not found"},"default":{"description":"Unexpected error"}}}}},"components":{"schemas":{"Order":{"type":"object","properties":{"id":{"type":"integer","format":"int64","example":10},"petId":{"type":"integer","format":"int64","example":198772},"quantity":{"type":"integer","format":"int32","example":7},"shipDate":{"type":"string","format":"date-time"},"status":{"type":"string","description":"Order 
Status","example":"approved","enum":["placed","approved","delivered"]},"complete":{"type":"boolean"}},"xml":{"name":"order"}},"Category":{"type":"object","properties":{"id":{"type":"integer","format":"int64","example":1},"name":{"type":"string","example":"Dogs"}},"xml":{"name":"category"}},"User":{"type":"object","properties":{"id":{"type":"integer","format":"int64","example":10},"username":{"type":"string","example":"theUser"},"firstName":{"type":"string","example":"John"},"lastName":{"type":"string","example":"James"},"email":{"type":"string","example":"john@email.com"},"password":{"type":"string","example":"12345"},"phone":{"type":"string","example":"12345"},"userStatus":{"type":"integer","description":"User Status","format":"int32","example":1}},"xml":{"name":"user"}},"Tag":{"type":"object","properties":{"id":{"type":"integer","format":"int64"},"name":{"type":"string"}},"xml":{"name":"tag"}},"Pet":{"required":["name","photoUrls"],"type":"object","properties":{"id":{"type":"integer","format":"int64","example":10},"name":{"type":"string","example":"doggie"},"category":{"$ref":"#/components/schemas/Category"},"photoUrls":{"type":"array","xml":{"wrapped":true},"items":{"type":"string","xml":{"name":"photoUrl"}}},"tags":{"type":"array","xml":{"wrapped":true},"items":{"$ref":"#/components/schemas/Tag"}},"status":{"type":"string","description":"pet status in the store","enum":["available","pending","sold"]}},"xml":{"name":"pet"}},"ApiResponse":{"type":"object","properties":{"code":{"type":"integer","format":"int32"},"type":{"type":"string"},"message":{"type":"string"}},"xml":{"name":"##default"}}},"requestBodies":{"Pet":{"description":"Pet object that needs to be added to the store","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Pet"}},"application/xml":{"schema":{"$ref":"#/components/schemas/Pet"}}}},"UserArray":{"description":"List of user 
object","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/User"}}}}}},"securitySchemes":{"petstore_auth":{"type":"oauth2","flows":{"implicit":{"authorizationUrl":"https://petstore3.swagger.io/oauth/authorize","scopes":{"write:pets":"modify pets in your account","read:pets":"read your pets"}}}},"api_key":{"type":"apiKey","name":"api_key","in":"header"}}}} \ No newline at end of file diff --git a/src/Napper.Core.Tests/.spec-cache/storefront.json b/src/Napper.Core.Tests/.spec-cache/storefront.json new file mode 100644 index 0000000..51859ef --- /dev/null +++ b/src/Napper.Core.Tests/.spec-cache/storefront.json @@ -0,0 +1,520 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "E-commerce API", + "version": "1.0.0", + "description": "This is an e-commerce API spec for a storefront. \nIt includes authentication, product browsing, cart, and checkout operations.\nAuth is token-based. Explore, test, and mock this API freely.\n" + }, + "servers": [ + { + "url": "https://api.demo-ecommerce.com/v1", + "description": "Production environment" + }, + { + "url": "https://api.dev.demo-ecommerce.com/v1", + "description": "Development environment" + } + ], + "components": { + "securitySchemes": { + "BearerAuth": { + "type": "http", + "scheme": "bearer", + "bearerFormat": "JWT" + } + }, + "schemas": { + "Product": { + "type": "object", + "required": [ + "id", + "name", + "price", + "stock", + "category" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "example": "eda5cbc1-a615-4da5-ae73-4a33a9acfb6a" + }, + "name": { + "type": "string", + "example": "Worry Management" + }, + "description": { + "type": "string", + "example": "Mr street sell would civil. People through shake southern force." 
+ }, + "price": { + "type": "number", + "format": "float", + "example": 91.37 + }, + "category": { + "type": "string", + "example": "wrong" + }, + "image_url": { + "type": "string", + "format": "uri", + "example": "https://dummyimage.com/766x809" + }, + "stock": { + "type": "integer", + "example": 94 + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "type": "string", + "format": "date-time" + } + } + }, + "CartItem": { + "type": "object", + "required": [ + "product_id", + "quantity" + ], + "properties": { + "product_id": { + "type": "string", + "format": "uuid" + }, + "quantity": { + "type": "integer", + "minimum": 1 + } + } + }, + "Address": { + "type": "object", + "required": [ + "line1", + "city", + "state", + "postal_code", + "country" + ], + "properties": { + "line1": { + "type": "string" + }, + "line2": { + "type": "string" + }, + "city": { + "type": "string" + }, + "state": { + "type": "string" + }, + "postal_code": { + "type": "string" + }, + "country": { + "type": "string" + } + } + }, + "Order": { + "type": "object", + "required": [ + "id", + "items", + "total_amount", + "status", + "created_at" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "items": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CartItem" + } + }, + "total_amount": { + "type": "number" + }, + "status": { + "type": "string", + "enum": [ + "pending", + "confirmed", + "shipped", + "delivered" + ] + }, + "created_at": { + "type": "string", + "format": "date-time" + } + } + } + } + }, + "paths": { + "/auth/register": { + "post": { + "summary": "Create a new user account", + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "required": [ + "email", + "password" + ], + "properties": { + "email": { + "type": "string", + "format": "email" + }, + "password": { + "type": "string", + "format": "password" + }, + "name": { + "type": "string" + } + } 
+ } + } + } + }, + "responses": { + "201": { + "description": "User created" + }, + "400": { + "description": "Invalid input" + } + } + } + }, + "/auth/login": { + "post": { + "summary": "Login and get access token", + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "required": [ + "email", + "password" + ], + "properties": { + "email": { + "type": "string" + }, + "password": { + "type": "string" + } + } + } + } + } + }, + "responses": { + "200": { + "description": "Authenticated successfully" + }, + "401": { + "description": "Unauthorized" + } + } + } + }, + "/products": { + "get": { + "summary": "List all products with filters", + "parameters": [ + { + "name": "category", + "in": "query", + "schema": { + "type": "string" + } + }, + { + "name": "search", + "in": "query", + "schema": { + "type": "string" + } + }, + { + "name": "min_price", + "in": "query", + "schema": { + "type": "number" + } + }, + { + "name": "max_price", + "in": "query", + "schema": { + "type": "number" + } + } + ], + "responses": { + "200": { + "description": "List of products", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Product" + } + } + } + } + } + } + } + }, + "/products/{id}": { + "get": { + "summary": "Get product details by ID", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Product details", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Product" + } + } + } + } + } + } + }, + "/cart": { + "get": { + "summary": "Get current user's cart", + "security": [ + { + "BearerAuth": [] + } + ], + "responses": { + "200": { + "description": "Your cart items", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CartItem" + } + 
} + } + } + } + } + } + }, + "/cart/items": { + "post": { + "summary": "Add item to cart", + "security": [ + { + "BearerAuth": [] + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CartItem" + } + } + } + }, + "responses": { + "200": { + "description": "Item added to cart" + } + } + } + }, + "/checkout": { + "post": { + "summary": "Checkout and place order", + "security": [ + { + "BearerAuth": [] + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "required": [ + "address_id", + "payment_method_id" + ], + "properties": { + "address_id": { + "type": "string" + }, + "payment_method_id": { + "type": "string" + } + } + } + } + } + }, + "responses": { + "201": { + "description": "Order created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Order" + } + } + } + } + } + } + }, + "/orders": { + "get": { + "summary": "List your past orders", + "security": [ + { + "BearerAuth": [] + } + ], + "responses": { + "200": { + "description": "Order history", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Order" + } + } + } + } + } + } + } + }, + "/orders/{orderId}": { + "get": { + "summary": "Get order details", + "security": [ + { + "BearerAuth": [] + } + ], + "parameters": [ + { + "name": "orderId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "Order detail", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Order" + } + } + } + } + } + } + }, + "/addresses": { + "get": { + "summary": "Get your saved addresses", + "security": [ + { + "BearerAuth": [] + } + ], + "responses": { + "200": { + "description": "List of saved addresses", + "content": { + "application/json": { + "schema": { + 
"type": "array", + "items": { + "$ref": "#/components/schemas/Address" + } + } + } + } + } + } + }, + "post": { + "summary": "Add a new address", + "security": [ + { + "BearerAuth": [] + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Address" + } + } + } + }, + "responses": { + "201": { + "description": "Address added" + } + } + } + } + } +} \ No newline at end of file diff --git a/tests/Nap.Core.Tests/AssertionEdgeCaseTests.fs b/src/Napper.Core.Tests/AssertionEdgeCaseTests.fs similarity index 60% rename from tests/Nap.Core.Tests/AssertionEdgeCaseTests.fs rename to src/Napper.Core.Tests/AssertionEdgeCaseTests.fs index 8356ea8..8dceba1 100644 --- a/tests/Nap.Core.Tests/AssertionEdgeCaseTests.fs +++ b/src/Napper.Core.Tests/AssertionEdgeCaseTests.fs @@ -1,69 +1,84 @@ module AssertionEdgeCaseTests +// Specs: assert-status, assert-equals, assert-exists, assert-contains, assert-matches, assert-lt, assert-gt open System open Xunit -open Nap.Core +open Napper.Core let private makeResponse status headers body durationMs : NapResponse = - { - StatusCode = status - Headers = headers |> Map.ofList - Body = body - Duration = TimeSpan.FromMilliseconds(float durationMs) - } + { StatusCode = status + Headers = headers |> Map.ofList + Body = body + Duration = TimeSpan.FromMilliseconds(float durationMs) } let private ok200 body = - makeResponse 200 [("Content-Type", "application/json")] body 100 + makeResponse 200 [ ("Content-Type", "application/json") ] body 100 -// ─── Status assertions ──────────────────────────────────────── +// ─── Status assertions ───────────────────── Spec: assert-status [<Fact>] let ``Status equals various codes`` () = - for code in [200; 201; 204; 301; 400; 401; 403; 404; 500; 502; 503] do + for code in [ 200; 201; 204; 301; 400; 401; 403; 404; 500; 502; 503 ] do let response = makeResponse code [] "" 50 - let assertions = [{ Target = "status"; Op = Equals (string code) }] 
+ + let assertions = + [ { Target = "status" + Op = Equals(string code) } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed, $"Expected status {code} to match") [<Fact>] let ``Status mismatch reports actual code`` () = let response = makeResponse 500 [] "" 50 - let assertions = [{ Target = "status"; Op = Equals "200" }] + let assertions = [ { Target = "status"; Op = Equals "200" } ] let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) Assert.Equal("500", results[0].Actual) Assert.Equal("200", results[0].Expected) -// ─── Body assertions ────────────────────────────────────────── +// ─── Body assertions ─────────────────────── Spec: assert-equals, assert-exists, assert-contains [<Fact>] let ``Whole body equals`` () = let response = ok200 "hello world" - let assertions = [{ Target = "body"; Op = Equals "hello world" }] + + let assertions = + [ { Target = "body" + Op = Equals "hello world" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) [<Fact>] let ``Whole body contains`` () = let response = ok200 "The quick brown fox" - let assertions = [{ Target = "body"; Op = Contains "QUICK" }] + + let assertions = + [ { Target = "body" + Op = Contains "QUICK" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed, "Contains should be case-insensitive") [<Fact>] let ``Whole body exists`` () = let response = ok200 "anything" - let assertions = [{ Target = "body"; Op = Exists }] + let assertions = [ { Target = "body"; Op = Exists } ] let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) -// ─── JSON path extraction ───────────────────────────────────── +// ─── JSON path extraction ────────────────── Spec: assert-equals, assert-exists [<Fact>] let ``Deeply nested JSON path (3 levels)`` () = let body = """{"user": {"address": {"city": "Portland"}}}""" let response = ok200 body 
- let assertions = [{ Target = "body.user.address.city"; Op = Equals "Portland" }] + + let assertions = + [ { Target = "body.user.address.city" + Op = Equals "Portland" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) @@ -71,7 +86,11 @@ let ``Deeply nested JSON path (3 levels)`` () = let ``JSON numeric field`` () = let body = """{"count": 42}""" let response = ok200 body - let assertions = [{ Target = "body.count"; Op = Equals "42" }] + + let assertions = + [ { Target = "body.count" + Op = Equals "42" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) @@ -79,7 +98,11 @@ let ``JSON numeric field`` () = let ``JSON boolean true field`` () = let body = """{"active": true}""" let response = ok200 body - let assertions = [{ Target = "body.active"; Op = Equals "true" }] + + let assertions = + [ { Target = "body.active" + Op = Equals "true" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) @@ -87,7 +110,11 @@ let ``JSON boolean true field`` () = let ``JSON boolean false field`` () = let body = """{"active": false}""" let response = ok200 body - let assertions = [{ Target = "body.active"; Op = Equals "false" }] + + let assertions = + [ { Target = "body.active" + Op = Equals "false" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) @@ -95,7 +122,11 @@ let ``JSON boolean false field`` () = let ``JSON null field`` () = let body = """{"deleted_at": null}""" let response = ok200 body - let assertions = [{ Target = "body.deleted_at"; Op = Equals "null" }] + + let assertions = + [ { Target = "body.deleted_at" + Op = Equals "null" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) @@ -103,7 +134,11 @@ let ``JSON null field`` () = let ``JSON null field exists`` () = let body = """{"deleted_at": null}""" let response = ok200 body - let assertions = [{ 
Target = "body.deleted_at"; Op = Exists }] + + let assertions = + [ { Target = "body.deleted_at" + Op = Exists } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) @@ -111,7 +146,7 @@ let ``JSON null field exists`` () = let ``JSON array field returns raw JSON`` () = let body = """{"tags": ["a", "b", "c"]}""" let response = ok200 body - let assertions = [{ Target = "body.tags"; Op = Exists }] + let assertions = [ { Target = "body.tags"; Op = Exists } ] let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) @@ -119,7 +154,7 @@ let ``JSON array field returns raw JSON`` () = let ``JSON nested object field returns raw JSON`` () = let body = """{"user": {"name": "Alice", "age": 30}}""" let response = ok200 body - let assertions = [{ Target = "body.user"; Op = Exists }] + let assertions = [ { Target = "body.user"; Op = Exists } ] let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) @@ -127,7 +162,11 @@ let ``JSON nested object field returns raw JSON`` () = let ``Missing JSON path fails exists`` () = let body = """{"name": "test"}""" let response = ok200 body - let assertions = [{ Target = "body.nonexistent"; Op = Exists }] + + let assertions = + [ { Target = "body.nonexistent" + Op = Exists } ] + let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) Assert.Equal("<missing>", results[0].Actual) @@ -136,7 +175,11 @@ let ``Missing JSON path fails exists`` () = let ``Missing JSON path fails equals`` () = let body = """{"name": "test"}""" let response = ok200 body - let assertions = [{ Target = "body.missing"; Op = Equals "anything" }] + + let assertions = + [ { Target = "body.missing" + Op = Equals "anything" } ] + let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) Assert.Equal("<missing>", results[0].Actual) @@ -144,120 +187,172 @@ let ``Missing JSON path fails equals`` () = 
[<Fact>] let ``Non-JSON body with body path returns missing`` () = let response = ok200 "plain text, not json" - let assertions = [{ Target = "body.field"; Op = Exists }] + let assertions = [ { Target = "body.field"; Op = Exists } ] let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) [<Fact>] let ``Empty body with body path returns missing`` () = let response = ok200 "" - let assertions = [{ Target = "body.field"; Op = Exists }] + let assertions = [ { Target = "body.field"; Op = Exists } ] let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) -// ─── Header assertions ──────────────────────────────────────── +// ─── Header assertions ───────────────────── Spec: assert-equals, assert-exists, assert-contains [<Fact>] let ``Header case-insensitive lookup`` () = - let response = makeResponse 200 [("content-type", "application/json")] "" 50 - let assertions = [{ Target = "headers.Content-Type"; Op = Contains "json" }] + let response = makeResponse 200 [ ("content-type", "application/json") ] "" 50 + + let assertions = + [ { Target = "headers.Content-Type" + Op = Contains "json" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed, "Header lookup should be case-insensitive") [<Fact>] let ``Header exact match`` () = - let response = makeResponse 200 [("X-Custom", "hello")] "" 50 - let assertions = [{ Target = "headers.X-Custom"; Op = Equals "hello" }] + let response = makeResponse 200 [ ("X-Custom", "hello") ] "" 50 + + let assertions = + [ { Target = "headers.X-Custom" + Op = Equals "hello" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) [<Fact>] let ``Header exists`` () = - let response = makeResponse 200 [("X-Request-Id", "abc-123")] "" 50 - let assertions = [{ Target = "headers.X-Request-Id"; Op = Exists }] + let response = makeResponse 200 [ ("X-Request-Id", "abc-123") ] "" 50 + + let assertions = + 
[ { Target = "headers.X-Request-Id" + Op = Exists } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) [<Fact>] let ``Missing header fails`` () = let response = makeResponse 200 [] "" 50 - let assertions = [{ Target = "headers.X-Missing"; Op = Exists }] + + let assertions = + [ { Target = "headers.X-Missing" + Op = Exists } ] + let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) -// ─── Duration assertions ────────────────────────────────────── +// ─── Duration assertions ─────────────────── Spec: assert-lt, assert-gt [<Fact>] let ``Duration less than passes`` () = let response = makeResponse 200 [] "" 100 - let assertions = [{ Target = "duration"; Op = LessThan "500ms" }] + + let assertions = + [ { Target = "duration" + Op = LessThan "500ms" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) [<Fact>] let ``Duration less than fails when over threshold`` () = let response = makeResponse 200 [] "" 600 - let assertions = [{ Target = "duration"; Op = LessThan "500ms" }] + + let assertions = + [ { Target = "duration" + Op = LessThan "500ms" } ] + let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) [<Fact>] let ``Duration greater than passes`` () = let response = makeResponse 200 [] "" 600 - let assertions = [{ Target = "duration"; Op = GreaterThan "500ms" }] + + let assertions = + [ { Target = "duration" + Op = GreaterThan "500ms" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) [<Fact>] let ``Duration greater than fails when under threshold`` () = let response = makeResponse 200 [] "" 100 - let assertions = [{ Target = "duration"; Op = GreaterThan "500ms" }] + + let assertions = + [ { Target = "duration" + Op = GreaterThan "500ms" } ] + let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) -// ─── Contains assertion 
─────────────────────────────────────── +// ─── Contains assertion ──────────────────── Spec: assert-contains [<Fact>] let ``Contains is case-insensitive`` () = - let response = makeResponse 200 [("Content-Type", "Application/JSON")] "" 50 - let assertions = [{ Target = "headers.Content-Type"; Op = Contains "json" }] + let response = makeResponse 200 [ ("Content-Type", "Application/JSON") ] "" 50 + + let assertions = + [ { Target = "headers.Content-Type" + Op = Contains "json" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) [<Fact>] let ``Contains fails when substring absent`` () = let response = ok200 """{"type": "xml"}""" - let assertions = [{ Target = "body.type"; Op = Contains "json" }] + + let assertions = + [ { Target = "body.type" + Op = Contains "json" } ] + let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) -// ─── Matches assertion ──────────────────────────────────────── +// ─── Matches assertion ───────────────────── Spec: assert-matches [<Fact>] let ``Matches with pattern`` () = let response = ok200 """{"email": "test@example.com"}""" - let assertions = [{ Target = "body.email"; Op = Matches "*@*.*" }] + + let assertions = + [ { Target = "body.email" + Op = Matches "*@*.*" } ] + let results = Runner.evaluateAssertions assertions response Assert.True(results[0].Passed) [<Fact>] let ``Matches fails when pattern does not match`` () = let response = ok200 """{"email": "not-an-email"}""" - let assertions = [{ Target = "body.email"; Op = Matches "*@*.*" }] + + let assertions = + [ { Target = "body.email" + Op = Matches "*@*.*" } ] + let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) -// ─── Multiple assertions mixed results ──────────────────────── +// ─── Multiple assertions mixed results ──── Spec: assert-status, assert-equals, assert-exists, assert-contains [<Fact>] let ``Multiple assertions with mixed pass/fail`` () = - let 
response = makeResponse 404 [("Content-Type", "application/json")] """{"error": "not found"}""" 50 - let assertions = [ - { Target = "status"; Op = Equals "200" } - { Target = "headers.Content-Type"; Op = Contains "json" } - { Target = "body.error"; Op = Equals "not found" } - { Target = "body.id"; Op = Exists } - ] + let response = + makeResponse 404 [ ("Content-Type", "application/json") ] """{"error": "not found"}""" 50 + + let assertions = + [ { Target = "status"; Op = Equals "200" } + { Target = "headers.Content-Type" + Op = Contains "json" } + { Target = "body.error" + Op = Equals "not found" } + { Target = "body.id"; Op = Exists } ] + let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed, "status should fail (404 != 200)") Assert.True(results[1].Passed, "header should pass") @@ -267,25 +362,37 @@ let ``Multiple assertions with mixed pass/fail`` () = [<Fact>] let ``All assertions pass for healthy response`` () = let body = """{"id": 1, "name": "Alice", "active": true}""" - let response = makeResponse 200 [("Content-Type", "application/json"); ("X-Request-Id", "abc")] body 50 - let assertions = [ - { Target = "status"; Op = Equals "200" } - { Target = "body.id"; Op = Exists } - { Target = "body.name"; Op = Equals "Alice" } - { Target = "body.active"; Op = Equals "true" } - { Target = "headers.Content-Type"; Op = Contains "json" } - { Target = "headers.X-Request-Id"; Op = Exists } - { Target = "duration"; Op = LessThan "1000ms" } - ] + + let response = + makeResponse 200 [ ("Content-Type", "application/json"); ("X-Request-Id", "abc") ] body 50 + + let assertions = + [ { Target = "status"; Op = Equals "200" } + { Target = "body.id"; Op = Exists } + { Target = "body.name" + Op = Equals "Alice" } + { Target = "body.active" + Op = Equals "true" } + { Target = "headers.Content-Type" + Op = Contains "json" } + { Target = "headers.X-Request-Id" + Op = Exists } + { Target = "duration" + Op = LessThan "1000ms" } ] + let results = 
Runner.evaluateAssertions assertions response Assert.All(results, fun r -> Assert.True(r.Passed, $"{r.Assertion.Target}: expected {r.Expected}, got {r.Actual}")) -// ─── Unknown target ─────────────────────────────────────────── +// ─── Unknown target ──────────────────────── Spec: assert-exists [<Fact>] let ``Unknown target returns missing`` () = let response = ok200 "" - let assertions = [{ Target = "unknown_target"; Op = Exists }] + + let assertions = + [ { Target = "unknown_target" + Op = Exists } ] + let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) Assert.Equal("<missing>", results[0].Actual) diff --git a/tests/Nap.Core.Tests/CliArgTests.fs b/src/Napper.Core.Tests/CliArgTests.fs similarity index 76% rename from tests/Nap.Core.Tests/CliArgTests.fs rename to src/Napper.Core.Tests/CliArgTests.fs index 641346f..62acd3e 100644 --- a/tests/Nap.Core.Tests/CliArgTests.fs +++ b/src/Napper.Core.Tests/CliArgTests.fs @@ -1,41 +1,54 @@ module CliArgTests +// Specs: cli-run, cli-check, cli-var, cli-exit-codes, cli-output, +// output-json, output-junit, output-ndjson, output-pretty open System -open System.Diagnostics open System.IO +open System.Xml.Linq open Xunit -/// Run the CLI and capture output + exit code -let private runCli (args: string) (cwd: string) : int * string * string = - let projectPath = Path.GetFullPath(Path.Combine(__SOURCE_DIRECTORY__, "../../src/Nap.Cli/Nap.Cli.fsproj")) - let psi = ProcessStartInfo() - psi.FileName <- "dotnet" - psi.Arguments <- sprintf "run --project %s -- %s" projectPath args - psi.WorkingDirectory <- cwd - psi.RedirectStandardOutput <- true - psi.RedirectStandardError <- true - psi.UseShellExecute <- false - psi.CreateNoWindow <- true - - use proc = Process.Start(psi) - let stdout = proc.StandardOutput.ReadToEnd() - let stderr = proc.StandardError.ReadToEnd() - proc.WaitForExit() - proc.ExitCode, stdout, stderr +let private runCli args cwd = TestHelpers.runCli args cwd let private 
createTempDir () = - let dir = Path.Combine(Path.GetTempPath(), sprintf "nap-arg-test-%s" (Guid.NewGuid().ToString("N"))) + let dir = + Path.Combine(Path.GetTempPath(), sprintf "nap-arg-test-%s" (Guid.NewGuid().ToString("N"))) + Directory.CreateDirectory(dir) |> ignore dir let private cleanupDir (dir: string) = - if Directory.Exists(dir) then Directory.Delete(dir, true) + if Directory.Exists(dir) then + Directory.Delete(dir, true) -// ─── Help variations ────────────────────────────────────────── +// ─── Version in Directory.Build.props ────── Spec: build-version + +[<Fact>] +let ``Directory.Build.props declares a non-empty Version`` () = + let repoRoot = + let mutable d = DirectoryInfo(AppContext.BaseDirectory) + + while d <> null + && not (File.Exists(Path.Combine(d.FullName, "Directory.Build.props"))) do + d <- d.Parent + + d.FullName + + let propsPath = Path.Combine(repoRoot, "Directory.Build.props") + let doc = XDocument.Load(propsPath) + + let versionEl = doc.Descendants(XName.Get "Version") |> Seq.tryHead + + Assert.True(versionEl.IsSome, "Directory.Build.props must contain a <Version> element") + let propsVersion = versionEl.Value.Value.Trim() + Assert.False(String.IsNullOrWhiteSpace(propsVersion), "Version must not be empty") + Assert.Matches(@"^\d+\.\d+\.\d+", propsVersion) + +// ─── Help variations ─────────────────────── Spec: cli-exit-codes [<Fact>] let ``No args shows help with exit 0`` () = let dir = createTempDir () + try let exitCode, stdout, _ = runCli "" dir Assert.Equal(0, exitCode) @@ -46,6 +59,7 @@ let ``No args shows help with exit 0`` () = [<Fact>] let ``help command shows all options`` () = let dir = createTempDir () + try let _, stdout, _ = runCli "help" dir Assert.Contains("nap run", stdout) @@ -59,6 +73,7 @@ let ``help command shows all options`` () = [<Fact>] let ``--help flag shows usage`` () = let dir = createTempDir () + try let exitCode, stdout, _ = runCli "--help" dir Assert.Equal(0, exitCode) @@ -69,6 +84,7 @@ let ``--help flag 
shows usage`` () = [<Fact>] let ``-h flag shows usage`` () = let dir = createTempDir () + try let exitCode, stdout, _ = runCli "-h" dir Assert.Equal(0, exitCode) @@ -76,11 +92,12 @@ let ``-h flag shows usage`` () = finally cleanupDir dir -// ─── Unknown command ────────────────────────────────────────── +// ─── Unknown command ─────────────────────── Spec: cli-exit-codes [<Fact>] let ``unknown command returns exit 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "bogus" dir Assert.Equal(2, exitCode) @@ -88,11 +105,12 @@ let ``unknown command returns exit 2`` () = finally cleanupDir dir -// ─── check edge cases ───────────────────────────────────────── +// ─── check edge cases ────────────────────── Spec: cli-check, cli-exit-codes [<Fact>] let ``check no file returns exit 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "check" dir Assert.Equal(2, exitCode) @@ -103,6 +121,7 @@ let ``check no file returns exit 2`` () = [<Fact>] let ``check missing file returns exit 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "check ghost.nap" dir Assert.Equal(2, exitCode) @@ -110,11 +129,12 @@ let ``check missing file returns exit 2`` () = finally cleanupDir dir -// ─── run edge cases ─────────────────────────────────────────── +// ─── run edge cases ──────────────────────── Spec: cli-run, cli-exit-codes [<Fact>] let ``run no file returns exit 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "run" dir Assert.Equal(2, exitCode) @@ -125,6 +145,7 @@ let ``run no file returns exit 2`` () = [<Fact>] let ``run missing file returns exit 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "run ghost.nap" dir Assert.Equal(2, exitCode) @@ -135,6 +156,7 @@ let ``run missing file returns exit 2`` () = [<Fact>] let ``run empty directory returns exit 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli (sprintf "run %s" dir) dir Assert.Equal(2, 
exitCode) @@ -142,11 +164,12 @@ let ``run empty directory returns exit 2`` () = finally cleanupDir dir -// ─── --var with equals in value ─────────────────────────────── +// ─── --var with equals in value ──────────── Spec: cli-var [<Fact>] let ``--var handles equals in value`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://httpbin.org/get") let exitCode, _, _ = runCli "run test.nap --var token=abc==def --output json" dir @@ -154,11 +177,12 @@ let ``--var handles equals in value`` () = finally cleanupDir dir -// ─── Flags before file path ────────────────────────────────── +// ─── Flags before file path ──────────────── Spec: cli-run [<Fact>] let ``flags before file path work`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://httpbin.org/get") let exitCode, _, _ = runCli "run --output json test.nap" dir @@ -166,11 +190,12 @@ let ``flags before file path work`` () = finally cleanupDir dir -// ─── All output formats ────────────────────────────────────── +// ─── All output formats ─────────────────── Spec: cli-output, output-json, output-junit, output-ndjson, output-pretty [<Fact>] let ``json output is valid JSON`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://httpbin.org/get") let _, stdout, _ = runCli "run test.nap --output json" dir @@ -182,6 +207,7 @@ let ``json output is valid JSON`` () = [<Fact>] let ``junit output is valid XML`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://httpbin.org/get") let _, stdout, _ = runCli "run test.nap --output junit" dir @@ -193,6 +219,7 @@ let ``junit output is valid XML`` () = [<Fact>] let ``ndjson output gives one line per result`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://httpbin.org/get") let _, stdout, _ = runCli "run test.nap --output ndjson" dir @@ -206,6 
+233,7 @@ let ``ndjson output gives one line per result`` () = [<Fact>] let ``pretty output is default`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://httpbin.org/get") let _, stdout, _ = runCli "run test.nap" dir diff --git a/tests/Nap.Core.Tests/CliIntegrationTests.fs b/src/Napper.Core.Tests/CliIntegrationTests.fs similarity index 81% rename from tests/Nap.Core.Tests/CliIntegrationTests.fs rename to src/Napper.Core.Tests/CliIntegrationTests.fs index 3dbedd8..de0c220 100644 --- a/tests/Nap.Core.Tests/CliIntegrationTests.fs +++ b/src/Napper.Core.Tests/CliIntegrationTests.fs @@ -1,42 +1,31 @@ module CliIntegrationTests +// Specs: cli-run, cli-check, cli-env, cli-var, cli-exit-codes, cli-output, +// nap-minimal, nap-full, nap-assert, nap-body, nap-request, http-methods, +// env-interpolation, env-named, env-resolution, collection-folder, +// naplist-file, naplist-steps, naplist-nested, naplist-script-step, +// script-fsx, script-csx, output-json, output-junit, output-pretty, output-ndjson open System -open System.Diagnostics open System.IO open Xunit -open Nap.Core - -/// Run the CLI as a process and capture output + exit code -let private runCli (args: string) (cwd: string) : int * string * string = - let projectPath = Path.GetFullPath(Path.Combine(__SOURCE_DIRECTORY__, "../../src/Nap.Cli/Nap.Cli.fsproj")) - let psi = ProcessStartInfo() - psi.FileName <- "dotnet" - psi.Arguments <- $"run --project {projectPath} -- {args}" - psi.WorkingDirectory <- cwd - psi.RedirectStandardOutput <- true - psi.RedirectStandardError <- true - psi.UseShellExecute <- false - psi.CreateNoWindow <- true - - use proc = Process.Start(psi) - let stdout = proc.StandardOutput.ReadToEnd() - let stderr = proc.StandardError.ReadToEnd() - proc.WaitForExit() - proc.ExitCode, stdout, stderr +open Napper.Core + +let private runCli args cwd = TestHelpers.runCli args cwd + +let private runCliSlow args cwd = + TestHelpers.runCliWithTimeout 
TestHelpers.ScriptTimeoutMs args cwd let private createTempDir () = - let dir = Path.Combine(Path.GetTempPath(), $"nap-cli-test-{Guid.NewGuid():N}") - Directory.CreateDirectory(dir) |> ignore - dir + TestHelpers.createTempDir "nap-cli-test" -let private cleanupDir (dir: string) = - if Directory.Exists(dir) then Directory.Delete(dir, true) +let private cleanupDir dir = TestHelpers.cleanupDir dir -// ─── Help command ───────────────────────────────────────────── +// ─── Help command ────────────────────────── Spec: cli-exit-codes [<Fact>] let ``CLI help returns exit code 0`` () = let dir = createTempDir () + try let exitCode, stdout, _ = runCli "help" dir Assert.Equal(0, exitCode) @@ -49,6 +38,7 @@ let ``CLI help returns exit code 0`` () = [<Fact>] let ``CLI --help returns exit code 0`` () = let dir = createTempDir () + try let exitCode, stdout, _ = runCli "--help" dir Assert.Equal(0, exitCode) @@ -56,11 +46,12 @@ let ``CLI --help returns exit code 0`` () = finally cleanupDir dir -// ─── Check command ──────────────────────────────────────────── +// ─── Check command ───────────────────────── Spec: cli-check, nap-minimal, nap-full, naplist-file, cli-exit-codes [<Fact>] let ``CLI check valid shorthand nap file`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://example.com") let exitCode, stdout, _ = runCli "check test.nap" dir @@ -72,8 +63,11 @@ let ``CLI check valid shorthand nap file`` () = [<Fact>] let ``CLI check valid full format nap file`` () = let dir = createTempDir () + try - let content = "[request]\nmethod = POST\nurl = https://example.com\n\n[assert]\nstatus = 201\n" + let content = + "[request]\nmethod = POST\nurl = https://example.com\n\n[assert]\nstatus = 201\n" + File.WriteAllText(Path.Combine(dir, "test.nap"), content) let exitCode, stdout, _ = runCli "check test.nap" dir Assert.Equal(0, exitCode) @@ -84,6 +78,7 @@ let ``CLI check valid full format nap file`` () = [<Fact>] let ``CLI check valid 
naplist file`` () = let dir = createTempDir () + try let content = "[meta]\nname = \"Test\"\n\n[steps]\n./test.nap\n" File.WriteAllText(Path.Combine(dir, "test.naplist"), content) @@ -96,6 +91,7 @@ let ``CLI check valid naplist file`` () = [<Fact>] let ``CLI check invalid nap file returns non-zero exit code`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "bad.nap"), "[request]\n# no method, no url\n") let exitCode, _, _ = runCli "check bad.nap" dir @@ -106,6 +102,7 @@ let ``CLI check invalid nap file returns non-zero exit code`` () = [<Fact>] let ``CLI check missing file returns exit code 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "check nonexistent.nap" dir Assert.Equal(2, exitCode) @@ -116,6 +113,7 @@ let ``CLI check missing file returns exit code 2`` () = [<Fact>] let ``CLI check with no file returns exit code 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "check" dir Assert.Equal(2, exitCode) @@ -123,11 +121,12 @@ let ``CLI check with no file returns exit code 2`` () = finally cleanupDir dir -// ─── Run command: single file ───────────────────────────────── +// ─── Run command: single file ────────────── Spec: cli-run, nap-minimal, nap-assert, cli-exit-codes [<Fact>] let ``CLI run shorthand GET against jsonplaceholder`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://jsonplaceholder.typicode.com/posts/1") let exitCode, stdout, _ = runCli "run test.nap --output json" dir @@ -141,8 +140,11 @@ let ``CLI run shorthand GET against jsonplaceholder`` () = [<Fact>] let ``CLI run with assertions that pass`` () = let dir = createTempDir () + try - let content = "[request]\nmethod = GET\nurl = https://httpbin.org/get\n\n[assert]\nstatus = 200\n" + let content = + "[request]\nmethod = GET\nurl = https://httpbin.org/get\n\n[assert]\nstatus = 200\n" + File.WriteAllText(Path.Combine(dir, "test.nap"), content) let exitCode, stdout, _ = 
runCli "run test.nap --output json" dir Assert.Equal(0, exitCode) @@ -154,8 +156,11 @@ let ``CLI run with assertions that pass`` () = [<Fact>] let ``CLI run with failing assertion returns exit code 1`` () = let dir = createTempDir () + try - let content = "[request]\nmethod = GET\nurl = https://httpbin.org/get\n\n[assert]\nstatus = 404\n" + let content = + "[request]\nmethod = GET\nurl = https://httpbin.org/get\n\n[assert]\nstatus = 404\n" + File.WriteAllText(Path.Combine(dir, "test.nap"), content) let exitCode, stdout, _ = runCli "run test.nap --output json" dir Assert.Equal(1, exitCode) @@ -167,6 +172,7 @@ let ``CLI run with failing assertion returns exit code 1`` () = [<Fact>] let ``CLI run with missing file returns exit code 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "run missing.nap" dir Assert.Equal(2, exitCode) @@ -177,6 +183,7 @@ let ``CLI run with missing file returns exit code 2`` () = [<Fact>] let ``CLI run with no file returns exit code 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "run" dir Assert.Equal(2, exitCode) @@ -184,11 +191,12 @@ let ``CLI run with no file returns exit code 2`` () = finally cleanupDir dir -// ─── Run command: output formats ────────────────────────────── +// ─── Run command: output formats ─────────── Spec: cli-output, output-json, output-junit, output-pretty [<Fact>] let ``CLI run with json output is valid JSON`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://httpbin.org/get") let _, stdout, _ = runCli "run test.nap --output json" dir @@ -200,6 +208,7 @@ let ``CLI run with json output is valid JSON`` () = [<Fact>] let ``CLI run with junit output is valid XML`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://httpbin.org/get") let _, stdout, _ = runCli "run test.nap --output junit" dir @@ -211,6 +220,7 @@ let ``CLI run with junit output is valid XML`` () = [<Fact>] 
let ``CLI run with pretty output shows status`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://httpbin.org/get") let _, stdout, _ = runCli "run test.nap" dir @@ -218,11 +228,12 @@ let ``CLI run with pretty output shows status`` () = finally cleanupDir dir -// ─── Run command: directory ─────────────────────────────────── +// ─── Run command: directory ──────────────── Spec: cli-run, collection-folder [<Fact>] let ``CLI run directory executes all nap files`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "a.nap"), "GET https://httpbin.org/get") File.WriteAllText(Path.Combine(dir, "b.nap"), "GET https://httpbin.org/get") @@ -236,6 +247,7 @@ let ``CLI run directory executes all nap files`` () = [<Fact>] let ``CLI run empty directory returns exit code 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli $"run {dir}" dir Assert.Equal(2, exitCode) @@ -243,13 +255,16 @@ let ``CLI run empty directory returns exit code 2`` () = finally cleanupDir dir -// ─── Run command: --var flag ────────────────────────────────── +// ─── Run command: --var flag ─────────────── Spec: cli-var, env-interpolation [<Fact>] let ``CLI run with --var substitutes variable`` () = let dir = createTempDir () + try - let content = "[request]\nmethod = GET\nurl = https://httpbin.org/status/{{code}}\n\n[assert]\nstatus = {{code}}\n" + let content = + "[request]\nmethod = GET\nurl = https://httpbin.org/status/{{code}}\n\n[assert]\nstatus = {{code}}\n" + File.WriteAllText(Path.Combine(dir, "test.nap"), content) let exitCode, stdout, _ = runCli "run test.nap --var code=200 --output json" dir Assert.Equal(0, exitCode) @@ -258,14 +273,18 @@ let ``CLI run with --var substitutes variable`` () = finally cleanupDir dir -// ─── Run command: --env flag ────────────────────────────────── +// ─── Run command: --env flag ─────────────── Spec: cli-env, env-named, env-resolution [<Fact>] let ``CLI run with --env loads 
named environment`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, ".napenv.staging"), "statusCode = \"200\"") - let content = "[request]\nmethod = GET\nurl = https://httpbin.org/status/{{statusCode}}\n\n[assert]\nstatus = {{statusCode}}\n" + + let content = + "[request]\nmethod = GET\nurl = https://httpbin.org/status/{{statusCode}}\n\n[assert]\nstatus = {{statusCode}}\n" + File.WriteAllText(Path.Combine(dir, "test.nap"), content) let exitCode, stdout, _ = runCli "run test.nap --env staging --output json" dir Assert.Equal(0, exitCode) @@ -274,11 +293,12 @@ let ``CLI run with --env loads named environment`` () = finally cleanupDir dir -// ─── Run command: playlist ──────────────────────────────────── +// ─── Run command: playlist ───────────────── Spec: naplist-file, naplist-steps, output-ndjson [<Fact>] let ``CLI run naplist executes all steps`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "a.nap"), "GET https://httpbin.org/get") File.WriteAllText(Path.Combine(dir, "b.nap"), "GET https://httpbin.org/get") @@ -293,6 +313,7 @@ let ``CLI run naplist executes all steps`` () = [<Fact>] let ``CLI run naplist with ndjson streams results`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "a.nap"), "GET https://httpbin.org/get") File.WriteAllText(Path.Combine(dir, "b.nap"), "GET https://httpbin.org/get") @@ -301,22 +322,24 @@ let ``CLI run naplist with ndjson streams results`` () = let lines = stdout.Split('\n', StringSplitOptions.RemoveEmptyEntries) Assert.Equal(0, exitCode) Assert.Equal(2, lines.Length) + for line in lines do let doc = System.Text.Json.JsonDocument.Parse(line) Assert.True(doc.RootElement.TryGetProperty("file") |> fst) finally cleanupDir dir -// ─── Run command: script step ───────────────────────────────── +// ─── Run command: script step ────────────── Spec: naplist-script-step, script-fsx [<Fact>] let ``CLI run naplist with script step`` () = let dir = createTempDir () + 
try File.WriteAllText(Path.Combine(dir, "setup.fsx"), "printfn \"[setup] ready\"") File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://httpbin.org/get") File.WriteAllText(Path.Combine(dir, "suite.naplist"), "[steps]\nsetup.fsx\ntest.nap\n") - let exitCode, stdout, _ = runCli "run suite.naplist --output json" dir + let exitCode, stdout, _ = runCliSlow "run suite.naplist --output json" dir Assert.Equal(0, exitCode) let doc = System.Text.Json.JsonDocument.Parse(stdout) Assert.Equal(2, doc.RootElement.GetArrayLength()) @@ -331,24 +354,26 @@ let ``CLI run naplist with script step`` () = [<Fact>] let ``CLI run naplist with failing script returns exit code 1`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "bad.fsx"), "failwith \"boom\"") File.WriteAllText(Path.Combine(dir, "suite.naplist"), "[steps]\nbad.fsx\n") - let exitCode, _, _ = runCli "run suite.naplist --output json" dir + let exitCode, _, _ = runCliSlow "run suite.naplist --output json" dir Assert.Equal(1, exitCode) finally cleanupDir dir -// ─── Run command: C# script step ───────────────────────────── +// ─── Run command: C# script step ─────────── Spec: naplist-script-step, script-csx [<Fact>] let ``CLI run naplist with CSX script step`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "setup.csx"), "Console.WriteLine(\"[csx-setup] ready\");") File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://jsonplaceholder.typicode.com/posts/1") File.WriteAllText(Path.Combine(dir, "suite.naplist"), "[steps]\nsetup.csx\ntest.nap\n") - let exitCode, stdout, _ = runCli "run suite.naplist --output json" dir + let exitCode, stdout, _ = runCliSlow "run suite.naplist --output json" dir Assert.Equal(0, exitCode) let doc = System.Text.Json.JsonDocument.Parse(stdout) Assert.Equal(2, doc.RootElement.GetArrayLength()) @@ -362,25 +387,27 @@ let ``CLI run naplist with CSX script step`` () = [<Fact>] let ``CLI run naplist with failing CSX script returns exit code 
1`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "bad.csx"), "throw new Exception(\"boom\");") File.WriteAllText(Path.Combine(dir, "suite.naplist"), "[steps]\nbad.csx\n") - let exitCode, _, _ = runCli "run suite.naplist --output json" dir + let exitCode, _, _ = runCliSlow "run suite.naplist --output json" dir Assert.Equal(1, exitCode) finally cleanupDir dir -// ─── Run command: mixed F# + C# script steps ──────────────── +// ─── Run command: mixed F# + C# script steps Spec: script-fsx, script-csx, script-dispatch [<Fact>] let ``CLI run naplist with mixed FSX and CSX scripts`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "setup.fsx"), "printfn \"[fsx] setup done\"") File.WriteAllText(Path.Combine(dir, "test.nap"), "GET https://jsonplaceholder.typicode.com/posts/1") File.WriteAllText(Path.Combine(dir, "teardown.csx"), "Console.WriteLine(\"[csx] teardown done\");") File.WriteAllText(Path.Combine(dir, "suite.naplist"), "[steps]\nsetup.fsx\ntest.nap\nteardown.csx\n") - let exitCode, stdout, _ = runCli "run suite.naplist --output json" dir + let exitCode, stdout, _ = runCliSlow "run suite.naplist --output json" dir Assert.Equal(0, exitCode) let doc = System.Text.Json.JsonDocument.Parse(stdout) Assert.Equal(3, doc.RootElement.GetArrayLength()) @@ -397,11 +424,12 @@ let ``CLI run naplist with mixed FSX and CSX scripts`` () = finally cleanupDir dir -// ─── Unknown command ────────────────────────────────────────── +// ─── Unknown command ─────────────────────── Spec: cli-exit-codes [<Fact>] let ``CLI unknown command returns exit code 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "bogus" dir Assert.Equal(2, exitCode) @@ -409,26 +437,30 @@ let ``CLI unknown command returns exit code 2`` () = finally cleanupDir dir -// ─── Run command: POST with body ────────────────────────────── +// ─── Run command: POST with body ─────────── Spec: cli-run, nap-body, nap-request, nap-headers [<Fact>] let 
``CLI run POST with JSON body`` () = let dir = createTempDir () let tq = "\"\"\"" + try let content = - "[request]\n" + - "method = POST\n" + - "url = https://httpbin.org/post\n\n" + - "[request.headers]\n" + - "Content-Type = application/json\n\n" + - "[request.body]\n" + - "content-type = application/json\n" + - tq + "\n" + - "{\"name\": \"test\"}\n" + - tq + "\n\n" + - "[assert]\n" + - "status = 200\n" + "[request]\n" + + "method = POST\n" + + "url = https://httpbin.org/post\n\n" + + "[request.headers]\n" + + "Content-Type = application/json\n\n" + + "[request.body]\n" + + "content-type = application/json\n" + + tq + + "\n" + + "{\"name\": \"test\"}\n" + + tq + + "\n\n" + + "[assert]\n" + + "status = 200\n" + File.WriteAllText(Path.Combine(dir, "post.nap"), content) let exitCode, stdout, _ = runCli "run post.nap --output json" dir Assert.Equal(0, exitCode) @@ -438,13 +470,14 @@ let ``CLI run POST with JSON body`` () = finally cleanupDir dir -// ─── Run command: nested playlists ──────────────────────────── +// ─── Run command: nested playlists ───────── Spec: naplist-nested [<Fact>] let ``CLI run nested naplist`` () = let dir = createTempDir () let subdir = Path.Combine(dir, "sub") Directory.CreateDirectory(subdir) |> ignore + try File.WriteAllText(Path.Combine(subdir, "inner.nap"), "GET https://httpbin.org/get") File.WriteAllText(Path.Combine(subdir, "inner.naplist"), "[steps]\ninner.nap\n") diff --git a/tests/Nap.Core.Tests/CsxScriptTests.fs b/src/Napper.Core.Tests/CsxScriptTests.fs similarity index 78% rename from tests/Nap.Core.Tests/CsxScriptTests.fs rename to src/Napper.Core.Tests/CsxScriptTests.fs index 4b9b530..020d15b 100644 --- a/tests/Nap.Core.Tests/CsxScriptTests.fs +++ b/src/Napper.Core.Tests/CsxScriptTests.fs @@ -1,24 +1,30 @@ module CsxScriptTests +// Specs: script-csx, script-runner open System open System.IO open Xunit -open Nap.Core +open Napper.Core let private createTempCsx (content: string) : string = let dir = Path.GetTempPath() - let path 
= Path.Combine(dir, sprintf "nap-test-%s.csx" (Guid.NewGuid().ToString("N"))) + + let path = + Path.Combine(dir, sprintf "nap-test-%s.csx" (Guid.NewGuid().ToString("N"))) + File.WriteAllText(path, content) path let private cleanupScript (path: string) = - if File.Exists(path) then File.Delete(path) + if File.Exists(path) then + File.Delete(path) -// ─── Passing C# scripts ───────────────────────────────────── +// ─── Passing C# scripts ─────────────────── Spec: script-csx [<Fact>] let ``CSX script with single output line`` () = let path = createTempCsx "Console.WriteLine(\"hello from csharp\");" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed, $"Script should pass. Error: {result.Error}") @@ -28,8 +34,11 @@ let ``CSX script with single output line`` () = [<Fact>] let ``CSX script with multiple output lines`` () = - let script = "Console.WriteLine(\"line1\");\nConsole.WriteLine(\"line2\");\nConsole.WriteLine(\"line3\");" + let script = + "Console.WriteLine(\"line1\");\nConsole.WriteLine(\"line2\");\nConsole.WriteLine(\"line3\");" + let path = createTempCsx script + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed, $"Script should pass. Error: {result.Error}") @@ -40,6 +49,7 @@ let ``CSX script with multiple output lines`` () = [<Fact>] let ``CSX script with no output`` () = let path = createTempCsx "var x = 1 + 1;" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed, $"Script should pass. 
Error: {result.Error}") @@ -50,6 +60,7 @@ let ``CSX script with no output`` () = [<Fact>] let ``CSX result has no HTTP response`` () = let path = createTempCsx "Console.WriteLine(\"ok\");" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Response.IsNone) @@ -59,6 +70,7 @@ let ``CSX result has no HTTP response`` () = [<Fact>] let ``CSX result has no assertions`` () = let path = createTempCsx "Console.WriteLine(\"ok\");" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.Empty(result.Assertions) @@ -68,17 +80,19 @@ let ``CSX result has no assertions`` () = [<Fact>] let ``CSX result has correct file path`` () = let path = createTempCsx "Console.WriteLine(\"ok\");" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.Equal(path, result.File) finally cleanupScript path -// ─── Failing C# scripts ───────────────────────────────────── +// ─── Failing C# scripts ─────────────────── Spec: script-csx [<Fact>] let ``CSX script with compilation error fails`` () = let path = createTempCsx "int x = \"not an int\";" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.False(result.Passed) @@ -88,7 +102,9 @@ let ``CSX script with compilation error fails`` () = [<Fact>] let ``CSX script with explicit exit code 1 fails`` () = - let path = createTempCsx "Console.WriteLine(\"about to fail\");\nEnvironment.Exit(1);" + let path = + createTempCsx "Console.WriteLine(\"about to fail\");\nEnvironment.Exit(1);" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.False(result.Passed) @@ -98,7 +114,9 @@ let ``CSX script with explicit exit code 1 fails`` () = [<Fact>] let ``CSX failed script still captures stdout before failure`` () = - let path = createTempCsx "Console.WriteLine(\"before error\");\nEnvironment.Exit(1);" + let path = + createTempCsx "Console.WriteLine(\"before error\");\nEnvironment.Exit(1);" + try let result = Runner.runScript path |> 
Async.RunSynchronously Assert.False(result.Passed) @@ -109,6 +127,7 @@ let ``CSX failed script still captures stdout before failure`` () = [<Fact>] let ``CSX script with runtime exception fails`` () = let path = createTempCsx "throw new Exception(\"boom\");" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.False(result.Passed) @@ -116,12 +135,15 @@ let ``CSX script with runtime exception fails`` () = finally cleanupScript path -// ─── C# scripts doing actual work ─────────────────────────── +// ─── C# scripts doing actual work ────────── Spec: script-csx [<Fact>] let ``CSX script can do computation and print result`` () = - let script = "var sum = Enumerable.Range(1, 10).Sum();\nConsole.WriteLine($\"Sum: {sum}\");" + let script = + "var sum = Enumerable.Range(1, 10).Sum();\nConsole.WriteLine($\"Sum: {sum}\");" + let path = createTempCsx script + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed, $"Script should pass. Error: {result.Error}") @@ -131,8 +153,11 @@ let ``CSX script can do computation and print result`` () = [<Fact>] let ``CSX script can read environment variables`` () = - let script = "Console.WriteLine($\"PATH exists: {Environment.GetEnvironmentVariable(\"PATH\") != null}\");" + let script = + "Console.WriteLine($\"PATH exists: {Environment.GetEnvironmentVariable(\"PATH\") != null}\");" + let path = createTempCsx script + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed, $"Script should pass. 
Error: {result.Error}") @@ -142,19 +167,27 @@ let ``CSX script can read environment variables`` () = [<Fact>] let ``CSX script can write and read temp file`` () = - let tempFile = Path.Combine(Path.GetTempPath(), sprintf "nap-csx-io-%s.txt" (Guid.NewGuid().ToString("N"))) + let tempFile = + Path.Combine(Path.GetTempPath(), sprintf "nap-csx-io-%s.txt" (Guid.NewGuid().ToString("N"))) + let script = - sprintf "var path = @\"%s\";\nSystem.IO.File.WriteAllText(path, \"hello from csx\");\nvar content = System.IO.File.ReadAllText(path);\nConsole.WriteLine($\"Read: {content}\");\nSystem.IO.File.Delete(path);" tempFile + sprintf + "var path = @\"%s\";\nSystem.IO.File.WriteAllText(path, \"hello from csx\");\nvar content = System.IO.File.ReadAllText(path);\nConsole.WriteLine($\"Read: {content}\");\nSystem.IO.File.Delete(path);" + tempFile + let path = createTempCsx script + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed, $"Script should pass. Error: {result.Error}") Assert.Contains("Read: hello from csx", result.Log) finally cleanupScript path - if File.Exists(tempFile) then File.Delete(tempFile) -// ─── Non-existent C# script ───────────────────────────────── + if File.Exists(tempFile) then + File.Delete(tempFile) + +// ─── Non-existent C# script ─────────────── Spec: script-csx [<Fact>] let ``Non-existent CSX script path fails`` () = @@ -163,34 +196,43 @@ let ``Non-existent CSX script path fails`` () = Assert.False(result.Passed) Assert.True(result.Error.IsSome) -// ─── C# script with HTTP call ──────────────────────────────── +// ─── C# script with HTTP call ───────────── Spec: script-csx [<Fact>] let ``CSX script can make HTTP request`` () = - let script = """ + let script = + """ using System.Net.Http; var client = new HttpClient(); var response = await client.GetAsync("https://jsonplaceholder.typicode.com/posts/1"); Console.WriteLine($"Status: {(int)response.StatusCode}"); """ + let path = createTempCsx script + try let result 
= Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed, $"Script should pass. Error: {result.Error}") - Assert.True(result.Log |> List.exists (fun l -> l.Contains("Status: 200")), - $"Should contain status 200. Log: {result.Log}") + + Assert.True( + result.Log |> List.exists (fun l -> l.Contains("Status: 200")), + $"Should contain status 200. Log: {result.Log}" + ) finally cleanupScript path -// ─── C# script with async/await ────────────────────────────── +// ─── C# script with async/await ─────────── Spec: script-csx [<Fact>] let ``CSX script with async await`` () = - let script = """ + let script = + """ var result = await Task.Run(() => 42); Console.WriteLine($"Async result: {result}"); """ + let path = createTempCsx script + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed, $"Script should pass. Error: {result.Error}") diff --git a/tests/Nap.Core.Tests/EnvironmentEdgeCaseTests.fs b/src/Napper.Core.Tests/EnvironmentEdgeCaseTests.fs similarity index 57% rename from tests/Nap.Core.Tests/EnvironmentEdgeCaseTests.fs rename to src/Napper.Core.Tests/EnvironmentEdgeCaseTests.fs index 1a947c6..28b86e0 100644 --- a/tests/Nap.Core.Tests/EnvironmentEdgeCaseTests.fs +++ b/src/Napper.Core.Tests/EnvironmentEdgeCaseTests.fs @@ -1,10 +1,12 @@ module EnvironmentEdgeCaseTests +// Specs: env-file, env-interpolation, env-resolution, env-base, env-named, env-local, cli-var, +// nap-request, nap-headers, nap-body, nap-assert, assert-lt, assert-gt, assert-exists open System.IO open Xunit -open Nap.Core +open Napper.Core -// ─── parseEnvFile edge cases ────────────────────────────────── +// ─── parseEnvFile edge cases ────────────── Spec: env-file [<Fact>] let ``Empty env file returns empty map`` () = @@ -46,159 +48,204 @@ let ``Env file multiple variables`` () = Assert.Equal("abc123", vars["token"]) Assert.Equal("42", vars["userId"]) -// ─── resolveVars edge cases ─────────────────────────────────── +// ─── resolveVars 
edge cases ──────────────── Spec: env-interpolation [<Fact>] let ``Multiple variables in one string`` () = - let vars = Map.ofList [("host", "api.com"); ("port", "8080"); ("path", "users")] + let vars = Map.ofList [ ("host", "api.com"); ("port", "8080"); ("path", "users") ] let result = Environment.resolveVars vars "https://{{host}}:{{port}}/{{path}}" Assert.Equal("https://api.com:8080/users", result) [<Fact>] let ``No variables in string returns unchanged`` () = - let vars = Map.ofList [("key", "value")] + let vars = Map.ofList [ ("key", "value") ] let result = Environment.resolveVars vars "no variables here" Assert.Equal("no variables here", result) [<Fact>] let ``Empty string returns empty`` () = - let vars = Map.ofList [("key", "value")] + let vars = Map.ofList [ ("key", "value") ] let result = Environment.resolveVars vars "" Assert.Equal("", result) [<Fact>] let ``Variable with underscores`` () = - let vars = Map.ofList [("my_var", "resolved")] + let vars = Map.ofList [ ("my_var", "resolved") ] let result = Environment.resolveVars vars "{{my_var}}" Assert.Equal("resolved", result) [<Fact>] let ``Adjacent variables`` () = - let vars = Map.ofList [("a", "hello"); ("b", "world")] + let vars = Map.ofList [ ("a", "hello"); ("b", "world") ] let result = Environment.resolveVars vars "{{a}}{{b}}" Assert.Equal("helloworld", result) [<Fact>] let ``Mixed resolved and unresolved`` () = - let vars = Map.ofList [("known", "yes")] + let vars = Map.ofList [ ("known", "yes") ] let result = Environment.resolveVars vars "{{known}} and {{unknown}}" Assert.Equal("yes and {{unknown}}", result) -// ─── resolveNapFile edge cases ──────────────────────────────── +// ─── resolveNapFile edge cases ───────────── Spec: env-interpolation, nap-request, nap-headers, nap-body, nap-assert [<Fact>] let ``resolveNapFile resolves URL`` () = - let vars = Map.ofList [("baseUrl", "https://api.example.com"); ("id", "42")] - let napFile: NapFile = { - Meta = { Name = None; Description = None; Tags = 
[] } - Vars = Map.empty - Request = { Method = GET; Url = "{{baseUrl}}/users/{{id}}"; Headers = Map.empty; Body = None } - Assertions = [] - Script = { Pre = None; Post = None } - } + let vars = Map.ofList [ ("baseUrl", "https://api.example.com"); ("id", "42") ] + + let napFile: NapFile = + { Meta = + { Name = None + Description = None + Tags = [] } + Vars = Map.empty + Request = + { Method = GET + Url = "{{baseUrl}}/users/{{id}}" + Headers = Map.empty + Body = None } + Assertions = [] + Script = { Pre = None; Post = None } } + let resolved = Environment.resolveNapFile vars napFile Assert.Equal("https://api.example.com/users/42", resolved.Request.Url) [<Fact>] let ``resolveNapFile resolves headers`` () = - let vars = Map.ofList [("token", "abc123")] - let napFile: NapFile = { - Meta = { Name = None; Description = None; Tags = [] } - Vars = Map.empty - Request = { - Method = GET - Url = "https://example.com" - Headers = Map.ofList [("Authorization", "Bearer {{token}}")] - Body = None - } - Assertions = [] - Script = { Pre = None; Post = None } - } + let vars = Map.ofList [ ("token", "abc123") ] + + let napFile: NapFile = + { Meta = + { Name = None + Description = None + Tags = [] } + Vars = Map.empty + Request = + { Method = GET + Url = "https://example.com" + Headers = Map.ofList [ ("Authorization", "Bearer {{token}}") ] + Body = None } + Assertions = [] + Script = { Pre = None; Post = None } } + let resolved = Environment.resolveNapFile vars napFile Assert.Equal("Bearer abc123", resolved.Request.Headers["Authorization"]) [<Fact>] let ``resolveNapFile resolves body content`` () = - let vars = Map.ofList [("userId", "42")] - let napFile: NapFile = { - Meta = { Name = None; Description = None; Tags = [] } - Vars = Map.empty - Request = { - Method = POST - Url = "https://example.com" - Headers = Map.empty - Body = Some { ContentType = "application/json"; Content = """{"userId": {{userId}}}""" } - } - Assertions = [] - Script = { Pre = None; Post = None } - } + let 
vars = Map.ofList [ ("userId", "42") ] + + let napFile: NapFile = + { Meta = + { Name = None + Description = None + Tags = [] } + Vars = Map.empty + Request = + { Method = POST + Url = "https://example.com" + Headers = Map.empty + Body = + Some + { ContentType = "application/json" + Content = """{"userId": {{userId}}}""" } } + Assertions = [] + Script = { Pre = None; Post = None } } + let resolved = Environment.resolveNapFile vars napFile Assert.Equal("""{"userId": 42}""", resolved.Request.Body.Value.Content) [<Fact>] let ``resolveNapFile resolves assertion values`` () = - let vars = Map.ofList [("expectedStatus", "201")] - let napFile: NapFile = { - Meta = { Name = None; Description = None; Tags = [] } - Vars = Map.empty - Request = { Method = GET; Url = "https://example.com"; Headers = Map.empty; Body = None } - Assertions = [ - { Target = "status"; Op = Equals "{{expectedStatus}}" } - { Target = "body.name"; Op = Contains "{{expectedStatus}}" } - ] - Script = { Pre = None; Post = None } - } + let vars = Map.ofList [ ("expectedStatus", "201") ] + + let napFile: NapFile = + { Meta = + { Name = None + Description = None + Tags = [] } + Vars = Map.empty + Request = + { Method = GET + Url = "https://example.com" + Headers = Map.empty + Body = None } + Assertions = + [ { Target = "status" + Op = Equals "{{expectedStatus}}" } + { Target = "body.name" + Op = Contains "{{expectedStatus}}" } ] + Script = { Pre = None; Post = None } } + let resolved = Environment.resolveNapFile vars napFile Assert.Equal(Equals "201", resolved.Assertions[0].Op) Assert.Equal(Contains "201", resolved.Assertions[1].Op) [<Fact>] let ``resolveNapFile resolves LessThan and GreaterThan`` () = - let vars = Map.ofList [("maxDuration", "500ms")] - let napFile: NapFile = { - Meta = { Name = None; Description = None; Tags = [] } - Vars = Map.empty - Request = { Method = GET; Url = "https://example.com"; Headers = Map.empty; Body = None } - Assertions = [ - { Target = "duration"; Op = LessThan 
"{{maxDuration}}" } - ] - Script = { Pre = None; Post = None } - } + let vars = Map.ofList [ ("maxDuration", "500ms") ] + + let napFile: NapFile = + { Meta = + { Name = None + Description = None + Tags = [] } + Vars = Map.empty + Request = + { Method = GET + Url = "https://example.com" + Headers = Map.empty + Body = None } + Assertions = + [ { Target = "duration" + Op = LessThan "{{maxDuration}}" } ] + Script = { Pre = None; Post = None } } + let resolved = Environment.resolveNapFile vars napFile Assert.Equal(LessThan "500ms", resolved.Assertions[0].Op) [<Fact>] let ``resolveNapFile preserves Exists op unchanged`` () = - let vars = Map.ofList [("unused", "value")] - let napFile: NapFile = { - Meta = { Name = None; Description = None; Tags = [] } - Vars = Map.empty - Request = { Method = GET; Url = "https://example.com"; Headers = Map.empty; Body = None } - Assertions = [{ Target = "body.id"; Op = Exists }] - Script = { Pre = None; Post = None } - } + let vars = Map.ofList [ ("unused", "value") ] + + let napFile: NapFile = + { Meta = + { Name = None + Description = None + Tags = [] } + Vars = Map.empty + Request = + { Method = GET + Url = "https://example.com" + Headers = Map.empty + Body = None } + Assertions = [ { Target = "body.id"; Op = Exists } ] + Script = { Pre = None; Post = None } } + let resolved = Environment.resolveNapFile vars napFile Assert.Equal(Exists, resolved.Assertions[0].Op) -// ─── loadEnvironment priority ───────────────────────────────── +// ─── loadEnvironment priority ────────────── Spec: env-resolution, env-base, env-named, env-local, cli-var [<Fact>] let ``loadEnvironment file vars are lowest priority`` () = let dir = Path.GetTempPath() - let fileVars = Map.ofList [("key", "from-file"); ("unique", "file-only")] + let fileVars = Map.ofList [ ("key", "from-file"); ("unique", "file-only") ] let result = Environment.loadEnvironment dir None Map.empty fileVars Assert.Equal("from-file", result["key"]) Assert.Equal("file-only", result["unique"]) 
[<Fact>] let ``loadEnvironment CLI vars override everything`` () = - let dir = Path.Combine(Path.GetTempPath(), "nap-env-test-" + System.Guid.NewGuid().ToString("N")) + let dir = + Path.Combine(Path.GetTempPath(), "nap-env-test-" + System.Guid.NewGuid().ToString("N")) + Directory.CreateDirectory(dir) |> ignore + try File.WriteAllText(Path.Combine(dir, ".napenv"), "key = base-value") File.WriteAllText(Path.Combine(dir, ".napenv.local"), "key = local-value") - let cliVars = Map.ofList [("key", "cli-wins")] - let fileVars = Map.ofList [("key", "file-value")] + let cliVars = Map.ofList [ ("key", "cli-wins") ] + let fileVars = Map.ofList [ ("key", "file-value") ] let result = Environment.loadEnvironment dir None cliVars fileVars Assert.Equal("cli-wins", result["key"]) finally @@ -206,8 +253,11 @@ let ``loadEnvironment CLI vars override everything`` () = [<Fact>] let ``loadEnvironment named env overrides base`` () = - let dir = Path.Combine(Path.GetTempPath(), "nap-env-test-" + System.Guid.NewGuid().ToString("N")) + let dir = + Path.Combine(Path.GetTempPath(), "nap-env-test-" + System.Guid.NewGuid().ToString("N")) + Directory.CreateDirectory(dir) |> ignore + try File.WriteAllText(Path.Combine(dir, ".napenv"), "key = base-value") File.WriteAllText(Path.Combine(dir, ".napenv.staging"), "key = staging-value") @@ -218,8 +268,11 @@ let ``loadEnvironment named env overrides base`` () = [<Fact>] let ``loadEnvironment local overrides named env`` () = - let dir = Path.Combine(Path.GetTempPath(), "nap-env-test-" + System.Guid.NewGuid().ToString("N")) + let dir = + Path.Combine(Path.GetTempPath(), "nap-env-test-" + System.Guid.NewGuid().ToString("N")) + Directory.CreateDirectory(dir) |> ignore + try File.WriteAllText(Path.Combine(dir, ".napenv"), "key = base") File.WriteAllText(Path.Combine(dir, ".napenv.staging"), "key = staging") @@ -231,14 +284,17 @@ let ``loadEnvironment local overrides named env`` () = [<Fact>] let ``loadEnvironment merges distinct keys from all sources`` () = 
- let dir = Path.Combine(Path.GetTempPath(), "nap-env-test-" + System.Guid.NewGuid().ToString("N")) + let dir = + Path.Combine(Path.GetTempPath(), "nap-env-test-" + System.Guid.NewGuid().ToString("N")) + Directory.CreateDirectory(dir) |> ignore + try File.WriteAllText(Path.Combine(dir, ".napenv"), "base_key = from-base") File.WriteAllText(Path.Combine(dir, ".napenv.staging"), "staging_key = from-staging") File.WriteAllText(Path.Combine(dir, ".napenv.local"), "local_key = from-local") - let fileVars = Map.ofList [("file_key", "from-file")] - let cliVars = Map.ofList [("cli_key", "from-cli")] + let fileVars = Map.ofList [ ("file_key", "from-file") ] + let cliVars = Map.ofList [ ("cli_key", "from-cli") ] let result = Environment.loadEnvironment dir (Some "staging") cliVars fileVars Assert.Equal("from-base", result["base_key"]) Assert.Equal("from-staging", result["staging_key"]) @@ -250,11 +306,14 @@ let ``loadEnvironment merges distinct keys from all sources`` () = [<Fact>] let ``loadEnvironment with no env files returns fileVars merged with cliVars`` () = - let dir = Path.Combine(Path.GetTempPath(), "nap-env-empty-" + System.Guid.NewGuid().ToString("N")) + let dir = + Path.Combine(Path.GetTempPath(), "nap-env-empty-" + System.Guid.NewGuid().ToString("N")) + Directory.CreateDirectory(dir) |> ignore + try - let fileVars = Map.ofList [("a", "1")] - let cliVars = Map.ofList [("b", "2")] + let fileVars = Map.ofList [ ("a", "1") ] + let cliVars = Map.ofList [ ("b", "2") ] let result = Environment.loadEnvironment dir None cliVars fileVars Assert.Equal("1", result["a"]) Assert.Equal("2", result["b"]) diff --git a/src/Napper.Core.Tests/HttpConvertE2eTests.fs b/src/Napper.Core.Tests/HttpConvertE2eTests.fs new file mode 100644 index 0000000..41940c6 --- /dev/null +++ b/src/Napper.Core.Tests/HttpConvertE2eTests.fs @@ -0,0 +1,249 @@ +module HttpConvertE2eTests +// Specs: http-convert, http-convert-outdir, http-convert-dryrun, http-convert-envfile, +// http-convert-mapping, 
http-convert-naming, http-convert-output, +// http-convert-env, cli-exit-codes + +open System.IO +open Xunit + +let private createTempDir () = + TestHelpers.createTempDir "nap-http-convert-e2e" + +let private cleanupDir dir = TestHelpers.cleanupDir dir +let private runCli args cwd = TestHelpers.runCli args cwd + +let private writeFile (dir: string) (name: string) (content: string) : string = + let filePath = Path.Combine(dir, name) + File.WriteAllText(filePath, content) + filePath + +let private convertFile (httpPath: string) (outDir: string) (cwd: string) = + runCli (sprintf "convert http %s --output-dir %s" httpPath outDir) cwd + +let private convertWithFlags (httpPath: string) (outDir: string) (flags: string) (cwd: string) = + runCli (sprintf "convert http %s --output-dir %s %s" httpPath outDir flags) cwd + +[<Fact>] +let ``Spec http-convert: single file exits 0`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + let exitCode, stdout, _ = + convertFile (writeFile dir "t.http" "GET https://api.example.com/users\n") outDir dir + + Assert.Equal(0, exitCode) + Assert.Contains("Converted", stdout) + finally + cleanupDir dir + cleanupDir outDir + +[<Fact>] +let ``Spec http-convert-output: generates .nap on disk`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + convertFile (writeFile dir "t.http" "GET https://api.example.com/users\n") outDir dir + |> ignore + + let naps = Directory.GetFiles(outDir, "*.nap") + Assert.True(naps.Length >= 1) + Assert.Contains("GET https://api.example.com/users", File.ReadAllText(naps[0])) + finally + cleanupDir dir + cleanupDir outDir + +[<Fact>] +let ``Spec http-convert-output: multi-request generates one nap each`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + let e, _, _ = + convertFile + (writeFile dir "m.http" "GET https://a.com\n\n###\nPOST https://b.com\n\n###\nDELETE https://c.com\n") + outDir + dir + + Assert.Equal(0, e) + Assert.Equal(3, 
Directory.GetFiles(outDir, "*.nap").Length) + finally + cleanupDir dir + cleanupDir outDir + +[<Fact>] +let ``Spec http-convert-naming: numeric prefix and nap ext`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + convertFile + (writeFile dir "n.http" "### First\nGET https://a.com\n\n### Second\nPOST https://b.com\n") + outDir + dir + |> ignore + + let naps = Directory.GetFiles(outDir, "*.nap") |> Array.sort + Assert.True(Path.GetFileName(naps[0]).StartsWith("01_")) + Assert.True(Path.GetFileName(naps[1]).StartsWith("02_")) + finally + cleanupDir dir + cleanupDir outDir + +[<Fact>] +let ``Spec http-convert-mapping: generated nap has correct sections`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + convertFile + (writeFile + dir + "p.http" + "### Create User\nPOST https://api.com/users\nContent-Type: application/json\nAuthorization: Bearer token\n\n{\"name\":\"Alice\"}\n") + outDir + dir + |> ignore + + let c = File.ReadAllText(Directory.GetFiles(outDir, "*.nap")[0]) + Assert.Contains("name = Create User", c) + Assert.Contains("POST https://api.com/users", c) + Assert.Contains("Authorization = Bearer token", c) + Assert.Contains("[request.body]", c) + finally + cleanupDir dir + cleanupDir outDir + +[<Fact>] +let ``Spec http-convert-dryrun: no files written`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + let e, stdout, _ = + convertWithFlags (writeFile dir "t.http" "GET https://api.com/users\n") outDir "--dry-run" dir + + Assert.Equal(0, e) + Assert.Contains("Would write", stdout) + Assert.Equal(0, Directory.GetFiles(outDir, "*.nap").Length) + finally + cleanupDir dir + cleanupDir outDir + +[<Fact>] +let ``Spec http-convert-envfile: converts env JSON to napenv`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + writeFile dir "t.http" "GET https://{{host}}/users\n" |> ignore + + let envPath = + writeFile dir "env.json" 
"""{"dev":{"host":"localhost:8080"},"prod":{"host":"api.example.com"}}""" + + let e, _, _ = + convertWithFlags (Path.Combine(dir, "t.http")) outDir (sprintf "--env-file %s" envPath) dir + + Assert.Equal(0, e) + Assert.True(File.Exists(Path.Combine(outDir, ".napenv.dev"))) + Assert.True(File.Exists(Path.Combine(outDir, ".napenv.prod"))) + finally + cleanupDir dir + cleanupDir outDir + +[<Fact>] +let ``Spec http-convert-env: auto-detects http-client.env.json`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + let httpPath = writeFile dir "t.http" "GET https://{{host}}/users\n" + + writeFile dir "http-client.env.json" """{"staging":{"host":"staging.api.com"}}""" + |> ignore + + convertFile httpPath outDir dir |> ignore + Assert.True(File.Exists(Path.Combine(outDir, ".napenv.staging"))) + Assert.Contains("host = \"staging.api.com\"", File.ReadAllText(Path.Combine(outDir, ".napenv.staging"))) + finally + cleanupDir dir + cleanupDir outDir + +[<Fact>] +let ``Spec http-convert: json output reports counts`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + let _, stdout, _ = + convertWithFlags + (writeFile dir "t.http" "GET https://a.com\n\n###\nPOST https://b.com\n") + outDir + "--output json" + dir + + Assert.Contains("\"files\":", stdout) + Assert.Contains("\"warnings\":", stdout) + finally + cleanupDir dir + cleanupDir outDir + +[<Fact>] +let ``Spec cli-exit-codes: convert missing file returns 2`` () = + let dir = createTempDir () + + try + let e, _, stderr = runCli "convert http nonexistent.http --output-dir ." 
dir + Assert.Equal(2, e) + Assert.Contains("not found", stderr) + finally + cleanupDir dir + +[<Fact>] +let ``Spec cli-exit-codes: convert no file returns 2`` () = + let dir = createTempDir () + + try + let e, _, stderr = runCli "convert http" dir + Assert.Equal(2, e) + Assert.Contains("no file", stderr.ToLowerInvariant()) + finally + cleanupDir dir + +[<Fact>] +let ``Spec http-convert-output: directory converts all http files`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + writeFile dir "a.http" "### Login\nPOST https://api.com/login\n" |> ignore + + writeFile dir "u.http" "### List\nGET https://api.com/users\n\n### Get\nGET https://api.com/users/1\n" + |> ignore + + let e, _, _ = convertFile dir outDir dir + Assert.Equal(0, e) + Assert.Equal(3, Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories).Length) + finally + cleanupDir dir + cleanupDir outDir + +[<Fact>] +let ``Spec http-convert-mapping: MS vars in generated nap`` () = + let dir = createTempDir () + let outDir = createTempDir () + + try + convertFile + (writeFile dir "ms.http" "@baseUrl = https://api.example.com\n@apiKey = abc123\n\nGET {{baseUrl}}/data\n") + outDir + dir + |> ignore + + let c = File.ReadAllText(Directory.GetFiles(outDir, "*.nap")[0]) + Assert.Contains("[vars]", c) + Assert.Contains("baseUrl = \"https://api.example.com\"", c) + finally + cleanupDir dir + cleanupDir outDir diff --git a/src/Napper.Core.Tests/HttpMethodTests.fs b/src/Napper.Core.Tests/HttpMethodTests.fs new file mode 100644 index 0000000..215793b --- /dev/null +++ b/src/Napper.Core.Tests/HttpMethodTests.fs @@ -0,0 +1,32 @@ +module HttpMethodTests + +open Xunit +open Napper.Core + +[<Fact>] +let ``GET.ToNetMethod returns HttpMethod.Get`` () = + Assert.Equal(System.Net.Http.HttpMethod.Get, GET.ToNetMethod()) + +[<Fact>] +let ``POST.ToNetMethod returns HttpMethod.Post`` () = + Assert.Equal(System.Net.Http.HttpMethod.Post, POST.ToNetMethod()) + +[<Fact>] +let ``PUT.ToNetMethod returns 
HttpMethod.Put`` () = + Assert.Equal(System.Net.Http.HttpMethod.Put, PUT.ToNetMethod()) + +[<Fact>] +let ``PATCH.ToNetMethod returns HttpMethod.Patch`` () = + Assert.Equal(System.Net.Http.HttpMethod.Patch, PATCH.ToNetMethod()) + +[<Fact>] +let ``DELETE.ToNetMethod returns HttpMethod.Delete`` () = + Assert.Equal(System.Net.Http.HttpMethod.Delete, DELETE.ToNetMethod()) + +[<Fact>] +let ``HEAD.ToNetMethod returns HttpMethod.Head`` () = + Assert.Equal(System.Net.Http.HttpMethod.Head, HEAD.ToNetMethod()) + +[<Fact>] +let ``OPTIONS.ToNetMethod returns HttpMethod.Options`` () = + Assert.Equal(System.Net.Http.HttpMethod.Options, OPTIONS.ToNetMethod()) diff --git a/src/Napper.Core.Tests/HttpToNapConverterTests.fs b/src/Napper.Core.Tests/HttpToNapConverterTests.fs new file mode 100644 index 0000000..85e120f --- /dev/null +++ b/src/Napper.Core.Tests/HttpToNapConverterTests.fs @@ -0,0 +1,246 @@ +module HttpToNapConverterTests +// Specs: http-convert, http-convert-mapping, http-convert-env, http-convert-scripts, +// http-convert-output, http-convert-naming + +open Xunit +open DotHttp +open Napper.Core.HttpToNapConverter +open Napper.Core.OpenApiTypes + +let private parseAndConvert (input: string) : ConvertResult = + match DotHttp.Parser.parse input with + | Ok httpFile -> convert httpFile + | Error e -> failwith $"Parse failed: {e}" + +let private firstContent (r: ConvertResult) : string = snd r.GeneratedFiles[0] +let private firstName (r: ConvertResult) : string = fst r.GeneratedFiles[0] +let private fileAt (r: ConvertResult) (i: int) : string * string = r.GeneratedFiles[i] + +[<Fact>] +let ``Spec http-convert-mapping: GET mapped to request section`` () = + let c = firstContent (parseAndConvert "GET https://api.example.com/users\n") + Assert.Contains("[request]", c) + Assert.Contains("GET https://api.example.com/users", c) + +[<Fact>] +let ``Spec http-convert-mapping: headers mapped to request.headers`` () = + let c = + firstContent ( + parseAndConvert "GET 
https://api.example.com\nAuthorization: Bearer token123\nAccept: application/json\n" + ) + + Assert.Contains("[request.headers]", c) + Assert.Contains("Authorization = Bearer token123", c) + Assert.Contains("Accept = application/json", c) + +[<Fact>] +let ``Spec http-convert-mapping: body mapped with triple quotes`` () = + let c = + firstContent ( + parseAndConvert + "POST https://api.example.com/users\nContent-Type: application/json\n\n{\"name\":\"Alice\"}\n" + ) + + Assert.Contains("[request.body]", c) + Assert.Contains("\"\"\"", c) + Assert.Contains("{\"name\":\"Alice\"}", c) + +[<Fact>] +let ``Spec http-convert-mapping: no body produces no body section`` () = + Assert.DoesNotContain("[request.body]", firstContent (parseAndConvert "GET https://api.example.com\n")) + +[<Fact>] +let ``Spec http-convert-mapping: variable interpolation preserved`` () = + let c = + firstContent (parseAndConvert "GET https://{{host}}/api/{{version}}/users\n") + + Assert.Contains("{{host}}", c) + Assert.Contains("{{version}}", c) + +[<Fact>] +let ``Spec http-convert-mapping: HTTP version dropped`` () = + let c = firstContent (parseAndConvert "GET https://api.example.com HTTP/1.1\n") + Assert.DoesNotContain("HTTP/1.1", c) + Assert.Contains("GET https://api.example.com", c) + +[<Fact>] +let ``Spec http-convert-mapping: separator name becomes meta`` () = + let c = + firstContent (parseAndConvert "### Get Users\nGET https://api.example.com/users\n") + + Assert.Contains("[meta]", c) + Assert.Contains("name = Get Users", c) + +[<Fact>] +let ``Spec http-convert-mapping: MS name directive becomes meta`` () = + let c = + firstContent (parseAndConvert "# @name GetUsers\nGET https://api.example.com/users\n") + + Assert.Contains("[meta]", c) + Assert.Contains("name = GetUsers", c) + +[<Fact>] +let ``Spec http-convert-mapping: unnamed request has no meta`` () = + Assert.DoesNotContain("[meta]", firstContent (parseAndConvert "GET https://api.example.com/users\n")) + +[<Fact>] +let ``Spec 
http-convert-mapping: MS file-level vars mapped`` () = + let c = + firstContent (parseAndConvert "@baseUrl = https://api.example.com\n@token = abc123\n\nGET {{baseUrl}}/users\n") + + Assert.Contains("[vars]", c) + Assert.Contains("baseUrl = \"https://api.example.com\"", c) + Assert.Contains("token = \"abc123\"", c) + +[<Fact>] +let ``Spec http-convert-mapping: no vars when none defined`` () = + Assert.DoesNotContain("[vars]", firstContent (parseAndConvert "GET https://api.example.com\n")) + +[<Fact>] +let ``Spec http-convert-mapping: body content-type from headers`` () = + Assert.Contains( + "content-type = text/xml", + firstContent (parseAndConvert "POST https://api.com\nContent-Type: text/xml\n\n<root/>\n") + ) + +[<Fact>] +let ``Spec http-convert-mapping: body defaults to application/json`` () = + Assert.Contains( + "content-type = application/json", + firstContent (parseAndConvert "POST https://api.com\n\n{\"x\":1}\n") + ) + +[<Fact>] +let ``Spec http-convert-naming: numeric prefix`` () = + let r = + parseAndConvert "### First\nGET https://a.com\n\n### Second\nPOST https://b.com\n" + + Assert.StartsWith("01_", fst (fileAt r 0)) + Assert.StartsWith("02_", fst (fileAt r 1)) + +[<Fact>] +let ``Spec http-convert-naming: slugified name`` () = + let n = + firstName (parseAndConvert "### Get All Users\nGET https://api.example.com/users\n") + + Assert.Contains("get-all-users", n) + Assert.EndsWith(".nap", n) + +[<Fact>] +let ``Spec http-convert-naming: method-url slug for unnamed`` () = + let n = firstName (parseAndConvert "GET https://api.example.com/users\n") + Assert.Contains("get-", n) + Assert.EndsWith(".nap", n) + +[<Fact>] +let ``Spec http-convert-naming: nap extension on all files`` () = + for (name, _) in (parseAndConvert "GET https://a.com\n\n###\nPOST https://b.com\n").GeneratedFiles do + Assert.EndsWith(NapExtension, name) + +[<Fact>] +let ``Spec http-convert-output: one nap per request`` () = + Assert.Equal( + 3, + (parseAndConvert + "GET 
https://a.com\n\n###\nPOST https://b.com\nContent-Type: application/json\n\n{\"n\":1}\n\n###\nDELETE https://c.com\n") + .GeneratedFiles.Length + ) + +[<Fact>] +let ``Spec http-convert-output: correct method per file`` () = + let r = + parseAndConvert "GET https://a.com\n\n###\nPOST https://b.com\n\n###\nDELETE https://c.com\n" + + Assert.Contains("GET https://a.com", snd (fileAt r 0)) + Assert.Contains("POST https://b.com", snd (fileAt r 1)) + Assert.Contains("DELETE https://c.com", snd (fileAt r 2)) + +[<Fact>] +let ``Spec http-convert-scripts: pre-script generates warning`` () = + let r = parseAndConvert "< {% console.log('setup') %}\nGET https://api.com\n" + Assert.True(r.Warnings.Length >= 1) + Assert.Contains("Script block not converted", r.Warnings[0].Message) + +[<Fact>] +let ``Spec http-convert-scripts: post-script generates warning`` () = + let r = + parseAndConvert "GET https://api.com\n> {% client.test('ok', function(){}) %}\n" + + Assert.True(r.Warnings.Length >= 1) + Assert.Contains("Script block not converted", r.Warnings[0].Message) + +[<Fact>] +let ``Spec http-convert-scripts: warning includes request name`` () = + let r = + parseAndConvert "### Auth Test\nGET https://api.com\n> {% client.test('ok', function(){}) %}\n" + + Assert.Equal(Some "Auth Test", r.Warnings[0].RequestName) + +[<Fact>] +let ``Spec http-convert-scripts: no scripts no warnings`` () = + Assert.Empty((parseAndConvert "GET https://api.com\n").Warnings) + +[<Fact>] +let ``Spec http-convert-env: public env generates named napenv`` () = + match + convertEnvJson """{"dev":{"host":"localhost:8080","token":"abc"},"prod":{"host":"api.example.com"}}""" false + with + | Ok files -> + Assert.Equal(2, files.Length) + Assert.Equal(".napenv.dev", fst (files |> List.find (fun (n, _) -> n.Contains("dev")))) + Assert.Contains("host = \"localhost:8080\"", snd files[0]) + | Error e -> failwith e + +[<Fact>] +let ``Spec http-convert-env: private env generates napenv.local`` () = + match 
convertEnvJson """{"dev":{"secret":"s3cret"}}""" true with + | Ok files -> + Assert.Equal(".napenv.local", fst files[0]) + Assert.Contains("secret = \"s3cret\"", snd files[0]) + | Error e -> failwith e + +[<Fact>] +let ``Spec http-convert-env: invalid JSON returns error`` () = + match convertEnvJson "not json{" false with + | Error e -> Assert.Contains("Failed to parse environment JSON", e) + | Ok _ -> failwith "Expected error" + +[<Fact>] +let ``Spec http-convert-env: empty object produces empty list`` () = + match convertEnvJson "{}" false with + | Ok files -> Assert.Empty(files) + | Error e -> failwith e + +[<Fact>] +let ``Spec http-convert: full REST API round-trip`` () = + let input = + "@baseUrl = https://api.example.com\n@token = mytoken\n\n### List Users\nGET {{baseUrl}}/users\nAuthorization: Bearer {{token}}\nAccept: application/json\n\n### Create User\nPOST {{baseUrl}}/users\nContent-Type: application/json\n\n{\"name\": \"Alice\"}\n\n### Delete User\nDELETE {{baseUrl}}/users/42\nAuthorization: Bearer {{token}}\n" + + let r = parseAndConvert input + Assert.Equal(3, r.GeneratedFiles.Length) + Assert.Contains("name = List Users", snd (fileAt r 0)) + Assert.Contains("GET {{baseUrl}}/users", snd (fileAt r 0)) + Assert.DoesNotContain("[request.body]", snd (fileAt r 0)) + Assert.Contains("POST {{baseUrl}}/users", snd (fileAt r 1)) + Assert.Contains("[request.body]", snd (fileAt r 1)) + Assert.Contains("DELETE {{baseUrl}}/users/42", snd (fileAt r 2)) + +[<Fact>] +let ``Spec http-convert: comments preserved`` () = + Assert.Contains( + "# This is a health check", + firstContent (parseAndConvert "# This is a health check\nGET https://api.com/health\n") + ) + +[<Fact>] +let ``Spec http-convert: sections in correct order`` () = + let c = + firstContent ( + parseAndConvert + "@baseUrl = https://api.com\n\n### Create\nPOST {{baseUrl}}/items\nContent-Type: application/json\n\n{\"name\":\"test\"}\n" + ) + + Assert.True(c.IndexOf("[meta]") < c.IndexOf("[vars]")) + 
Assert.True(c.IndexOf("[vars]") < c.IndexOf("[request]")) + Assert.True(c.IndexOf("[request]") < c.IndexOf("[request.headers]")) + Assert.True(c.IndexOf("[request.headers]") < c.IndexOf("[request.body]")) diff --git a/src/Napper.Core.Tests/LoggerTests.fs b/src/Napper.Core.Tests/LoggerTests.fs new file mode 100644 index 0000000..e26d865 --- /dev/null +++ b/src/Napper.Core.Tests/LoggerTests.fs @@ -0,0 +1,89 @@ +module LoggerTests + +open System +open System.IO +open Xunit +open Napper.Core + +// Logger uses global mutable state — tests must run sequentially +[<CollectionDefinition("Logger", DisableParallelization = true)>] +type LoggerCollection() = class end + +[<Collection("Logger")>] +type LoggerTests() = + + let withLogger (verbose: bool) (action: unit -> unit) : string = + Logger.init verbose + action () + Logger.close () + let dir = AppContext.BaseDirectory + let logFiles = Directory.GetFiles(dir, "napper-*.log") |> Array.sortDescending + Assert.True(logFiles.Length >= 1, "Must create at least one log file") + let content = File.ReadAllText(logFiles[0]) + File.Delete(logFiles[0]) + content + + [<Fact>] + member _.``init creates log file in base directory``() = + let content = withLogger false (fun () -> Logger.info "test init") + Assert.Contains("test init", content) + + [<Fact>] + member _.``info writes INFO level``() = + let content = withLogger false (fun () -> Logger.info "info message") + Assert.Contains("[INFO]", content) + Assert.Contains("info message", content) + + [<Fact>] + member _.``warn writes WARN level``() = + let content = withLogger false (fun () -> Logger.warn "warn message") + Assert.Contains("[WARN]", content) + Assert.Contains("warn message", content) + + [<Fact>] + member _.``error writes ERROR level``() = + let content = withLogger false (fun () -> Logger.error "error message") + Assert.Contains("[ERROR]", content) + Assert.Contains("error message", content) + + [<Fact>] + member _.``debug is suppressed when not verbose``() = + let 
content = + withLogger false (fun () -> + Logger.debug "should be hidden" + Logger.info "should be visible") + + Assert.DoesNotContain("should be hidden", content) + Assert.Contains("should be visible", content) + + [<Fact>] + member _.``debug is written when verbose``() = + let content = withLogger true (fun () -> Logger.debug "debug visible") + Assert.Contains("[DEBUG]", content) + Assert.Contains("debug visible", content) + + [<Fact>] + member _.``log entries have ISO timestamp``() = + let content = withLogger false (fun () -> Logger.info "timestamp check") + Assert.Contains("[20", content) + Assert.Contains("T", content) + Assert.Contains("Z]", content) + + [<Fact>] + member _.``close flushes and allows re-init``() = + let content1 = withLogger false (fun () -> Logger.info "first session") + Assert.Contains("first session", content1) + let content2 = withLogger false (fun () -> Logger.info "second session") + Assert.Contains("second session", content2) + + [<Fact>] + member _.``multiple log entries in one session``() = + let content = + withLogger false (fun () -> + Logger.info "line one" + Logger.warn "line two" + Logger.error "line three") + + Assert.Contains("line one", content) + Assert.Contains("line two", content) + Assert.Contains("line three", content) diff --git a/tests/Nap.Core.Tests/Nap.Core.Tests.fsproj b/src/Napper.Core.Tests/Napper.Core.Tests.fsproj similarity index 66% rename from tests/Nap.Core.Tests/Nap.Core.Tests.fsproj rename to src/Napper.Core.Tests/Napper.Core.Tests.fsproj index c7f8edf..beec430 100644 --- a/tests/Nap.Core.Tests/Nap.Core.Tests.fsproj +++ b/src/Napper.Core.Tests/Napper.Core.Tests.fsproj @@ -1,11 +1,13 @@ -<Project Sdk="Microsoft.NET.Sdk"> +<Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <IsPackable>false</IsPackable> </PropertyGroup> <ItemGroup> + <Compile Include="TestHelpers.fs" /> <Compile Include="Tests.fs" /> + <Compile Include="HttpMethodTests.fs" /> <Compile Include="ParserEdgeCaseTests.fs" /> <Compile 
Include="AssertionEdgeCaseTests.fs" /> <Compile Include="EnvironmentEdgeCaseTests.fs" /> @@ -14,9 +16,17 @@ <Compile Include="ScriptEdgeCaseTests.fs" /> <Compile Include="CsxScriptTests.fs" /> <Compile Include="CliIntegrationTests.fs" /> + <Compile Include="HttpToNapConverterTests.fs" /> <Compile Include="OpenApiGeneratorTests.fs" /> <Compile Include="OpenApiCliTests.fs" /> + <Compile Include="LoggerTests.fs" /> + <Compile Include="RunnerE2eTests.fs" /> <Compile Include="OpenApiE2eTests.fs" /> + <Compile Include="HttpConvertE2eTests.fs" /> + </ItemGroup> + + <ItemGroup> + <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" /> </ItemGroup> <ItemGroup> @@ -27,7 +37,7 @@ </ItemGroup> <ItemGroup> - <ProjectReference Include="..\..\src\Nap.Core\Nap.Core.fsproj" /> + <ProjectReference Include="..\Napper.Core\Napper.Core.fsproj" /> </ItemGroup> -</Project> \ No newline at end of file +</Project> diff --git a/src/Napper.Core.Tests/OpenApiCliTests.fs b/src/Napper.Core.Tests/OpenApiCliTests.fs new file mode 100644 index 0000000..cc12995 --- /dev/null +++ b/src/Napper.Core.Tests/OpenApiCliTests.fs @@ -0,0 +1,305 @@ +module OpenApiCliTests +// Specs: openapi-generate, openapi-oas3, openapi-tag-dirs, openapi-auth, +// openapi-baseurl, openapi-napenv-gen, openapi-naplist-gen, openapi-params, +// openapi-body-gen, openapi-nap-gen + +open System +open System.Net.Http +open Xunit +open Napper.Core.OpenApiTypes + +/// Direct F# API tests against the live Petstore OpenAPI spec. +/// CLI-based e2e tests are in OpenApiE2eTests.fs — these test +/// the OpenApiGenerator.generate function without a CLI process. 
+ +// --- Constants --- + +[<Literal>] +let private PetstoreSpecUrl = "https://petstore3.swagger.io/api/v3/openapi.json" + +[<Literal>] +let private BeeceptorSpecUrl = "https://beeceptor.com/docs/storefront-sample.json" + +[<Literal>] +let private MinExpectedNapFiles = 10 + +[<Literal>] +let private BeeceptorExpectedNapFiles = 11 + +[<Literal>] +let private BeeceptorBaseUrlDomain = "api.demo-ecommerce.com" + +[<Literal>] +let private BeeceptorAuthRegisterPath = "/auth/register" + +[<Literal>] +let private BeeceptorAuthLoginPath = "/auth/login" + +[<Literal>] +let private BeeceptorProductsPath = "/products" + +[<Literal>] +let private BeeceptorCartPath = "/cart" + +[<Literal>] +let private BeeceptorCheckoutPath = "/checkout" + +[<Literal>] +let private BeeceptorOrdersPath = "/orders" + +[<Literal>] +let private BeeceptorAddressesPath = "/addresses" + +[<Literal>] +let private PetTagFolder = "pet" + +[<Literal>] +let private StoreTagFolder = "store" + +[<Literal>] +let private UserTagFolder = "user" + +// --- Helpers --- + +let private httpClient = new HttpClient() + +let private downloadSpec () : string = + httpClient.GetStringAsync(PetstoreSpecUrl) + |> Async.AwaitTask + |> Async.RunSynchronously + +let private downloadBeeceptorSpec () : string = + httpClient.GetStringAsync(BeeceptorSpecUrl) + |> Async.AwaitTask + |> Async.RunSynchronously + +let private generateFromUrl (url: string) : Napper.Core.OpenApiGenerator.GenerationResult = + let spec = + httpClient.GetStringAsync(url) |> Async.AwaitTask |> Async.RunSynchronously + + match Napper.Core.OpenApiGenerator.generate spec with + | Ok result -> result + | Error msg -> failwith $"Expected Ok but got Error: {msg}" + +// --- E2E: F# API directly (no CLI process) --- Spec: openapi-generate, openapi-oas3 + +[<Fact>] +let ``OpenApiGenerator.generate succeeds with live Petstore spec`` () = + let specContent = downloadSpec () + + match Napper.Core.OpenApiGenerator.generate specContent with + | Ok result -> + 
Assert.True(result.NapFiles.Length >= MinExpectedNapFiles) + Assert.False(String.IsNullOrEmpty(result.Playlist.Content)) + Assert.False(String.IsNullOrEmpty(result.Environment.Content)) + | Error msg -> Assert.Fail($"Expected Ok but got Error: {msg}") + +[<Fact>] +let ``OpenApiGenerator.generate produces correct tag folders for Petstore`` () = + let specContent = downloadSpec () + + match Napper.Core.OpenApiGenerator.generate specContent with + | Ok result -> + let hasPet = + result.NapFiles + |> List.exists (fun f -> f.FileName.StartsWith($"{PetTagFolder}/")) + + let hasStore = + result.NapFiles + |> List.exists (fun f -> f.FileName.StartsWith($"{StoreTagFolder}/")) + + let hasUser = + result.NapFiles + |> List.exists (fun f -> f.FileName.StartsWith($"{UserTagFolder}/")) + + Assert.True(hasPet, "Should have pet/ files") + Assert.True(hasStore, "Should have store/ files") + Assert.True(hasUser, "Should have user/ files") + | Error msg -> Assert.Fail($"Expected Ok but got Error: {msg}") + +[<Fact>] +let ``OpenApiGenerator.generate includes api_key auth for Petstore`` () = + let specContent = downloadSpec () + + match Napper.Core.OpenApiGenerator.generate specContent with + | Ok result -> + let hasApiKey = + result.NapFiles + |> List.exists (fun f -> f.Content.Contains(SectionRequestHeaders) && f.Content.Contains("api_key")) + + Assert.True(hasApiKey, "At least one endpoint should have api_key auth header") + | Error msg -> Assert.Fail($"Expected Ok but got Error: {msg}") + +[<Fact>] +let ``OpenApiGenerator.generate produces baseUrl in environment`` () = + let specContent = downloadSpec () + + match Napper.Core.OpenApiGenerator.generate specContent with + | Ok result -> + Assert.Contains(BaseUrlKey, result.Environment.Content) + Assert.Contains("/api/v3", result.Environment.Content) + | Error msg -> Assert.Fail($"Expected Ok but got Error: {msg}") + +[<Fact>] +let ``OpenApiGenerator.generate produces playlist referencing all files`` () = + let specContent = 
downloadSpec () + + match Napper.Core.OpenApiGenerator.generate specContent with + | Ok result -> + Assert.Contains(SectionSteps, result.Playlist.Content) + + for napFile in result.NapFiles do + Assert.Contains(napFile.FileName, result.Playlist.Content) + | Error msg -> Assert.Fail($"Expected Ok but got Error: {msg}") + +[<Fact>] +let ``OpenApiGenerator.generate produces vars for all path param endpoints`` () = + let specContent = downloadSpec () + + match Napper.Core.OpenApiGenerator.generate specContent with + | Ok result -> + let paramFiles = + result.NapFiles + |> List.filter (fun f -> + f.Content.Contains("{{petId}}") + || f.Content.Contains("{{orderId}}") + || f.Content.Contains("{{username}}")) + + Assert.True(paramFiles.Length >= 3, $"Must have at least 3 path param endpoints, got {paramFiles.Length}") + + for f in paramFiles do + Assert.Contains(SectionVars, f.Content) + Assert.Contains(VarsPlaceholder, f.Content) + | Error msg -> Assert.Fail($"Expected Ok but got Error: {msg}") + +[<Fact>] +let ``OpenApiGenerator.generate produces request bodies for POST endpoints with JSON schema`` () = + let specContent = downloadSpec () + + match Napper.Core.OpenApiGenerator.generate specContent with + | Ok result -> + let postFilesWithBody = + result.NapFiles + |> List.filter (fun f -> f.Content.Contains("POST") && f.Content.Contains(SectionRequestBody)) + + Assert.True(postFilesWithBody.Length >= 1, "At least one POST endpoint must have [request.body]") + + for f in postFilesWithBody do + Assert.Contains("Content-Type = application/json", f.Content) + Assert.Contains("\"\"\"", f.Content) + + let allPostFiles = + result.NapFiles |> List.filter (fun f -> f.Content.Contains("POST")) + + for f in allPostFiles do + Assert.Contains(SectionRequestHeaders, f.Content) + | Error msg -> Assert.Fail($"Expected Ok but got Error: {msg}") + +// --- E2E: Beeceptor URL proves URL content drives output --- Spec: openapi-nap-gen, openapi-baseurl, openapi-auth, openapi-naplist-gen + 
+[<Fact>] +let ``Beeceptor URL generates exactly 11 nap files`` () = + let result = generateFromUrl BeeceptorSpecUrl + Assert.Equal(BeeceptorExpectedNapFiles, result.NapFiles.Length) + +[<Fact>] +let ``Beeceptor URL generates base URL with demo-ecommerce domain`` () = + let result = generateFromUrl BeeceptorSpecUrl + Assert.Contains(BeeceptorBaseUrlDomain, result.Environment.Content) + +[<Fact>] +let ``Beeceptor URL generates auth register endpoint`` () = + let result = generateFromUrl BeeceptorSpecUrl + + let hasRegister = + result.NapFiles + |> List.exists (fun f -> f.Content.Contains BeeceptorAuthRegisterPath) + + Assert.True(hasRegister, "Must have auth/register endpoint from beeceptor spec") + +[<Fact>] +let ``Beeceptor URL generates auth login endpoint`` () = + let result = generateFromUrl BeeceptorSpecUrl + + let hasLogin = + result.NapFiles + |> List.exists (fun f -> f.Content.Contains BeeceptorAuthLoginPath) + + Assert.True(hasLogin, "Must have auth/login endpoint from beeceptor spec") + +[<Fact>] +let ``Beeceptor URL generates products endpoint`` () = + let result = generateFromUrl BeeceptorSpecUrl + + let hasProducts = + result.NapFiles + |> List.exists (fun f -> f.Content.Contains BeeceptorProductsPath) + + Assert.True(hasProducts, "Must have products endpoint from beeceptor spec") + +[<Fact>] +let ``Beeceptor URL generates cart endpoint`` () = + let result = generateFromUrl BeeceptorSpecUrl + + let hasCart = + result.NapFiles |> List.exists (fun f -> f.Content.Contains BeeceptorCartPath) + + Assert.True(hasCart, "Must have cart endpoint from beeceptor spec") + +[<Fact>] +let ``Beeceptor URL generates checkout endpoint`` () = + let result = generateFromUrl BeeceptorSpecUrl + + let hasCheckout = + result.NapFiles + |> List.exists (fun f -> f.Content.Contains BeeceptorCheckoutPath) + + Assert.True(hasCheckout, "Must have checkout endpoint from beeceptor spec") + +[<Fact>] +let ``Beeceptor URL generates orders endpoint`` () = + let result = generateFromUrl 
BeeceptorSpecUrl + + let hasOrders = + result.NapFiles |> List.exists (fun f -> f.Content.Contains BeeceptorOrdersPath) + + Assert.True(hasOrders, "Must have orders endpoint from beeceptor spec") + +[<Fact>] +let ``Beeceptor URL generates addresses endpoint`` () = + let result = generateFromUrl BeeceptorSpecUrl + + let hasAddresses = + result.NapFiles + |> List.exists (fun f -> f.Content.Contains BeeceptorAddressesPath) + + Assert.True(hasAddresses, "Must have addresses endpoint from beeceptor spec") + +[<Fact>] +let ``Beeceptor URL generates bearer auth on secured endpoints`` () = + let result = generateFromUrl BeeceptorSpecUrl + + let bearerFiles = + result.NapFiles + |> List.filter (fun f -> f.Content.Contains "Authorization = Bearer {{token}}") + + Assert.True(bearerFiles.Length >= 7, $"Must have at least 7 bearer auth endpoints, got {bearerFiles.Length}") + +[<Fact>] +let ``Beeceptor URL output is different from Petstore URL output`` () = + let beeceptor = generateFromUrl BeeceptorSpecUrl + let petstore = generateFromUrl PetstoreSpecUrl + Assert.Contains(BeeceptorBaseUrlDomain, beeceptor.Environment.Content) + Assert.DoesNotContain(BeeceptorBaseUrlDomain, petstore.Environment.Content) + Assert.Contains("/api/v3", petstore.Environment.Content) + Assert.DoesNotContain("/api/v3", beeceptor.Environment.Content) + Assert.NotEqual(beeceptor.NapFiles.Length, petstore.NapFiles.Length) + +[<Fact>] +let ``Beeceptor URL playlist contains E-commerce API title`` () = + let result = generateFromUrl BeeceptorSpecUrl + Assert.Contains("E-commerce API", result.Playlist.Content) + Assert.Contains(SectionSteps, result.Playlist.Content) + + for napFile in result.NapFiles do + Assert.Contains(napFile.FileName, result.Playlist.Content) diff --git a/tests/Nap.Core.Tests/OpenApiE2eTests.fs b/src/Napper.Core.Tests/OpenApiE2eTests.fs similarity index 75% rename from tests/Nap.Core.Tests/OpenApiE2eTests.fs rename to src/Napper.Core.Tests/OpenApiE2eTests.fs index 410b399..edf924d 100644 
--- a/tests/Nap.Core.Tests/OpenApiE2eTests.fs +++ b/src/Napper.Core.Tests/OpenApiE2eTests.fs @@ -1,36 +1,22 @@ module OpenApiE2eTests +// Specs: cli-generate, cli-exit-codes, openapi-generate, openapi-nap-gen, openapi-tag-dirs, +// openapi-naplist-gen, openapi-napenv-gen, openapi-baseurl, openapi-params, +// openapi-body-gen, openapi-assert-gen, openapi-query-params, openapi-auth, +// openapi-meta-flag, nap-meta, nap-request, nap-body, nap-vars open System -open System.Diagnostics open System.IO open System.Net.Http open Xunit // ─── Infrastructure ───────────────────────────────────────── -let private runCli (args: string) (cwd: string) : int * string * string = - let projectPath = Path.GetFullPath(Path.Combine(__SOURCE_DIRECTORY__, "../../src/Nap.Cli/Nap.Cli.fsproj")) - let psi = ProcessStartInfo() - psi.FileName <- "dotnet" - psi.Arguments <- $"run --project {projectPath} -- {args}" - psi.WorkingDirectory <- cwd - psi.RedirectStandardOutput <- true - psi.RedirectStandardError <- true - psi.UseShellExecute <- false - psi.CreateNoWindow <- true - use proc = Process.Start(psi) - let stdout = proc.StandardOutput.ReadToEnd() - let stderr = proc.StandardError.ReadToEnd() - proc.WaitForExit() - proc.ExitCode, stdout, stderr +let private runCli args cwd = TestHelpers.runCli args cwd let private createTempDir () = - let dir = Path.Combine(Path.GetTempPath(), $"nap-openapi-e2e-{Guid.NewGuid():N}") - Directory.CreateDirectory(dir) |> ignore - dir + TestHelpers.createTempDir "nap-openapi-e2e" -let private cleanupDir (dir: string) = - if Directory.Exists(dir) then Directory.Delete(dir, true) +let private cleanupDir dir = TestHelpers.cleanupDir dir [<Literal>] let private PetstoreUrl = "https://petstore3.swagger.io/api/v3/openapi.json" @@ -41,16 +27,17 @@ let private BeeceptorUrl = "https://beeceptor.com/docs/storefront-sample.json" [<Literal>] let private BeeceptorEndpointCount = 11 -let private specCacheDir = - Path.Combine(__SOURCE_DIRECTORY__, ".spec-cache") +let private 
specCacheDir = Path.Combine(__SOURCE_DIRECTORY__, ".spec-cache") let private cachedDownload (url: string) (fileName: string) : string = let cachePath = Path.Combine(specCacheDir, fileName) + if File.Exists(cachePath) then File.ReadAllText(cachePath) else if not (Directory.Exists(specCacheDir)) then Directory.CreateDirectory(specCacheDir) |> ignore + use client = new HttpClient() let json = client.GetStringAsync(url).Result File.WriteAllText(cachePath, json) @@ -72,11 +59,12 @@ let private generatePetstore (outDir: string) : int * string * string = downloadSpec () |> ignore runCli $"generate openapi {specPath} --output-dir {outDir}" specCacheDir -// ─── CLI generate openapi: Petstore E2E ───────────────────── +// ─── CLI generate openapi: Petstore E2E ── Spec: cli-generate, openapi-nap-gen, openapi-tag-dirs, openapi-naplist-gen, openapi-napenv-gen, openapi-baseurl, openapi-params, openapi-body-gen, openapi-assert-gen, openapi-query-params, openapi-auth, openapi-meta-flag [<Fact>] let ``Petstore generate exits with code 0`` () = let outDir = createTempDir () + try let exitCode, stdout, _ = generatePetstore outDir Assert.Equal(0, exitCode) @@ -87,6 +75,7 @@ let ``Petstore generate exits with code 0`` () = [<Fact>] let ``Petstore generates napenv with base URL`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let envFile = Path.Combine(outDir, ".napenv") @@ -99,6 +88,7 @@ let ``Petstore generates napenv with base URL`` () = [<Fact>] let ``Petstore generates naplist file`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let naplists = Directory.GetFiles(outDir, "*.naplist") @@ -112,6 +102,7 @@ let ``Petstore generates naplist file`` () = [<Fact>] let ``Petstore creates tag subdirectories`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let petDir = Path.Combine(outDir, "pet") @@ -126,6 +117,7 @@ let ``Petstore creates tag subdirectories`` () = [<Fact>] let ``Petstore pet folder has nap 
files for CRUD operations`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let petDir = Path.Combine(outDir, "pet") @@ -137,10 +129,12 @@ let ``Petstore pet folder has nap files for CRUD operations`` () = [<Fact>] let ``Petstore nap files contain meta with generated flag`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) Assert.True(allNaps.Length > 0, "Must have at least one .nap file") + for napFile in allNaps do let content = File.ReadAllText(napFile) Assert.Contains("[meta]", content) @@ -151,9 +145,11 @@ let ``Petstore nap files contain meta with generated flag`` () = [<Fact>] let ``Petstore nap files contain request section with baseUrl`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + for napFile in allNaps do let content = File.ReadAllText(napFile) Assert.Contains("[request]", content) @@ -164,9 +160,11 @@ let ``Petstore nap files contain request section with baseUrl`` () = [<Fact>] let ``Petstore nap files contain assert section`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + for napFile in allNaps do let content = File.ReadAllText(napFile) Assert.Contains("[assert]", content) @@ -177,15 +175,19 @@ let ``Petstore nap files contain assert section`` () = [<Fact>] let ``Petstore POST endpoints have request body`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + let postFiles = allNaps |> Array.filter (fun f -> let content = File.ReadAllText(f) content.Contains("POST {{baseUrl}}")) + Assert.True(postFiles.Length >= 1, "Must have at least one POST endpoint") + for f in postFiles do let content = 
File.ReadAllText(f) Assert.Contains("[request.headers]", content) @@ -196,15 +198,22 @@ let ``Petstore POST endpoints have request body`` () = [<Fact>] let ``Petstore path param endpoints have vars section`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + let paramFiles = allNaps |> Array.filter (fun f -> let content = File.ReadAllText(f) - content.Contains("{{petId}}") || content.Contains("{{orderId}}") || content.Contains("{{username}}")) + + content.Contains("{{petId}}") + || content.Contains("{{orderId}}") + || content.Contains("{{username}}")) + Assert.True(paramFiles.Length >= 1, "Must have endpoints with path params") + for f in paramFiles do let content = File.ReadAllText(f) Assert.Contains("[vars]", content) @@ -215,6 +224,7 @@ let ``Petstore path param endpoints have vars section`` () = [<Fact>] let ``Petstore naplist references all generated nap files`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let naplists = Directory.GetFiles(outDir, "*.naplist") @@ -222,20 +232,27 @@ let ``Petstore naplist references all generated nap files`` () = let playlistContent = File.ReadAllText(naplists[0]) let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) Assert.True(allNaps.Length >= 10, $"Petstore must produce at least 10 nap files, got {allNaps.Length}") + + for napFile in allNaps do + let napName = Path.GetFileName(napFile) + Assert.True(playlistContent.Contains(napName), $"Playlist must reference {napName}") finally cleanupDir outDir [<Fact>] let ``Petstore api_key auth adds header`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + let apiKeyFiles = allNaps |> Array.filter (fun f -> let content = File.ReadAllText(f) content.Contains("api_key = {{apiKey}}")) + Assert.True(apiKeyFiles.Length >= 1, "At 
least one endpoint must use api_key auth header") finally cleanupDir outDir @@ -243,25 +260,31 @@ let ``Petstore api_key auth adds header`` () = [<Fact>] let ``Petstore query param endpoints have params in URL`` () = let outDir = createTempDir () + try generatePetstore outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + let queryFiles = allNaps |> Array.filter (fun f -> let content = File.ReadAllText(f) content.Contains("?") && content.Contains("={{")) + Assert.True(queryFiles.Length >= 1, "Must have endpoints with query params in URL") finally cleanupDir outDir -// ─── Error handling ───────────────────────────────────────── +// ─── Error handling ──────────────────────── Spec: cli-generate, cli-exit-codes [<Fact>] let ``Generate with missing spec returns exit code 2`` () = let dir = createTempDir () + try - let exitCode, _, stderr = runCli "generate openapi nonexistent.json --output-dir ." dir + let exitCode, _, stderr = + runCli "generate openapi nonexistent.json --output-dir ." dir + Assert.Equal(2, exitCode) Assert.Contains("not found", stderr) finally @@ -270,6 +293,7 @@ let ``Generate with missing spec returns exit code 2`` () = [<Fact>] let ``Generate with no spec file returns exit code 2`` () = let dir = createTempDir () + try let exitCode, _, stderr = runCli "generate openapi" dir Assert.Equal(2, exitCode) @@ -280,6 +304,7 @@ let ``Generate with no spec file returns exit code 2`` () = [<Fact>] let ``Generate with invalid JSON returns exit code 1`` () = let dir = createTempDir () + try File.WriteAllText(Path.Combine(dir, "bad.json"), "not valid json{{{") let exitCode, _, stderr = runCli "generate openapi bad.json --output-dir ." 
dir @@ -288,11 +313,12 @@ let ``Generate with invalid JSON returns exit code 1`` () = finally cleanupDir dir -// ─── CLI generate openapi: Beeceptor Storefront E2E ───────── +// ─── CLI generate openapi: Beeceptor E2E ── Spec: cli-generate, openapi-nap-gen, openapi-naplist-gen, openapi-napenv-gen, openapi-baseurl, openapi-body-gen, openapi-auth, openapi-query-params, openapi-params, openapi-assert-gen [<Fact>] let ``Beeceptor generate exits with code 0`` () = let outDir = createTempDir () + try let exitCode, stdout, _ = generateBeeceptor outDir Assert.Equal(0, exitCode) @@ -303,6 +329,7 @@ let ``Beeceptor generate exits with code 0`` () = [<Fact>] let ``Beeceptor generates napenv with base URL`` () = let outDir = createTempDir () + try generateBeeceptor outDir |> ignore let envFile = Path.Combine(outDir, ".napenv") @@ -316,6 +343,7 @@ let ``Beeceptor generates napenv with base URL`` () = [<Fact>] let ``Beeceptor generates all 11 nap files`` () = let outDir = createTempDir () + try generateBeeceptor outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) @@ -326,6 +354,7 @@ let ``Beeceptor generates all 11 nap files`` () = [<Fact>] let ``Beeceptor generates naplist referencing all endpoints`` () = let outDir = createTempDir () + try generateBeeceptor outDir |> ignore let naplists = Directory.GetFiles(outDir, "*.naplist") @@ -340,9 +369,11 @@ let ``Beeceptor generates naplist referencing all endpoints`` () = [<Fact>] let ``Beeceptor nap files all have meta and assert sections`` () = let outDir = createTempDir () + try generateBeeceptor outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + for napFile in allNaps do let content = File.ReadAllText(napFile) Assert.Contains("[meta]", content) @@ -357,9 +388,11 @@ let ``Beeceptor nap files all have meta and assert sections`` () = [<Fact>] let ``Beeceptor POST endpoints have request body and headers`` () = let outDir = createTempDir () + try 
generateBeeceptor outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + let postFiles = allNaps |> Array.filter (fun f -> @@ -367,6 +400,7 @@ let ``Beeceptor POST endpoints have request body and headers`` () = content.Contains "POST {{baseUrl}}") // auth/register, auth/login, cart/items, checkout, addresses POST = 5 Assert.True(postFiles.Length >= 5, $"Must have at least 5 POST endpoints, got {postFiles.Length}") + for f in postFiles do let content = File.ReadAllText(f) Assert.Contains("[request.headers]", content) @@ -377,9 +411,11 @@ let ``Beeceptor POST endpoints have request body and headers`` () = [<Fact>] let ``Beeceptor bearer auth endpoints have Authorization header`` () = let outDir = createTempDir () + try generateBeeceptor outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + let authFiles = allNaps |> Array.filter (fun f -> @@ -393,14 +429,17 @@ let ``Beeceptor bearer auth endpoints have Authorization header`` () = [<Fact>] let ``Beeceptor products endpoint has query params`` () = let outDir = createTempDir () + try generateBeeceptor outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + let queryFiles = allNaps |> Array.filter (fun f -> let content = File.ReadAllText(f) content.Contains("category={{category}}")) + Assert.True(queryFiles.Length >= 1, "Must have products endpoint with category query param") let content = File.ReadAllText(queryFiles[0]) Assert.Contains("search={{search}}", content) @@ -412,15 +451,19 @@ let ``Beeceptor products endpoint has query params`` () = [<Fact>] let ``Beeceptor path param endpoints have vars section`` () = let outDir = createTempDir () + try generateBeeceptor outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + let paramFiles = allNaps |> Array.filter (fun f -> let content = File.ReadAllText(f) content.Contains("{{id}}") || 
content.Contains("{{orderId}}")) + Assert.True(paramFiles.Length >= 2, $"Must have at least 2 path param endpoints, got {paramFiles.Length}") + for f in paramFiles do let content = File.ReadAllText(f) Assert.Contains("[vars]", content) @@ -431,16 +474,109 @@ let ``Beeceptor path param endpoints have vars section`` () = [<Fact>] let ``Beeceptor checkout endpoint asserts 201 status`` () = let outDir = createTempDir () + try generateBeeceptor outDir |> ignore let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + let checkoutFiles = allNaps |> Array.filter (fun f -> let content = File.ReadAllText(f) content.Contains("POST {{baseUrl}}/checkout")) + Assert.True(checkoutFiles.Length >= 1, "Must have checkout endpoint") let content = File.ReadAllText(checkoutFiles[0]) Assert.Contains("status = 201", content) finally cleanupDir outDir + +// ─── Petstore: complete .nap format validation ── Spec: nap-file, nap-meta, nap-request, nap-assert + +[<Fact>] +let ``Petstore every nap file has correct section ordering`` () = + let outDir = createTempDir () + + try + generatePetstore outDir |> ignore + let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + Assert.True(allNaps.Length >= 10, $"Must have at least 10 nap files, got {allNaps.Length}") + + for napFile in allNaps do + let content = File.ReadAllText(napFile) + let metaIdx = content.IndexOf("[meta]") + let requestIdx = content.IndexOf("[request]") + let assertIdx = content.IndexOf("[assert]") + Assert.True(metaIdx >= 0, $"[meta] missing in {Path.GetFileName napFile}") + Assert.True(requestIdx > metaIdx, $"[request] must follow [meta] in {Path.GetFileName napFile}") + Assert.True(assertIdx > requestIdx, $"[assert] must follow [request] in {Path.GetFileName napFile}") + finally + cleanupDir outDir + +[<Fact>] +let ``Petstore POST endpoints include actual JSON body content`` () = + let outDir = createTempDir () + + try + generatePetstore outDir |> ignore + let allNaps = 
Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + + let postFilesWithBody = + allNaps + |> Array.filter (fun f -> + let content = File.ReadAllText(f) + content.Contains("POST {{baseUrl}}") && content.Contains("[request.body]")) + + Assert.True(postFilesWithBody.Length >= 1, "Must have POST endpoints with body") + + for f in postFilesWithBody do + let content = File.ReadAllText(f) + Assert.Contains("\"\"\"", content) + let tripleQuoteCount = content.Split("\"\"\"").Length - 1 + + Assert.True( + tripleQuoteCount >= 2, + $"Body must have opening and closing triple-quotes in {Path.GetFileName f}" + ) + finally + cleanupDir outDir + +[<Fact>] +let ``Petstore every path param endpoint has matching vars`` () = + let outDir = createTempDir () + + try + generatePetstore outDir |> ignore + let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + + for napFile in allNaps do + let content = File.ReadAllText(napFile) + + let hasPathParam = + content.Contains("{{petId}}") + || content.Contains("{{orderId}}") + || content.Contains("{{username}}") + + if hasPathParam then + Assert.Contains("[vars]", content) + Assert.Contains("REPLACE_ME", content) + finally + cleanupDir outDir + +[<Fact>] +let ``Beeceptor every path param endpoint has matching vars`` () = + let outDir = createTempDir () + + try + generateBeeceptor outDir |> ignore + let allNaps = Directory.GetFiles(outDir, "*.nap", SearchOption.AllDirectories) + + for napFile in allNaps do + let content = File.ReadAllText(napFile) + let hasPathParam = content.Contains("{{id}}") || content.Contains("{{orderId}}") + + if hasPathParam then + Assert.Contains("[vars]", content) + Assert.Contains("REPLACE_ME", content) + finally + cleanupDir outDir diff --git a/tests/Nap.Core.Tests/OpenApiGeneratorTests.fs b/src/Napper.Core.Tests/OpenApiGeneratorTests.fs similarity index 65% rename from tests/Nap.Core.Tests/OpenApiGeneratorTests.fs rename to src/Napper.Core.Tests/OpenApiGeneratorTests.fs index 
e8a356d..3f3309d 100644 --- a/tests/Nap.Core.Tests/OpenApiGeneratorTests.fs +++ b/src/Napper.Core.Tests/OpenApiGeneratorTests.fs @@ -1,7 +1,11 @@ module OpenApiGeneratorTests +// Specs: openapi-generate, openapi-oas3, openapi-swagger2, openapi-nap-gen, openapi-tag-dirs, +// openapi-naplist-gen, openapi-napenv-gen, openapi-baseurl, openapi-params, +// openapi-body-gen, openapi-assert-gen, openapi-query-params, openapi-auth, +// openapi-meta-flag, nap-meta, nap-request, nap-headers, nap-body, nap-vars, nap-assert open Xunit -open Nap.Core.OpenApiGenerator +open Napper.Core.OpenApiGenerator // --- Helpers --- @@ -16,12 +20,15 @@ let private firstFile (gen: GenerationResult) : GeneratedFile = | [] -> failwith "Expected at least one generated nap file" let private fileAt (gen: GenerationResult) (index: int) : GeneratedFile = - if index < gen.NapFiles.Length then gen.NapFiles[index] - else failwith $"Expected nap file at index {index}" + if index < gen.NapFiles.Length then + gen.NapFiles[index] + else + failwith $"Expected nap file at index {index}" // --- Minimal specs --- -let private minimalOas3 = """ +let private minimalOas3 = + """ { "openapi": "3.0.0", "info": { "title": "Test API" }, @@ -36,7 +43,8 @@ let private minimalOas3 = """ } }""" -let private minimalSwagger2 = """ +let private minimalSwagger2 = + """ { "swagger": "2.0", "info": { "title": "Legacy API" }, @@ -53,7 +61,8 @@ let private minimalSwagger2 = """ } }""" -let private multiMethodSpec = """ +let private multiMethodSpec = + """ { "openapi": "3.0.0", "info": { "title": "CRUD API" }, @@ -95,7 +104,7 @@ let private multiMethodSpec = """ } }""" -// --- Error handling --- +// --- Error handling --- Spec: openapi-generate [<Fact>] let ``Rejects invalid JSON`` () = @@ -121,7 +130,7 @@ let ``Rejects null input`` () = | Error _ -> () | Ok _ -> failwith "Expected error" -// --- OpenAPI 3.x --- +// --- OpenAPI 3.x --- Spec: openapi-oas3, openapi-nap-gen, openapi-naplist-gen, openapi-napenv-gen, openapi-baseurl, 
openapi-meta-flag [<Fact>] let ``OAS3 generates correct number of nap files`` () = @@ -170,6 +179,7 @@ let ``OAS3 playlist has naplist extension`` () = [<Fact>] let ``OAS3 playlist references generated files`` () = let gen = unwrap minimalOas3 + for f in gen.NapFiles do Assert.Contains(f.FileName, gen.Playlist.Content) @@ -178,7 +188,7 @@ let ``OAS3 generated flag in meta`` () = let content = (unwrap minimalOas3 |> firstFile).Content Assert.Contains("generated = true", content) -// --- Swagger 2.x --- +// --- Swagger 2.x --- Spec: openapi-swagger2, openapi-baseurl [<Fact>] let ``Swagger2 extracts base URL from host and basePath`` () = @@ -192,7 +202,7 @@ let ``Swagger2 generates nap file`` () = let content = (firstFile gen).Content Assert.Contains("GET {{baseUrl}}/items", content) -// --- Multiple endpoints --- +// --- Multiple endpoints --- Spec: openapi-nap-gen, openapi-params, openapi-assert-gen [<Fact>] let ``Generates one nap file per operation`` () = @@ -209,39 +219,40 @@ let ``Files are numbered sequentially`` () = [<Fact>] let ``Path params converted from single to double braces`` () = let gen = unwrap multiMethodSpec - let petFile = - gen.NapFiles |> List.find (fun f -> f.Content.Contains("getPetById")) + let petFile = gen.NapFiles |> List.find (fun f -> f.Content.Contains("getPetById")) Assert.Contains("{{petId}}", petFile.Content) Assert.DoesNotContain("/pets/{petId}", petFile.Content) [<Fact>] let ``POST gets status 201 assertion`` () = let gen = unwrap multiMethodSpec - let postFile = - gen.NapFiles |> List.find (fun f -> f.Content.Contains("Create pet")) + let postFile = gen.NapFiles |> List.find (fun f -> f.Content.Contains("Create pet")) Assert.Contains("status = 201", postFile.Content) [<Fact>] let ``DELETE gets status 204 assertion`` () = let gen = unwrap multiMethodSpec + let deleteFile = gen.NapFiles |> List.find (fun f -> f.Content.Contains("Delete pet")) + Assert.Contains("status = 204", deleteFile.Content) [<Fact>] let ``Uses operationId for 
file name`` () = let gen = unwrap multiMethodSpec + let opIdFile = gen.NapFiles |> List.tryFind (fun f -> f.FileName.Contains("getPetById")) + Assert.True(opIdFile.IsSome, "must use operationId in filename") -// --- Request bodies --- +// --- Request bodies --- Spec: openapi-body-gen, nap-headers, nap-body [<Fact>] let ``POST includes Content-Type and Accept headers`` () = let gen = unwrap multiMethodSpec - let postFile = - gen.NapFiles |> List.find (fun f -> f.Content.Contains("Create pet")) + let postFile = gen.NapFiles |> List.find (fun f -> f.Content.Contains("Create pet")) Assert.Contains("[request.headers]", postFile.Content) Assert.Contains("Content-Type = application/json", postFile.Content) Assert.Contains("Accept = application/json", postFile.Content) @@ -254,18 +265,16 @@ let ``GET does not get request headers section`` () = [<Fact>] let ``POST generates request body from schema`` () = let gen = unwrap multiMethodSpec - let postFile = - gen.NapFiles |> List.find (fun f -> f.Content.Contains("Create pet")) + let postFile = gen.NapFiles |> List.find (fun f -> f.Content.Contains("Create pet")) Assert.Contains("[request.body]", postFile.Content) Assert.Contains("\"\"\"", postFile.Content) -// --- Vars block --- +// --- Vars block --- Spec: openapi-params, nap-vars [<Fact>] let ``Path with params generates vars section`` () = let gen = unwrap multiMethodSpec - let petFile = - gen.NapFiles |> List.find (fun f -> f.Content.Contains("getPetById")) + let petFile = gen.NapFiles |> List.find (fun f -> f.Content.Contains("getPetById")) Assert.Contains("[vars]", petFile.Content) Assert.Contains("petId = \"REPLACE_ME\"", petFile.Content) @@ -276,7 +285,8 @@ let ``Path without params has no vars section`` () = [<Fact>] let ``Multiple path params each get a var entry`` () = - let spec = """ + let spec = + """ { "openapi": "3.0.0", "info": { "title": "Multi Param" }, @@ -289,16 +299,18 @@ let ``Multiple path params each get a var entry`` () = } } }""" + let gen = unwrap 
spec let content = (firstFile gen).Content Assert.Contains("orgId = \"REPLACE_ME\"", content) Assert.Contains("userId = \"REPLACE_ME\"", content) -// --- Response body assertions --- +// --- Response body assertions --- Spec: openapi-assert-gen, nap-assert [<Fact>] let ``OAS3 response schema generates body field assertions`` () = - let spec = """ + let spec = + """ { "openapi": "3.0.0", "info": { "title": "Schema API" }, @@ -328,6 +340,7 @@ let ``OAS3 response schema generates body field assertions`` () = } } }""" + let gen = unwrap spec let content = (firstFile gen).Content Assert.Contains("body.id exists", content) @@ -339,9 +352,10 @@ let ``No body assertions when response has no schema`` () = let content = (unwrap minimalOas3 |> firstFile).Content Assert.DoesNotContain("body.", content) -// --- Tag-based folders --- +// --- Tag-based folders --- Spec: openapi-tag-dirs -let private taggedSpec = """ +let private taggedSpec = + """ { "openapi": "3.0.0", "info": { "title": "Tagged API" }, @@ -385,7 +399,10 @@ let private taggedSpec = """ [<Fact>] let ``Tagged operations get tag subdirectory`` () = let gen = unwrap taggedSpec - let userFiles = gen.NapFiles |> List.filter (fun f -> f.FileName.StartsWith("users/")) + + let userFiles = + gen.NapFiles |> List.filter (fun f -> f.FileName.StartsWith("users/")) + Assert.Equal(2, userFiles.Length) [<Fact>] @@ -397,7 +414,10 @@ let ``Different tags create different subdirectories`` () = [<Fact>] let ``Untagged operations stay in root`` () = let gen = unwrap taggedSpec - let healthFile = gen.NapFiles |> List.find (fun f -> f.Content.Contains("Health check")) + + let healthFile = + gen.NapFiles |> List.find (fun f -> f.Content.Contains("Health check")) + Assert.DoesNotContain("/", healthFile.FileName) [<Fact>] @@ -406,11 +426,12 @@ let ``Playlist references files with subdirectory paths`` () = Assert.Contains("./users/", gen.Playlist.Content) Assert.Contains("./pets/", gen.Playlist.Content) -// --- Query parameters --- +// --- 
Query parameters --- Spec: openapi-query-params, nap-request, nap-vars [<Fact>] let ``Query params appended to URL`` () = - let spec = """ + let spec = + """ { "openapi": "3.0.0", "info": { "title": "Query API" }, @@ -427,12 +448,14 @@ let ``Query params appended to URL`` () = } } }""" + let content = (unwrap spec |> firstFile).Content Assert.Contains("?q={{q}}&limit={{limit}}", content) [<Fact>] let ``Query params added to vars section`` () = - let spec = """ + let spec = + """ { "openapi": "3.0.0", "info": { "title": "Query API" }, @@ -448,15 +471,17 @@ let ``Query params added to vars section`` () = } } }""" + let content = (unwrap spec |> firstFile).Content Assert.Contains("[vars]", content) Assert.Contains("q = \"REPLACE_ME\"", content) -// --- Auth schemes --- +// --- Auth schemes --- Spec: openapi-auth, nap-headers [<Fact>] let ``Bearer auth adds Authorization header`` () = - let spec = """ + let spec = + """ { "openapi": "3.0.0", "info": { "title": "Auth API" }, @@ -475,6 +500,7 @@ let ``Bearer auth adds Authorization header`` () = } } }""" + let content = (unwrap spec |> firstFile).Content Assert.Contains("[request.headers]", content) Assert.Contains("Authorization = Bearer {{token}}", content) @@ -482,7 +508,8 @@ let ``Bearer auth adds Authorization header`` () = [<Fact>] let ``API key auth adds custom header`` () = - let spec = """ + let spec = + """ { "openapi": "3.0.0", "info": { "title": "API Key API" }, @@ -501,13 +528,15 @@ let ``API key auth adds custom header`` () = } } }""" + let content = (unwrap spec |> firstFile).Content Assert.Contains("X-API-Key = {{apiKey}}", content) Assert.Contains("apiKey = \"REPLACE_ME\"", content) [<Fact>] let ``Global security applies to all operations`` () = - let spec = """ + let spec = + """ { "openapi": "3.0.0", "info": { "title": "Global Auth" }, @@ -526,6 +555,7 @@ let ``Global security applies to all operations`` () = } } }""" + let content = (unwrap spec |> firstFile).Content Assert.Contains("Authorization = 
Bearer {{token}}", content) @@ -534,11 +564,162 @@ let ``No auth headers when no security defined`` () = let content = (unwrap minimalOas3 |> firstFile).Content Assert.DoesNotContain("Authorization", content) -// --- Base URL fallback --- +[<Fact>] +let ``Basic auth adds Authorization header with Basic prefix`` () = + let spec = + """ + { + "openapi": "3.0.0", + "info": { "title": "Basic Auth API" }, + "paths": { + "/secure": { + "get": { + "summary": "Secure endpoint", + "security": [{ "basicAuth": [] }], + "responses": { "200": { "description": "OK" } } + } + } + }, + "components": { + "securitySchemes": { + "basicAuth": { "type": "http", "scheme": "basic" } + } + } + }""" + + let content = (unwrap spec |> firstFile).Content + Assert.Contains("[request.headers]", content) + Assert.Contains("Authorization = Basic {{basicAuth}}", content) + Assert.Contains("[vars]", content) + Assert.Contains("basicAuth = \"REPLACE_ME\"", content) + +// --- Body content verification --- Spec: openapi-body-gen, nap-body + +[<Fact>] +let ``POST body contains actual JSON from schema`` () = + let gen = unwrap multiMethodSpec + let postFile = gen.NapFiles |> List.find (fun f -> f.Content.Contains("Create pet")) + Assert.Contains("[request.body]", postFile.Content) + Assert.Contains("\"\"\"", postFile.Content) + Assert.Contains("\"name\"", postFile.Content) + Assert.Contains("\"age\"", postFile.Content) + +[<Fact>] +let ``Nested object schema generates nested JSON body`` () = + let spec = + """ + { + "openapi": "3.0.0", + "info": { "title": "Nested API" }, + "paths": { + "/orders": { + "post": { + "summary": "Create order", + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "item": { "type": "string" }, + "quantity": { "type": "integer" }, + "address": { + "type": "object", + "properties": { + "street": { "type": "string" }, + "city": { "type": "string" } + } + } + } + } + } + } + }, + "responses": { "201": { "description": 
"Created" } } + } + } + } + }""" + + let content = (unwrap spec |> firstFile).Content + Assert.Contains("[request.body]", content) + Assert.Contains("\"item\"", content) + Assert.Contains("\"street\"", content) + Assert.Contains("\"city\"", content) + +// --- All path param endpoints must have vars --- Spec: openapi-params, nap-vars + +[<Fact>] +let ``Every endpoint with path params has vars section`` () = + let gen = unwrap multiMethodSpec + + let paramFiles = + gen.NapFiles |> List.filter (fun f -> f.Content.Contains("{{petId}}")) + + Assert.True(paramFiles.Length >= 2, $"Must have at least 2 petId endpoints, got {paramFiles.Length}") + + for f in paramFiles do + Assert.Contains("[vars]", f.Content) + Assert.Contains("petId = \"REPLACE_ME\"", f.Content) + +// --- Complete .nap file format validation --- Spec: nap-file, nap-meta, nap-request, nap-assert + +[<Fact>] +let ``Generated nap file has correct section ordering`` () = + let content = (unwrap minimalOas3 |> firstFile).Content + let metaIdx = content.IndexOf("[meta]") + let requestIdx = content.IndexOf("[request]") + let assertIdx = content.IndexOf("[assert]") + Assert.True(metaIdx >= 0, "Must have [meta]") + Assert.True(requestIdx > metaIdx, "[request] must come after [meta]") + Assert.True(assertIdx > requestIdx, "[assert] must come after [request]") + +[<Fact>] +let ``POST nap file has full section chain`` () = + let gen = unwrap multiMethodSpec + let postFile = gen.NapFiles |> List.find (fun f -> f.Content.Contains("Create pet")) + let content = postFile.Content + let metaIdx = content.IndexOf("[meta]") + let requestIdx = content.IndexOf("[request]") + let headersIdx = content.IndexOf("[request.headers]") + let bodyIdx = content.IndexOf("[request.body]") + let assertIdx = content.IndexOf("[assert]") + Assert.True(metaIdx >= 0, "Must have [meta]") + Assert.True(requestIdx > metaIdx, "[request] must come after [meta]") + Assert.True(headersIdx > requestIdx, "[request.headers] must come after [request]") + 
Assert.True(bodyIdx > headersIdx, "[request.body] must come after [request.headers]") + Assert.True(assertIdx > bodyIdx, "[assert] must come after [request.body]") + +// --- Playlist format validation --- Spec: naplist-file + +[<Fact>] +let ``Playlist has meta section with API title`` () = + let gen = unwrap minimalOas3 + Assert.Contains("[meta]", gen.Playlist.Content) + Assert.Contains("name = Test API", gen.Playlist.Content) + +[<Fact>] +let ``Playlist steps reference files with relative paths`` () = + let gen = unwrap minimalOas3 + Assert.Contains("[steps]", gen.Playlist.Content) + + for f in gen.NapFiles do + Assert.Contains($"./{f.FileName}", gen.Playlist.Content) + +// --- Environment file format --- Spec: env-file + +[<Fact>] +let ``Environment file has baseUrl key-value pair`` () = + let gen = unwrap minimalOas3 + Assert.Equal(".napenv", gen.Environment.FileName) + Assert.Contains("baseUrl = https://api.test.com/v1", gen.Environment.Content) + +// --- Base URL fallback --- Spec: openapi-baseurl [<Fact>] let ``Falls back to default URL when no servers or host`` () = - let spec = """ + let spec = + """ { "openapi": "3.0.0", "info": { "title": "No Servers" }, @@ -551,5 +732,6 @@ let ``Falls back to default URL when no servers or host`` () = } } }""" + let gen = unwrap spec Assert.Contains("https://api.example.com", gen.Environment.Content) diff --git a/tests/Nap.Core.Tests/OutputEdgeCaseTests.fs b/src/Napper.Core.Tests/OutputEdgeCaseTests.fs similarity index 75% rename from tests/Nap.Core.Tests/OutputEdgeCaseTests.fs rename to src/Napper.Core.Tests/OutputEdgeCaseTests.fs index 1f8acf8..76c97eb 100644 --- a/tests/Nap.Core.Tests/OutputEdgeCaseTests.fs +++ b/src/Napper.Core.Tests/OutputEdgeCaseTests.fs @@ -1,35 +1,48 @@ module OutputEdgeCaseTests +// Specs: output-json, output-junit, output-pretty open System open Xunit -open Nap.Core +open Napper.Core let private makeResult file passed statusCode body assertions error log : NapResult = - { - File = file - 
Request = { Method = GET; Url = "https://example.com"; Headers = Map.empty; Body = None } - Response = - if statusCode > 0 then - Some { - StatusCode = statusCode - Headers = Map.ofList [("Content-Type", "application/json")] - Body = body - Duration = TimeSpan.FromMilliseconds(50.0) - } - else None - Assertions = assertions - Passed = passed - Error = error - Log = log - } + { File = file + Request = + { Method = GET + Url = "https://example.com" + Headers = Map.empty + Body = None } + Response = + if statusCode > 0 then + Some + { StatusCode = statusCode + Headers = Map.ofList [ ("Content-Type", "application/json") ] + Body = body + Duration = TimeSpan.FromMilliseconds(50.0) } + else + None + Assertions = assertions + Passed = passed + Error = error + Log = log } let private passedAssertion target expected : AssertionResult = - { Assertion = { Target = target; Op = Equals expected }; Passed = true; Expected = expected; Actual = expected } + { Assertion = + { Target = target + Op = Equals expected } + Passed = true + Expected = expected + Actual = expected } let private failedAssertion target expected actual : AssertionResult = - { Assertion = { Target = target; Op = Equals expected }; Passed = false; Expected = expected; Actual = actual } + { Assertion = + { Target = target + Op = Equals expected } + Passed = false + Expected = expected + Actual = actual } -// ─── JSON output ────────────────────────────────────────────── +// ─── JSON output ─────────────────────────── Spec: output-json [<Fact>] let ``JSON output has correct file field`` () = @@ -63,10 +76,9 @@ let ``JSON output with headers`` () = [<Fact>] let ``JSON output with assertions`` () = - let assertions = [ - passedAssertion "status" "200" - failedAssertion "body.id" "42" "99" - ] + let assertions = + [ passedAssertion "status" "200"; failedAssertion "body.id" "42" "99" ] + let result = makeResult "test.nap" false 200 "" assertions None [] let json = Output.formatJson result let doc = 
System.Text.Json.JsonDocument.Parse(json) @@ -101,13 +113,13 @@ let ``JSON output bodyLength field`` () = let doc = System.Text.Json.JsonDocument.Parse(json) Assert.Equal(body.Length, doc.RootElement.GetProperty("bodyLength").GetInt32()) -// ─── JSON array output ──────────────────────────────────────── +// ─── JSON array output ───────────────────── Spec: output-json [<Fact>] let ``JSON array with multiple results`` () = let r1 = makeResult "a.nap" true 200 "" [] None [] let r2 = makeResult "b.nap" false 404 "" [] None [] - let json = Output.formatJsonArray [r1; r2] + let json = Output.formatJsonArray [ r1; r2 ] let doc = System.Text.Json.JsonDocument.Parse(json) Assert.Equal(System.Text.Json.JsonValueKind.Array, doc.RootElement.ValueKind) Assert.Equal(2, doc.RootElement.GetArrayLength()) @@ -117,7 +129,7 @@ let ``JSON array with multiple results`` () = [<Fact>] let ``JSON array with single result`` () = let r = makeResult "only.nap" true 200 "" [] None [] - let json = Output.formatJsonArray [r] + let json = Output.formatJsonArray [ r ] let doc = System.Text.Json.JsonDocument.Parse(json) Assert.Equal(1, doc.RootElement.GetArrayLength()) @@ -127,32 +139,36 @@ let ``JSON array empty`` () = let doc = System.Text.Json.JsonDocument.Parse(json) Assert.Equal(0, doc.RootElement.GetArrayLength()) -// ─── JUnit output ───────────────────────────────────────────── +// ─── JUnit output ────────────────────────── Spec: output-junit [<Fact>] let ``JUnit contains XML declaration`` () = let result = makeResult "test.nap" true 200 "" [] None [] - let xml = Output.formatJUnit [result] + let xml = Output.formatJUnit [ result ] Assert.Contains("<?xml", xml) [<Fact>] let ``JUnit with passing test has no failure element`` () = - let result = makeResult "pass.nap" true 200 "" [passedAssertion "status" "200"] None [] - let xml = Output.formatJUnit [result] + let result = + makeResult "pass.nap" true 200 "" [ passedAssertion "status" "200" ] None [] + + let xml = Output.formatJUnit [ 
result ] Assert.Contains("testcase name=\"pass\"", xml) Assert.DoesNotContain("<failure", xml) [<Fact>] let ``JUnit with failing test has failure element`` () = - let result = makeResult "fail.nap" false 404 "" [failedAssertion "status" "200" "404"] None [] - let xml = Output.formatJUnit [result] + let result = + makeResult "fail.nap" false 404 "" [ failedAssertion "status" "200" "404" ] None [] + + let xml = Output.formatJUnit [ result ] Assert.Contains("<failure", xml) Assert.Contains("status", xml) [<Fact>] let ``JUnit with error result shows error message`` () = let result = makeResult "error.nap" false 0 "" [] (Some "Connection refused") [] - let xml = Output.formatJUnit [result] + let xml = Output.formatJUnit [ result ] Assert.Contains("<failure", xml) Assert.Contains("Connection refused", xml) @@ -161,17 +177,17 @@ let ``JUnit with mixed results counts failures`` () = let r1 = makeResult "a.nap" true 200 "" [] None [] let r2 = makeResult "b.nap" false 500 "" [] None [] let r3 = makeResult "c.nap" true 201 "" [] None [] - let xml = Output.formatJUnit [r1; r2; r3] + let xml = Output.formatJUnit [ r1; r2; r3 ] Assert.Contains("tests=\"3\"", xml) Assert.Contains("failures=\"1\"", xml) [<Fact>] let ``JUnit time attribute is in seconds`` () = let result = makeResult "test.nap" true 200 "" [] None [] - let xml = Output.formatJUnit [result] + let xml = Output.formatJUnit [ result ] Assert.Contains("time=\"0.050\"", xml) -// ─── Pretty output ──────────────────────────────────────────── +// ─── Pretty output ───────────────────────── Spec: output-pretty [<Fact>] let ``Pretty output contains PASS for passing result`` () = @@ -193,14 +209,16 @@ let ``Pretty output shows error message`` () = [<Fact>] let ``Pretty output shows log lines`` () = - let result = makeResult "script.fsx" true 0 "" [] None ["[setup] line 1"; "[setup] line 2"] + let result = + makeResult "script.fsx" true 0 "" [] None [ "[setup] line 1"; "[setup] line 2" ] + let pretty = Output.formatPretty 
result Assert.Contains("[setup] line 1", pretty) Assert.Contains("[setup] line 2", pretty) [<Fact>] let ``Pretty output shows failed assertion with expected/actual`` () = - let assertions = [failedAssertion "status" "200" "404"] + let assertions = [ failedAssertion "status" "200" "404" ] let result = makeResult "test.nap" false 404 "" assertions None [] let pretty = Output.formatPretty result Assert.Contains("expected: 200", pretty) @@ -213,34 +231,32 @@ let ``Pretty output shows status code and method`` () = Assert.Contains("200", pretty) Assert.Contains("GET", pretty) -// ─── Summary output ─────────────────────────────────────────── +// ─── Summary output ──────────────────────── Spec: output-pretty [<Fact>] let ``Summary all passed`` () = - let results = [ - makeResult "a.nap" true 200 "" [] None [] - makeResult "b.nap" true 201 "" [] None [] - ] + let results = + [ makeResult "a.nap" true 200 "" [] None [] + makeResult "b.nap" true 201 "" [] None [] ] + let summary = Output.formatSummary results Assert.Contains("2/2 passed", summary) Assert.Contains("0 failed", summary) [<Fact>] let ``Summary with failures`` () = - let results = [ - makeResult "a.nap" true 200 "" [] None [] - makeResult "b.nap" false 404 "" [] None [] - makeResult "c.nap" false 500 "" [] None [] - ] + let results = + [ makeResult "a.nap" true 200 "" [] None [] + makeResult "b.nap" false 404 "" [] None [] + makeResult "c.nap" false 500 "" [] None [] ] + let summary = Output.formatSummary results Assert.Contains("1/3 passed", summary) Assert.Contains("2 failed", summary) [<Fact>] let ``Summary with all failures`` () = - let results = [ - makeResult "a.nap" false 500 "" [] None [] - ] + let results = [ makeResult "a.nap" false 500 "" [] None [] ] let summary = Output.formatSummary results Assert.Contains("0/1 passed", summary) Assert.Contains("1 failed", summary) diff --git a/tests/Nap.Core.Tests/ParserEdgeCaseTests.fs b/src/Napper.Core.Tests/ParserEdgeCaseTests.fs similarity index 72% rename 
from tests/Nap.Core.Tests/ParserEdgeCaseTests.fs rename to src/Napper.Core.Tests/ParserEdgeCaseTests.fs index 0d6874a..9134592 100644 --- a/tests/Nap.Core.Tests/ParserEdgeCaseTests.fs +++ b/src/Napper.Core.Tests/ParserEdgeCaseTests.fs @@ -1,9 +1,14 @@ module ParserEdgeCaseTests +// Specs: nap-minimal, nap-full, nap-file, nap-meta, nap-vars, nap-request, nap-headers, +// nap-body, nap-assert, nap-script, nap-comments, http-methods, +// naplist-file, naplist-meta, naplist-vars, naplist-steps, naplist-nap-step, +// naplist-folder-step, naplist-script-step, +// assert-status, assert-exists, assert-contains, assert-matches, assert-lt, assert-gt open Xunit -open Nap.Core +open Napper.Core -// ─── Shorthand: all HTTP methods ────────────────────────────── +// ─── Shorthand: all HTTP methods ─────────── Spec: nap-minimal, http-methods [<Fact>] let ``Parse shorthand PUT`` () = @@ -68,11 +73,12 @@ let ``Shorthand has empty meta and no assertions`` () = Assert.Equal(None, nap.Script.Post) | Error e -> failwith e -// ─── Full format: meta variations ───────────────────────────── +// ─── Full format: meta variations ────────── Spec: nap-meta, nap-file [<Fact>] let ``Parse meta with description`` () = - let input = """ + let input = + """ [meta] name = "My Request" description = "A detailed description" @@ -81,6 +87,7 @@ description = "A detailed description" method = GET url = https://example.com """ + match Parser.parseNapFile input with | Ok nap -> Assert.Equal(Some "My Request", nap.Meta.Name) @@ -89,7 +96,8 @@ url = https://example.com [<Fact>] let ``Parse meta with empty tags`` () = - let input = """ + let input = + """ [meta] name = "No tags" @@ -97,17 +105,20 @@ name = "No tags" method = GET url = https://example.com """ + match Parser.parseNapFile input with | Ok nap -> Assert.Empty(nap.Meta.Tags) | Error e -> failwith e [<Fact>] let ``Parse without meta block`` () = - let input = """ + let input = + """ [request] method = POST url = https://example.com/create """ + 
match Parser.parseNapFile input with | Ok nap -> Assert.Equal(None, nap.Meta.Name) @@ -116,27 +127,33 @@ url = https://example.com/create [<Fact>] let ``Request defaults to GET when method missing`` () = - let input = """ + let input = + """ [request] url = https://example.com """ + match Parser.parseNapFile input with | Ok nap -> Assert.Equal(GET, nap.Request.Method) | Error e -> failwith e -// ─── Full format: body variations ───────────────────────────── +// ─── Full format: body variations ────────── Spec: nap-body [<Fact>] let ``Body without content-type defaults to application/json`` () = let tq = "\"\"\"" + let input = - "[request]\n" + - "method = POST\n" + - "url = https://example.com\n\n" + - "[request.body]\n" + - tq + "\n" + - "{\"key\": \"value\"}\n" + - tq + "\n" + "[request]\n" + + "method = POST\n" + + "url = https://example.com\n\n" + + "[request.body]\n" + + tq + + "\n" + + "{\"key\": \"value\"}\n" + + tq + + "\n" + match Parser.parseNapFile input with | Ok nap -> Assert.True(nap.Request.Body.IsSome) @@ -145,7 +162,8 @@ let ``Body without content-type defaults to application/json`` () = [<Fact>] let ``Body with inline content (not triple-quoted)`` () = - let input = """ + let input = + """ [request] method = POST url = https://example.com @@ -154,6 +172,7 @@ url = https://example.com content-type = text/plain content = Hello world """ + match Parser.parseNapFile input with | Ok nap -> Assert.True(nap.Request.Body.IsSome) @@ -163,55 +182,61 @@ content = Hello world [<Fact>] let ``No body block yields None`` () = - let input = """ + let input = + """ [request] method = GET url = https://example.com """ + match Parser.parseNapFile input with | Ok nap -> Assert.True(nap.Request.Body.IsNone) | Error e -> failwith e -// ─── Full format: multiple sections combined ────────────────── +// ─── Full format: multiple sections combined Spec: nap-full, nap-meta, nap-vars, nap-request, nap-headers, nap-body, nap-assert, nap-script, nap-comments [<Fact>] let 
``Full format with all sections`` () = let tq = "\"\"\"" + let input = - "# File-level comment\n\n" + - "[meta]\n" + - "name = \"Full test\"\n" + - "description = \"Everything\"\n" + - "tags = [\"smoke\", \"integration\"]\n\n" + - "# Vars comment\n" + - "[vars]\n" + - "baseUrl = \"https://api.example.com\"\n" + - "userId = \"42\"\n\n" + - "[request]\n" + - "method = POST\n" + - "url = {{baseUrl}}/users/{{userId}}\n\n" + - "[request.headers]\n" + - "Authorization = Bearer token123\n" + - "Accept = application/json\n\n" + - "[request.body]\n" + - "content-type = application/json\n" + - tq + "\n" + - "{ \"name\": \"test\" }\n" + - tq + "\n\n" + - "[assert]\n" + - "status = 201\n" + - "body.id exists\n" + - "headers.Content-Type contains \"json\"\n" + - "duration < 1000ms\n" + - "body.name = test\n\n" + - "[script]\n" + - "pre = ./setup.fsx\n" + - "post = ./teardown.fsx\n" + "# File-level comment\n\n" + + "[meta]\n" + + "name = \"Full test\"\n" + + "description = \"Everything\"\n" + + "tags = [\"smoke\", \"integration\"]\n\n" + + "# Vars comment\n" + + "[vars]\n" + + "baseUrl = \"https://api.example.com\"\n" + + "userId = \"42\"\n\n" + + "[request]\n" + + "method = POST\n" + + "url = {{baseUrl}}/users/{{userId}}\n\n" + + "[request.headers]\n" + + "Authorization = Bearer token123\n" + + "Accept = application/json\n\n" + + "[request.body]\n" + + "content-type = application/json\n" + + tq + + "\n" + + "{ \"name\": \"test\" }\n" + + tq + + "\n\n" + + "[assert]\n" + + "status = 201\n" + + "body.id exists\n" + + "headers.Content-Type contains \"json\"\n" + + "duration < 1000ms\n" + + "body.name = test\n\n" + + "[script]\n" + + "pre = ./setup.fsx\n" + + "post = ./teardown.fsx\n" + match Parser.parseNapFile input with | Ok nap -> Assert.Equal(Some "Full test", nap.Meta.Name) Assert.Equal(Some "Everything", nap.Meta.Description) - Assert.Equal<string list>(["smoke"; "integration"], nap.Meta.Tags) + Assert.Equal<string list>([ "smoke"; "integration" ], nap.Meta.Tags) 
Assert.Equal("https://api.example.com", nap.Vars["baseUrl"]) Assert.Equal("42", nap.Vars["userId"]) Assert.Equal(POST, nap.Request.Method) @@ -222,11 +247,12 @@ let ``Full format with all sections`` () = Assert.Equal(Some "./teardown.fsx", nap.Script.Post) | Error e -> failwith e -// ─── Assertion operators ────────────────────────────────────── +// ─── Assertion operators ─────────────────── Spec: nap-assert, assert-status, assert-exists, assert-contains, assert-matches, assert-lt, assert-gt [<Fact>] let ``Parse all assertion operators`` () = - let input = """ + let input = + """ [request] method = GET url = https://example.com @@ -239,22 +265,44 @@ body.pattern matches "^\\d+$" duration < 500ms body.count > 10 """ + match Parser.parseNapFile input with | Ok nap -> Assert.Equal(6, nap.Assertions.Length) Assert.Equal({ Target = "status"; Op = Equals "200" }, nap.Assertions[0]) Assert.Equal({ Target = "body.id"; Op = Exists }, nap.Assertions[1]) - Assert.Equal({ Target = "headers.Content-Type"; Op = Contains "json" }, nap.Assertions[2]) - Assert.Equal({ Target = "body.pattern"; Op = Matches "^\\\\d+$" }, nap.Assertions[3]) - Assert.Equal({ Target = "duration"; Op = LessThan "500ms" }, nap.Assertions[4]) - Assert.Equal({ Target = "body.count"; Op = GreaterThan "10" }, nap.Assertions[5]) + + Assert.Equal( + { Target = "headers.Content-Type" + Op = Contains "json" }, + nap.Assertions[2] + ) + + Assert.Equal( + { Target = "body.pattern" + Op = Matches "^\\\\d+$" }, + nap.Assertions[3] + ) + + Assert.Equal( + { Target = "duration" + Op = LessThan "500ms" }, + nap.Assertions[4] + ) + + Assert.Equal( + { Target = "body.count" + Op = GreaterThan "10" }, + nap.Assertions[5] + ) | Error e -> failwith e -// ─── Naplist variations ─────────────────────────────────────── +// ─── Naplist variations ──────────────────── Spec: naplist-file, naplist-meta, naplist-vars, naplist-steps, naplist-nap-step, naplist-folder-step, naplist-script-step [<Fact>] let ``Naplist with folder refs`` 
() = - let input = """ + let input = + """ [meta] name = "With folders" @@ -262,6 +310,7 @@ name = "With folders" auth ./tests/01_basic.nap """ + match Parser.parseNapList input with | Ok pl -> Assert.Equal(2, pl.Steps.Length) @@ -271,13 +320,15 @@ auth [<Fact>] let ``Naplist with comments between steps`` () = - let input = """ + let input = + """ [steps] # First step ./01_login.nap # Second step ./02_get-user.nap """ + match Parser.parseNapList input with | Ok pl -> Assert.Equal(2, pl.Steps.Length) @@ -287,20 +338,23 @@ let ``Naplist with comments between steps`` () = [<Fact>] let ``Naplist with no env defaults to None`` () = - let input = """ + let input = + """ [meta] name = "No env" [steps] ./test.nap """ + match Parser.parseNapList input with | Ok pl -> Assert.Equal(None, pl.Env) | Error e -> failwith e [<Fact>] let ``Naplist with env set`` () = - let input = """ + let input = + """ [meta] name = "Staging" env = staging @@ -308,13 +362,15 @@ env = staging [steps] ./test.nap """ + match Parser.parseNapList input with | Ok pl -> Assert.Equal(Some "staging", pl.Env) | Error e -> failwith e [<Fact>] let ``Naplist with vars`` () = - let input = """ + let input = + """ [vars] timeout = "5000" baseUrl = "https://staging.example.com" @@ -322,6 +378,7 @@ baseUrl = "https://staging.example.com" [steps] ./test.nap """ + match Parser.parseNapList input with | Ok pl -> Assert.Equal("5000", pl.Vars["timeout"]) @@ -330,7 +387,8 @@ baseUrl = "https://staging.example.com" [<Fact>] let ``Naplist with mixed step types`` () = - let input = """ + let input = + """ [steps] ./scripts/setup.fsx ./auth/login.nap @@ -338,6 +396,7 @@ crud ./regression.naplist ./scripts/teardown.fsx """ + match Parser.parseNapList input with | Ok pl -> Assert.Equal(5, pl.Steps.Length) @@ -350,17 +409,19 @@ crud [<Fact>] let ``Naplist empty steps section`` () = - let input = """ + let input = + """ [meta] name = "Empty" [steps] """ + match Parser.parseNapList input with | Ok pl -> Assert.Empty(pl.Steps) | 
Error e -> failwith e -// ─── Parse errors ───────────────────────────────────────────── +// ─── Parse errors ────────────────────────── Spec: nap-file [<Fact>] let ``Parse error on completely invalid input`` () = @@ -369,11 +430,13 @@ let ``Parse error on completely invalid input`` () = [<Fact>] let ``Parse quoted values preserve spaces`` () = - let input = """ + let input = + """ [request] method = GET url = "https://example.com/path with spaces" """ + match Parser.parseNapFile input with | Ok nap -> Assert.Equal("https://example.com/path with spaces", nap.Request.Url) | Error e -> failwith e diff --git a/src/Napper.Core.Tests/RunnerE2eTests.fs b/src/Napper.Core.Tests/RunnerE2eTests.fs new file mode 100644 index 0000000..2f29777 --- /dev/null +++ b/src/Napper.Core.Tests/RunnerE2eTests.fs @@ -0,0 +1,263 @@ +module RunnerE2eTests + +open System +open System.IO +open Xunit +open Napper.Core + +let private createTempDir () = + let dir = Path.Combine(Path.GetTempPath(), $"nap-runner-e2e-{Guid.NewGuid():N}") + Directory.CreateDirectory(dir) |> ignore + dir + +let private cleanupDir (dir: string) = + if Directory.Exists(dir) then + Directory.Delete(dir, true) + +let private writeNapFile (dir: string) (name: string) (content: string) : string = + let filePath = Path.Combine(dir, name) + File.WriteAllText(filePath, content) + filePath + +// ─── runNapFile: successful GET with assertions ────────────── + +[<Fact>] +let ``runNapFile GET with assertions passes`` () = + let dir = createTempDir () + + try + let nap = "GET https://httpbin.org/get" + let filePath = writeNapFile dir "test.nap" nap + let result = Runner.runNapFile filePath Map.empty None |> Async.RunSynchronously + Assert.True(result.Passed, "Simple GET should pass") + Assert.True(result.Response.IsSome, "Must have response") + Assert.Equal(200, result.Response.Value.StatusCode) + Assert.True(result.Error.IsNone) + finally + cleanupDir dir + +[<Fact>] +let ``runNapFile full format GET with assertions`` () = + let 
dir = createTempDir () + + try + let nap = + "[request]\nmethod = GET\nurl = https://httpbin.org/get\n\n[assert]\nstatus = 200\nbody.url exists" + + let filePath = writeNapFile dir "full.nap" nap + let result = Runner.runNapFile filePath Map.empty None |> Async.RunSynchronously + Assert.True(result.Passed, "Full format GET should pass") + Assert.True(result.Assertions.Length >= 2, $"Must have 2+ assertions, got {result.Assertions.Length}") + Assert.True(result.Assertions |> List.forall (fun a -> a.Passed)) + finally + cleanupDir dir + +// ─── runNapFile: POST with body ────────────────────────────── + +[<Fact>] +let ``runNapFile POST with body`` () = + let dir = createTempDir () + + try + let nap = + "[request]\nmethod = POST\nurl = https://httpbin.org/post\n\n[request.headers]\nContent-Type = application/json\n\n[request.body]\ncontent-type = application/json\n\"\"\"\n{\"key\": \"value\"}\n\"\"\"\n\n[assert]\nstatus = 200" + + let filePath = writeNapFile dir "post.nap" nap + let result = Runner.runNapFile filePath Map.empty None |> Async.RunSynchronously + Assert.True(result.Passed, $"POST should pass. 
Error: {result.Error}") + Assert.Equal(200, result.Response.Value.StatusCode) + finally + cleanupDir dir + +// ─── runNapFile: assertion failure ─────────────────────────── + +[<Fact>] +let ``runNapFile wrong status assertion fails`` () = + let dir = createTempDir () + + try + let nap = + "[request]\nmethod = GET\nurl = https://httpbin.org/get\n\n[assert]\nstatus = 404" + + let filePath = writeNapFile dir "fail.nap" nap + let result = Runner.runNapFile filePath Map.empty None |> Async.RunSynchronously + Assert.False(result.Passed, "Wrong status should fail") + Assert.True(result.Assertions.Length >= 1, "Must have assertions") + Assert.False(result.Assertions[0].Passed) + Assert.Equal("404", result.Assertions[0].Expected) + Assert.Equal("200", result.Assertions[0].Actual) + finally + cleanupDir dir + +// ─── runNapFile: variable substitution ─────────────────────── + +[<Fact>] +let ``runNapFile substitutes CLI variables`` () = + let dir = createTempDir () + + try + let nap = + "[request]\nmethod = GET\nurl = https://httpbin.org/status/{{code}}\n\n[assert]\nstatus = {{code}}" + + let filePath = writeNapFile dir "vars.nap" nap + let vars = Map.ofList [ "code", "200" ] + let result = Runner.runNapFile filePath vars None |> Async.RunSynchronously + Assert.True(result.Passed, $"Var substitution should work. 
Error: {result.Error}") + finally + cleanupDir dir + +// ─── runNapFile: parse error ───────────────────────────────── + +[<Fact>] +let ``runNapFile parse error returns error result`` () = + let dir = createTempDir () + + try + let filePath = writeNapFile dir "bad.nap" "[meta]\nname = test\n" + let result = Runner.runNapFile filePath Map.empty None |> Async.RunSynchronously + Assert.False(result.Passed) + Assert.True(result.Error.IsSome, "Must have error") + finally + cleanupDir dir + +// ─── runNapFile: request failure ───────────────────────────── + +[<Fact>] +let ``runNapFile unreachable URL returns error`` () = + let dir = createTempDir () + + try + let nap = "GET https://this-domain-does-not-exist-napper-test.invalid/api" + let filePath = writeNapFile dir "bad-url.nap" nap + let result = Runner.runNapFile filePath Map.empty None |> Async.RunSynchronously + Assert.False(result.Passed) + Assert.True(result.Error.IsSome) + Assert.Contains("Request failed", result.Error.Value) + finally + cleanupDir dir + +// ─── runNapFile: environment loading ───────────────────────── + +[<Fact>] +let ``runNapFile loads vars from napenv`` () = + let dir = createTempDir () + + try + File.WriteAllText(Path.Combine(dir, ".napenv"), "code = 200\n") + + let nap = + "[request]\nmethod = GET\nurl = https://httpbin.org/status/{{code}}\n\n[assert]\nstatus = {{code}}" + + let filePath = writeNapFile dir "env.nap" nap + let result = Runner.runNapFile filePath Map.empty None |> Async.RunSynchronously + Assert.True(result.Passed, $"napenv vars should resolve. 
Error: {result.Error}") + finally + cleanupDir dir + +// ─── runNapFile: header contains assertion ─────────────────── + +[<Fact>] +let ``runNapFile contains assertion on header`` () = + let dir = createTempDir () + + try + let nap = + "[request]\nmethod = GET\nurl = https://httpbin.org/get\n\n[assert]\nstatus = 200\nheaders.Content-Type contains json" + + let filePath = writeNapFile dir "hdr.nap" nap + let result = Runner.runNapFile filePath Map.empty None |> Async.RunSynchronously + Assert.True(result.Passed, $"Header contains should pass. Error: {result.Error}") + finally + cleanupDir dir + +// ─── runNapFile: duration assertion ────────────────────────── + +[<Fact>] +let ``runNapFile duration less than assertion`` () = + let dir = createTempDir () + + try + let nap = + "[request]\nmethod = GET\nurl = https://httpbin.org/get\n\n[assert]\nstatus = 200\nduration < 30000ms" + + let filePath = writeNapFile dir "dur.nap" nap + let result = Runner.runNapFile filePath Map.empty None |> Async.RunSynchronously + Assert.True(result.Passed, $"Duration should pass. 
Error: {result.Error}") + finally + cleanupDir dir + +// ─── evaluateAssertions: all operators ─────────────────────── + +[<Fact>] +let ``evaluateAssertions covers all assertion operators`` () = + let response: NapResponse = + { StatusCode = 200 + Headers = Map.ofList [ "Content-Type", "application/json" ] + Body = "{\"id\": 42, \"name\": \"test\", \"active\": true, \"score\": null}" + Duration = TimeSpan.FromMilliseconds(100.0) } + + let assertions = + [ { Target = "status"; Op = Equals "200" } + { Target = "body.id"; Op = Exists } + { Target = "body.name" + Op = Equals "test" } + { Target = "body.active" + Op = Equals "true" } + { Target = "body.score"; Op = Exists } + { Target = "headers.Content-Type" + Op = Contains "json" } + { Target = "duration" + Op = LessThan "5000ms" } + { Target = "duration" + Op = GreaterThan "1ms" } + { Target = "body.name" + Op = Matches "t*t" } + { Target = "body"; Op = Exists } + { Target = "body"; Op = Contains "id" } ] + + let results = Runner.evaluateAssertions assertions response + Assert.Equal(11, results.Length) + + for r in results do + Assert.True(r.Passed, $"Assertion on {r.Assertion.Target} should pass: expected={r.Expected} actual={r.Actual}") + +[<Fact>] +let ``evaluateAssertions missing targets all fail`` () = + let response: NapResponse = + { StatusCode = 200 + Headers = Map.empty + Body = "{}" + Duration = TimeSpan.FromMilliseconds(50.0) } + + let assertions = + [ { Target = "body.nonexistent" + Op = Exists } + { Target = "body.missing" + Op = Equals "value" } + { Target = "headers.X-Missing" + Op = Contains "x" } + { Target = "body.nope" + Op = Matches "abc" } + { Target = "unknown_target" + Op = Exists } ] + + let results = Runner.evaluateAssertions assertions response + Assert.True(results |> List.forall (fun r -> not r.Passed)) + +[<Fact>] +let ``evaluateAssertions numeric comparison edge cases`` () = + let response: NapResponse = + { StatusCode = 200 + Headers = Map.empty + Body = "{}" + Duration = 
TimeSpan.FromMilliseconds(50.0) } + + let assertions = + [ { Target = "duration" + Op = LessThan "not-a-number" } + { Target = "duration" + Op = GreaterThan "999999ms" } ] + + let results = Runner.evaluateAssertions assertions response + Assert.False(results[0].Passed, "LessThan with non-numeric should fail") + Assert.False(results[1].Passed, "GreaterThan with huge value should fail") diff --git a/tests/Nap.Core.Tests/ScriptEdgeCaseTests.fs b/src/Napper.Core.Tests/ScriptEdgeCaseTests.fs similarity index 79% rename from tests/Nap.Core.Tests/ScriptEdgeCaseTests.fs rename to src/Napper.Core.Tests/ScriptEdgeCaseTests.fs index da16e76..a6e38ef 100644 --- a/tests/Nap.Core.Tests/ScriptEdgeCaseTests.fs +++ b/src/Napper.Core.Tests/ScriptEdgeCaseTests.fs @@ -1,24 +1,30 @@ module ScriptEdgeCaseTests +// Specs: script-fsx, script-runner open System open System.IO open Xunit -open Nap.Core +open Napper.Core let private createTempScript (content: string) : string = let dir = Path.GetTempPath() - let path = Path.Combine(dir, sprintf "nap-test-%s.fsx" (Guid.NewGuid().ToString("N"))) + + let path = + Path.Combine(dir, sprintf "nap-test-%s.fsx" (Guid.NewGuid().ToString("N"))) + File.WriteAllText(path, content) path let private cleanupScript (path: string) = - if File.Exists(path) then File.Delete(path) + if File.Exists(path) then + File.Delete(path) -// ─── Passing scripts ────────────────────────────────────────── +// ─── Passing scripts ─────────────────────── Spec: script-fsx [<Fact>] let ``Script with single output line`` () = let path = createTempScript "printfn \"hello\"" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed) @@ -28,7 +34,9 @@ let ``Script with single output line`` () = [<Fact>] let ``Script with multiple output lines`` () = - let path = createTempScript "printfn \"line1\"\nprintfn \"line2\"\nprintfn \"line3\"" + let path = + createTempScript "printfn \"line1\"\nprintfn \"line2\"\nprintfn \"line3\"" + try let result = 
Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed) @@ -39,6 +47,7 @@ let ``Script with multiple output lines`` () = [<Fact>] let ``Script with no output`` () = let path = createTempScript "let x = 1 + 1\n()" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed) @@ -49,6 +58,7 @@ let ``Script with no output`` () = [<Fact>] let ``Script result has no HTTP response`` () = let path = createTempScript "printfn \"ok\"" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Response.IsNone) @@ -58,6 +68,7 @@ let ``Script result has no HTTP response`` () = [<Fact>] let ``Script result has no assertions`` () = let path = createTempScript "printfn \"ok\"" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.Empty(result.Assertions) @@ -67,17 +78,19 @@ let ``Script result has no assertions`` () = [<Fact>] let ``Script result has correct file path`` () = let path = createTempScript "printfn \"ok\"" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.Equal(path, result.File) finally cleanupScript path -// ─── Failing scripts ────────────────────────────────────────── +// ─── Failing scripts ─────────────────────── Spec: script-fsx [<Fact>] let ``Script with type error fails`` () = let path = createTempScript "let x: int = \"string\"" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.False(result.Passed) @@ -88,6 +101,7 @@ let ``Script with type error fails`` () = [<Fact>] let ``Script with explicit exit code 1 fails`` () = let path = createTempScript "printfn \"about to fail\"\nexit 1" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.False(result.Passed) @@ -98,6 +112,7 @@ let ``Script with explicit exit code 1 fails`` () = [<Fact>] let ``Failed script still captures stdout before failure`` () = let path = createTempScript "printfn \"before error\"\nexit 1" + try let result = Runner.runScript 
path |> Async.RunSynchronously Assert.False(result.Passed) @@ -108,6 +123,7 @@ let ``Failed script still captures stdout before failure`` () = [<Fact>] let ``Script with runtime exception fails`` () = let path = createTempScript "failwith \"boom\"" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.False(result.Passed) @@ -115,11 +131,13 @@ let ``Script with runtime exception fails`` () = finally cleanupScript path -// ─── Script doing actual work ───────────────────────────────── +// ─── Script doing actual work ────────────── Spec: script-fsx [<Fact>] let ``Script can do computation and print result`` () = - let path = createTempScript "let result = [1..10] |> List.sum\nprintfn \"Sum: %d\" result" + let path = + createTempScript "let result = [1..10] |> List.sum\nprintfn \"Sum: %d\" result" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed) @@ -129,7 +147,9 @@ let ``Script can do computation and print result`` () = [<Fact>] let ``Script can read environment variables`` () = - let path = createTempScript "printfn \"PATH exists: %b\" (System.Environment.GetEnvironmentVariable(\"PATH\") <> null)" + let path = + createTempScript "printfn \"PATH exists: %b\" (System.Environment.GetEnvironmentVariable(\"PATH\") <> null)" + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed) @@ -139,20 +159,29 @@ let ``Script can read environment variables`` () = [<Fact>] let ``Script can write and read temp file`` () = - let tempFile = Path.Combine(Path.GetTempPath(), sprintf "nap-script-io-%s.txt" (Guid.NewGuid().ToString("N"))) + let tempFile = + Path.Combine(Path.GetTempPath(), sprintf "nap-script-io-%s.txt" (Guid.NewGuid().ToString("N"))) + let escapedPath = tempFile.Replace("\\", "\\\\") + let script = - sprintf "let path = \"%s\"\nSystem.IO.File.WriteAllText(path, \"hello from script\")\nlet content = System.IO.File.ReadAllText(path)\nprintfn \"Read: %%s\" 
content\nSystem.IO.File.Delete(path)" escapedPath + sprintf + "let path = \"%s\"\nSystem.IO.File.WriteAllText(path, \"hello from script\")\nlet content = System.IO.File.ReadAllText(path)\nprintfn \"Read: %%s\" content\nSystem.IO.File.Delete(path)" + escapedPath + let path = createTempScript script + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed) Assert.Contains("Read: hello from script", result.Log) finally cleanupScript path - if File.Exists(tempFile) then File.Delete(tempFile) -// ─── Non-existent script ────────────────────────────────────── + if File.Exists(tempFile) then + File.Delete(tempFile) + +// ─── Non-existent script ─────────────────── Spec: script-fsx [<Fact>] let ``Non-existent script path fails`` () = @@ -161,30 +190,37 @@ let ``Non-existent script path fails`` () = Assert.False(result.Passed) Assert.True(result.Error.IsSome) -// ─── Script with HTTP call ──────────────────────────────────── +// ─── Script with HTTP call ───────────────── Spec: script-fsx [<Fact>] let ``Script can make HTTP request`` () = - let script = """ + let script = + """ open System.Net.Http let client = new HttpClient() let response = client.GetAsync("https://httpbin.org/get") |> Async.AwaitTask |> Async.RunSynchronously printfn "Status: %d" (int response.StatusCode) """ + let path = createTempScript script + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed, $"Script should pass. Error: {result.Error}") - Assert.True(result.Log |> List.exists (fun l -> l.Contains("Status: 200")), - $"Should contain status 200. Log: {result.Log}") + + Assert.True( + result.Log |> List.exists (fun l -> l.Contains("Status: 200")), + $"Should contain status 200. 
Log: {result.Log}" + ) finally cleanupScript path -// ─── Script with async computation ──────────────────────────── +// ─── Script with async computation ───────── Spec: script-fsx [<Fact>] let ``Script with async workflow`` () = - let script = """ + let script = + """ let work = async { do! Async.Sleep(100) return 42 @@ -192,7 +228,9 @@ let work = async { let result = work |> Async.RunSynchronously printfn "Async result: %d" result """ + let path = createTempScript script + try let result = Runner.runScript path |> Async.RunSynchronously Assert.True(result.Passed) diff --git a/src/Napper.Core.Tests/TestHelpers.fs b/src/Napper.Core.Tests/TestHelpers.fs new file mode 100644 index 0000000..014b25e --- /dev/null +++ b/src/Napper.Core.Tests/TestHelpers.fs @@ -0,0 +1,102 @@ +module TestHelpers + +open System +open System.Diagnostics +open System.IO + +// --- Constants --- + +[<Literal>] +let NapperBinaryName = "napper" + +[<Literal>] +let DefaultTimeoutMs = 5_000 + +[<Literal>] +let ScriptTimeoutMs = 30_000 + +// --- CLI runner: uses the installed binary, never recompiles --- + +let private logLock = obj () + +let log (msg: string) = + lock logLock (fun () -> + Console.Error.WriteLine(msg) + Console.Error.Flush()) + +let private findRepoRoot () : string option = + let mutable dir = DirectoryInfo(AppContext.BaseDirectory) + + while dir <> null + && not (File.Exists(Path.Combine(dir.FullName, "Directory.Build.props"))) do + dir <- dir.Parent + + if dir <> null then Some dir.FullName else None + +let private findNapper () : string = + let home = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile) + let dotnetTool = Path.Combine(home, ".dotnet", "tools", NapperBinaryName) + let localBin = Path.Combine(home, ".local", "bin", NapperBinaryName) + + match findRepoRoot () with + | Some root -> + let buildBin = + Path.Combine(root, "src", "Napper.Cli", "bin", "Debug", "net10.0", NapperBinaryName) + + if File.Exists buildBin then + log $"[test] Using build output 
binary: %s{buildBin}" + buildBin + elif File.Exists dotnetTool then + dotnetTool + elif File.Exists localBin then + localBin + else + NapperBinaryName + | None -> + if File.Exists dotnetTool then dotnetTool + elif File.Exists localBin then localBin + else NapperBinaryName + +let runCliWithTimeout (timeoutMs: int) (args: string) (cwd: string) : int * string * string = + let binary = findNapper () + let sw = Stopwatch.StartNew() + log $"[test] napper %s{args}" + let psi = ProcessStartInfo() + psi.FileName <- binary + psi.Arguments <- args + psi.WorkingDirectory <- cwd + psi.RedirectStandardOutput <- true + psi.RedirectStandardError <- true + psi.RedirectStandardInput <- true + psi.UseShellExecute <- false + psi.CreateNoWindow <- true + use proc = Process.Start(psi) + proc.StandardInput.Close() + let stdoutTask = proc.StandardOutput.ReadToEndAsync() + let stderrTask = proc.StandardError.ReadToEndAsync() + + if not (proc.WaitForExit(timeoutMs)) then + proc.Kill(true) + sw.Stop() + log $"[test] TIMEOUT after %d{timeoutMs}ms | napper %s{args}" + failwith $"napper process timed out after %d{timeoutMs}ms: napper %s{args}" + + let stdout = stdoutTask.Result + let stderr = stderrTask.Result + sw.Stop() + log $"[test] napper %s{args} | exit=%d{proc.ExitCode} elapsed=%d{sw.ElapsedMilliseconds}ms" + proc.ExitCode, stdout, stderr + +let runCli (args: string) (cwd: string) : int * string * string = + runCliWithTimeout DefaultTimeoutMs args cwd + +// --- Temp directory helpers --- + +let createTempDir (prefix: string) : string = + let dir = Path.Combine(Path.GetTempPath(), $"{prefix}-{Guid.NewGuid():N}") + Directory.CreateDirectory(dir) |> ignore + dir + +let cleanupDir (dir: string) : unit = + if Directory.Exists(dir) then + Directory.Delete(dir, true) diff --git a/tests/Nap.Core.Tests/Tests.fs b/src/Napper.Core.Tests/Tests.fs similarity index 67% rename from tests/Nap.Core.Tests/Tests.fs rename to src/Napper.Core.Tests/Tests.fs index 28e877b..6a00a92 100644 --- 
a/tests/Nap.Core.Tests/Tests.fs +++ b/src/Napper.Core.Tests/Tests.fs @@ -1,14 +1,19 @@ module Tests +// Specs: nap-minimal, nap-full, nap-meta, nap-vars, nap-request, nap-headers, nap-body, +// nap-assert, nap-script, nap-comments, http-methods, env-interpolation, env-file, +// env-resolution, cli-var, assert-status, assert-equals, assert-exists, assert-contains, +// assert-lt, script-fsx, output-json, output-junit open System open Xunit -open Nap.Core +open Napper.Core -// ─── Parser: Shorthand ───────────────────────────────────────── +// ─── Parser: Shorthand ──────────────────────── Spec: nap-minimal, http-methods [<Fact>] let ``Parse shorthand GET request`` () = let result = Parser.parseNapFile "GET https://example.com/api" + match result with | Result.Ok nap -> Assert.Equal(GET, nap.Request.Method) @@ -19,15 +24,17 @@ let ``Parse shorthand GET request`` () = [<Fact>] let ``Parse shorthand POST request`` () = let result = Parser.parseNapFile "POST https://example.com/api" + match result with | Result.Ok nap -> Assert.Equal(POST, nap.Request.Method) | Result.Error e -> failwith e -// ─── Parser: Full format ─────────────────────────────────────── +// ─── Parser: Full format ──────────────────── Spec: nap-full, nap-meta, nap-request, nap-headers, nap-body, nap-assert, nap-vars, nap-script, nap-comments [<Fact>] let ``Parse full format with meta and request`` () = - let input = """ + let input = + """ [meta] name = "Test request" tags = ["smoke", "users"] @@ -36,18 +43,21 @@ tags = ["smoke", "users"] method = GET url = https://example.com/users """ + let result = Parser.parseNapFile input + match result with | Result.Ok nap -> Assert.Equal(Some "Test request", nap.Meta.Name) - Assert.Equal<string list>(["smoke"; "users"], nap.Meta.Tags) + Assert.Equal<string list>([ "smoke"; "users" ], nap.Meta.Tags) Assert.Equal(GET, nap.Request.Method) Assert.Equal("https://example.com/users", nap.Request.Url) | Result.Error e -> failwith e [<Fact>] let ``Parse full format with 
comments`` () = - let input = """ + let input = + """ # This is a comment [meta] name = "Commented request" @@ -57,7 +67,9 @@ name = "Commented request" method = POST url = https://example.com/create """ + let result = Parser.parseNapFile input + match result with | Result.Ok nap -> Assert.Equal(Some "Commented request", nap.Meta.Name) @@ -66,7 +78,8 @@ url = https://example.com/create [<Fact>] let ``Parse full format with headers`` () = - let input = """ + let input = + """ [request] method = GET url = https://example.com @@ -75,7 +88,9 @@ url = https://example.com Authorization = Bearer mytoken Accept = application/json """ + let result = Parser.parseNapFile input + match result with | Result.Ok nap -> Assert.Equal("Bearer mytoken", nap.Request.Headers["Authorization"]) @@ -84,7 +99,8 @@ Accept = application/json [<Fact>] let ``Parse full format with assertions`` () = - let input = """ + let input = + """ [request] method = GET url = https://example.com @@ -95,19 +111,32 @@ body.id exists headers.Content-Type contains "json" duration < 500ms """ + let result = Parser.parseNapFile input + match result with | Result.Ok nap -> Assert.Equal(4, nap.Assertions.Length) Assert.Equal({ Target = "status"; Op = Equals "200" }, nap.Assertions[0]) Assert.Equal({ Target = "body.id"; Op = Exists }, nap.Assertions[1]) - Assert.Equal({ Target = "headers.Content-Type"; Op = Contains "json" }, nap.Assertions[2]) - Assert.Equal({ Target = "duration"; Op = LessThan "500ms" }, nap.Assertions[3]) + + Assert.Equal( + { Target = "headers.Content-Type" + Op = Contains "json" }, + nap.Assertions[2] + ) + + Assert.Equal( + { Target = "duration" + Op = LessThan "500ms" }, + nap.Assertions[3] + ) | Result.Error e -> failwith e [<Fact>] let ``Parse full format with vars`` () = - let input = """ + let input = + """ [vars] userId = "42" baseUrl = "https://example.com" @@ -116,7 +145,9 @@ baseUrl = "https://example.com" method = GET url = {{baseUrl}}/users/{{userId}} """ + let result = 
Parser.parseNapFile input + match result with | Result.Ok nap -> Assert.Equal("42", nap.Vars["userId"]) @@ -126,7 +157,8 @@ url = {{baseUrl}}/users/{{userId}} [<Fact>] let ``Parse full format with script block`` () = - let input = """ + let input = + """ [request] method = GET url = https://example.com @@ -135,7 +167,9 @@ url = https://example.com pre = ./scripts/auth.fsx post = ./scripts/validate.fsx """ + let result = Parser.parseNapFile input + match result with | Result.Ok nap -> Assert.Equal(Some "./scripts/auth.fsx", nap.Script.Pre) @@ -145,20 +179,25 @@ post = ./scripts/validate.fsx [<Fact>] let ``Parse full format with request body`` () = let tq = "\"\"\"" + let input = - "[request]\n" + - "method = POST\n" + - "url = https://example.com/api\n" + - "\n" + - "[request.body]\n" + - "content-type = application/json\n" + - tq + "\n" + - "{ \"name\": \"test\" }\n" + - tq + "\n" + - "\n" + - "[assert]\n" + - "status = 201\n" + "[request]\n" + + "method = POST\n" + + "url = https://example.com/api\n" + + "\n" + + "[request.body]\n" + + "content-type = application/json\n" + + tq + + "\n" + + "{ \"name\": \"test\" }\n" + + tq + + "\n" + + "\n" + + "[assert]\n" + + "status = 201\n" + let result = Parser.parseNapFile input + match result with | Result.Ok nap -> Assert.Equal(POST, nap.Request.Method) @@ -172,31 +211,37 @@ let ``Parse full format with request body`` () = [<Fact>] let ``Parse full format with headers and body`` () = let tq = "\"\"\"" + let input = - "[request]\n" + - "method = POST\n" + - "url = https://example.com/api\n" + - "\n" + - "[request.headers]\n" + - "Accept = application/json\n" + - "\n" + - "[request.body]\n" + - "content-type = application/json\n" + - tq + "\n" + - "{ \"key\": \"value\" }\n" + - tq + "\n" + "[request]\n" + + "method = POST\n" + + "url = https://example.com/api\n" + + "\n" + + "[request.headers]\n" + + "Accept = application/json\n" + + "\n" + + "[request.body]\n" + + "content-type = application/json\n" + + tq + + "\n" + + "{ 
\"key\": \"value\" }\n" + + tq + + "\n" + let result = Parser.parseNapFile input + match result with | Result.Ok nap -> Assert.Equal("application/json", nap.Request.Headers["Accept"]) Assert.True(nap.Request.Body.IsSome) | Result.Error e -> failwith e -// ─── Parser: .naplist ────────────────────────────────────────── +// ─── Parser: .naplist ─────────────────────── Spec: naplist-file, naplist-meta, naplist-vars, naplist-steps, naplist-nap-step, naplist-folder-step, naplist-script-step [<Fact>] let ``Parse naplist with steps`` () = - let input = """ + let input = + """ [meta] name = "Smoke Suite" env = staging @@ -210,7 +255,9 @@ timeout = "5000" ./regression.naplist ./scripts/setup.fsx """ + let result = Parser.parseNapList input + match result with | Result.Ok playlist -> Assert.Equal(Some "Smoke Suite", playlist.Meta.Name) @@ -223,102 +270,111 @@ timeout = "5000" Assert.Equal(ScriptStep "./scripts/setup.fsx", playlist.Steps[3]) | Result.Error e -> failwith e -// ─── Environment ─────────────────────────────────────────────── +// ─── Environment ─────────────────────────── Spec: env-file, env-interpolation, env-resolution, cli-var [<Fact>] let ``Parse env file`` () = - let content = """ + let content = + """ baseUrl = "https://example.com" token = "abc123" # comment empty = """ + let vars = Environment.parseEnvFile content Assert.Equal("https://example.com", vars["baseUrl"]) Assert.Equal("abc123", vars["token"]) [<Fact>] let ``Resolve variables in string`` () = - let vars = Map.ofList [("baseUrl", "https://example.com"); ("id", "42")] + let vars = Map.ofList [ ("baseUrl", "https://example.com"); ("id", "42") ] Assert.Equal("https://example.com/users/42", Environment.resolveVars vars "{{baseUrl}}/users/{{id}}") [<Fact>] let ``Unresolved variables remain`` () = - let vars = Map.ofList [("baseUrl", "https://example.com")] + let vars = Map.ofList [ ("baseUrl", "https://example.com") ] Assert.Equal("https://example.com/{{unknown}}", Environment.resolveVars vars 
"{{baseUrl}}/{{unknown}}") [<Fact>] let ``CLI vars override file vars`` () = let dir = System.IO.Path.GetTempPath() - let fileVars = Map.ofList [("key", "file-value")] - let cliVars = Map.ofList [("key", "cli-value")] + let fileVars = Map.ofList [ ("key", "file-value") ] + let cliVars = Map.ofList [ ("key", "cli-value") ] let result = Environment.loadEnvironment dir None cliVars fileVars Assert.Equal("cli-value", result["key"]) -// ─── Assertions ──────────────────────────────────────────────── +// ─── Assertions ──────────────────────────── Spec: assert-status, assert-equals, assert-exists, assert-contains, assert-lt [<Fact>] let ``Assert status equals`` () = - let response: NapResponse = { - StatusCode = 200 - Headers = Map.ofList [("Content-Type", "application/json")] - Body = """{"id": 42, "name": "Alice"}""" - Duration = TimeSpan.FromMilliseconds(100.0) - } - let assertions = [ - { Target = "status"; Op = Equals "200" } - { Target = "body.id"; Op = Exists } - { Target = "body.name"; Op = Equals "Alice" } - { Target = "headers.Content-Type"; Op = Contains "json" } - { Target = "duration"; Op = LessThan "500ms" } - ] + let response: NapResponse = + { StatusCode = 200 + Headers = Map.ofList [ ("Content-Type", "application/json") ] + Body = """{"id": 42, "name": "Alice"}""" + Duration = TimeSpan.FromMilliseconds(100.0) } + + let assertions = + [ { Target = "status"; Op = Equals "200" } + { Target = "body.id"; Op = Exists } + { Target = "body.name" + Op = Equals "Alice" } + { Target = "headers.Content-Type" + Op = Contains "json" } + { Target = "duration" + Op = LessThan "500ms" } ] + let results = Runner.evaluateAssertions assertions response Assert.All(results, fun r -> Assert.True(r.Passed, $"{r.Assertion.Target}: expected {r.Expected}, got {r.Actual}")) [<Fact>] let ``Assert status fails on mismatch`` () = - let response: NapResponse = { - StatusCode = 404 - Headers = Map.empty - Body = "" - Duration = TimeSpan.FromMilliseconds(50.0) - } - let assertions = [{ 
Target = "status"; Op = Equals "200" }] + let response: NapResponse = + { StatusCode = 404 + Headers = Map.empty + Body = "" + Duration = TimeSpan.FromMilliseconds(50.0) } + + let assertions = [ { Target = "status"; Op = Equals "200" } ] let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) Assert.Equal("404", results[0].Actual) [<Fact>] let ``Assert body path missing`` () = - let response: NapResponse = { - StatusCode = 200 - Headers = Map.empty - Body = """{"name": "test"}""" - Duration = TimeSpan.FromMilliseconds(50.0) - } - let assertions = [{ Target = "body.missing"; Op = Exists }] + let response: NapResponse = + { StatusCode = 200 + Headers = Map.empty + Body = """{"name": "test"}""" + Duration = TimeSpan.FromMilliseconds(50.0) } + + let assertions = [ { Target = "body.missing"; Op = Exists } ] let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) [<Fact>] let ``Assert duration greater than`` () = - let response: NapResponse = { - StatusCode = 200 - Headers = Map.empty - Body = "" - Duration = TimeSpan.FromMilliseconds(600.0) - } - let assertions = [{ Target = "duration"; Op = LessThan "500ms" }] + let response: NapResponse = + { StatusCode = 200 + Headers = Map.empty + Body = "" + Duration = TimeSpan.FromMilliseconds(600.0) } + + let assertions = + [ { Target = "duration" + Op = LessThan "500ms" } ] + let results = Runner.evaluateAssertions assertions response Assert.False(results[0].Passed) -// ─── Script execution ────────────────────────────────────────── +// ─── Script execution ────────────────────── Spec: script-fsx, script-runner [<Fact>] let ``runScript executes fsx and captures stdout`` () = let dir = System.IO.Path.GetTempPath() let scriptPath = System.IO.Path.Combine(dir, "nap-test-script.fsx") System.IO.File.WriteAllText(scriptPath, "printfn \"[test] hello from script\"\nprintfn \"[test] done\"") + try let result = Runner.runScript scriptPath |> Async.RunSynchronously 
Assert.True(result.Passed, $"Script should pass, but got error: {result.Error}") @@ -341,6 +397,7 @@ let ``runScript reports failure for invalid script`` () = let dir = System.IO.Path.GetTempPath() let scriptPath = System.IO.Path.Combine(dir, "nap-test-bad-script.fsx") System.IO.File.WriteAllText(scriptPath, "let x: int = \"not an int\"") + try let result = Runner.runScript scriptPath |> Async.RunSynchronously Assert.False(result.Passed, "Invalid script should fail") @@ -357,15 +414,19 @@ let ``runScript reports failure for invalid script`` () = [<Fact>] let ``JSON output includes log field for script results`` () = - let result: NapResult = { - File = "setup.fsx" - Request = { Method = GET; Url = ""; Headers = Map.empty; Body = None } - Response = None - Assertions = [] - Passed = true - Error = None - Log = ["[setup] Seeded data"; "[setup] Done"] - } + let result: NapResult = + { File = "setup.fsx" + Request = + { Method = GET + Url = "" + Headers = Map.empty + Body = None } + Response = None + Assertions = [] + Passed = true + Error = None + Log = [ "[setup] Seeded data"; "[setup] Done" ] } + let json = Output.formatJson result let doc = System.Text.Json.JsonDocument.Parse(json) let root = doc.RootElement @@ -385,15 +446,24 @@ let ``JSON output includes log field for script results`` () = [<Fact>] let ``JSON output omits log field when empty`` () = - let result: NapResult = { - File = "test.nap" - Request = { Method = GET; Url = "https://example.com"; Headers = Map.empty; Body = None } - Response = Some { StatusCode = 200; Headers = Map.empty; Body = ""; Duration = TimeSpan.FromMilliseconds(50.0) } - Assertions = [] - Passed = true - Error = None - Log = [] - } + let result: NapResult = + { File = "test.nap" + Request = + { Method = GET + Url = "https://example.com" + Headers = Map.empty + Body = None } + Response = + Some + { StatusCode = 200 + Headers = Map.empty + Body = "" + Duration = TimeSpan.FromMilliseconds(50.0) } + Assertions = [] + Passed = true + 
Error = None + Log = [] } + let json = Output.formatJson result let doc = System.Text.Json.JsonDocument.Parse(json) let root = doc.RootElement @@ -406,20 +476,33 @@ let ``JSON output omits log field when empty`` () = Assert.True(root.TryGetProperty("headers") |> fst, "Should have headers for HTTP result") Assert.Equal(0, root.GetProperty("assertions").GetArrayLength()) -// ─── Output ──────────────────────────────────────────────────── +// ─── Output ──────────────────────────────── Spec: output-json, output-junit [<Fact>] let ``JUnit output is valid XML`` () = - let result: NapResult = { - File = "test.nap" - Request = { Method = GET; Url = "https://example.com"; Headers = Map.empty; Body = None } - Response = Some { StatusCode = 200; Headers = Map.empty; Body = ""; Duration = TimeSpan.FromMilliseconds(50.0) } - Assertions = [{ Assertion = { Target = "status"; Op = Equals "200" }; Passed = true; Expected = "200"; Actual = "200" }] - Passed = true - Error = None - Log = [] - } - let xml = Output.formatJUnit [result] + let result: NapResult = + { File = "test.nap" + Request = + { Method = GET + Url = "https://example.com" + Headers = Map.empty + Body = None } + Response = + Some + { StatusCode = 200 + Headers = Map.empty + Body = "" + Duration = TimeSpan.FromMilliseconds(50.0) } + Assertions = + [ { Assertion = { Target = "status"; Op = Equals "200" } + Passed = true + Expected = "200" + Actual = "200" } ] + Passed = true + Error = None + Log = [] } + + let xml = Output.formatJUnit [ result ] Assert.Contains("<?xml", xml) Assert.Contains("testsuites", xml) Assert.Contains("testcase", xml) @@ -430,15 +513,24 @@ let ``JUnit output is valid XML`` () = [<Fact>] let ``JSON output is parseable`` () = - let result: NapResult = { - File = "test.nap" - Request = { Method = GET; Url = "https://example.com"; Headers = Map.empty; Body = None } - Response = Some { StatusCode = 200; Headers = Map.empty; Body = """{"ok":true}"""; Duration = TimeSpan.FromMilliseconds(50.0) } - 
Assertions = [] - Passed = true - Error = None - Log = [] - } + let result: NapResult = + { File = "test.nap" + Request = + { Method = GET + Url = "https://example.com" + Headers = Map.empty + Body = None } + Response = + Some + { StatusCode = 200 + Headers = Map.empty + Body = """{"ok":true}""" + Duration = TimeSpan.FromMilliseconds(50.0) } + Assertions = [] + Passed = true + Error = None + Log = [] } + let json = Output.formatJson result let doc = System.Text.Json.JsonDocument.Parse(json) let root = doc.RootElement diff --git a/src/Napper.Core.Tests/coverage.runsettings b/src/Napper.Core.Tests/coverage.runsettings new file mode 100644 index 0000000..6cbc2dc --- /dev/null +++ b/src/Napper.Core.Tests/coverage.runsettings @@ -0,0 +1,14 @@ +<?xml version="1.0" encoding="utf-8"?> +<RunSettings> + <DataCollectionRunSettings> + <DataCollectors> + <DataCollector friendlyName="XPlat Code Coverage"> + <Configuration> + <Format>cobertura,lcov</Format> + <Include>[Napper.Core]*</Include> + <IncludeTestAssembly>false</IncludeTestAssembly> + </Configuration> + </DataCollector> + </DataCollectors> + </DataCollectionRunSettings> +</RunSettings> diff --git a/src/Napper.Core.Tests/xunit.runner.json b/src/Napper.Core.Tests/xunit.runner.json new file mode 100644 index 0000000..2f4fa7c --- /dev/null +++ b/src/Napper.Core.Tests/xunit.runner.json @@ -0,0 +1,4 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json", + "stopOnFail": true +} diff --git a/src/Napper.Core/CurlGenerator.fs b/src/Napper.Core/CurlGenerator.fs new file mode 100644 index 0000000..f83b577 --- /dev/null +++ b/src/Napper.Core/CurlGenerator.fs @@ -0,0 +1,38 @@ +/// Generate curl commands from parsed NapRequest data. +/// Shared by CLI and LSP — no IDE-specific code. 
+module Napper.Core.CurlGenerator + +open Napper.Core + +let private methodString (m: HttpMethod) : string = + match m with + | GET -> "GET" + | POST -> "POST" + | PUT -> "PUT" + | PATCH -> "PATCH" + | DELETE -> "DELETE" + | HEAD -> "HEAD" + | OPTIONS -> "OPTIONS" + +let private escapeShellArg (s: string) : string = s.Replace("'", "'\\''") + +let private headerFlag (key: string) (value: string) : string = + $" -H '{escapeShellArg key}: {escapeShellArg value}'" + +let private bodyFlag (body: RequestBody) : string = $" -d '{escapeShellArg body.Content}'" + +/// Generate a curl command string from a NapRequest +let toCurl (request: NapRequest) : string = + let sb = System.Text.StringBuilder() + + sb.Append($"curl -X {methodString request.Method} '{escapeShellArg request.Url}'") + |> ignore + + request.Headers |> Map.iter (fun k v -> sb.Append(headerFlag k v) |> ignore) + + request.Body + |> Option.iter (fun b -> + sb.Append($" -H 'Content-Type: {escapeShellArg b.ContentType}'") |> ignore + sb.Append(bodyFlag b) |> ignore) + + sb.ToString() diff --git a/src/Nap.Core/Environment.fs b/src/Napper.Core/Environment.fs similarity index 59% rename from src/Nap.Core/Environment.fs rename to src/Napper.Core/Environment.fs index e3cf277..5c7978d 100644 --- a/src/Nap.Core/Environment.fs +++ b/src/Napper.Core/Environment.fs @@ -1,18 +1,22 @@ -module Nap.Core.Environment +// Specs: env-file, env-base, env-local, env-named, env-resolution, env-interpolation, cli-var +module Napper.Core.Environment open System open System.IO + /// Parse a .napenv file (simple key = value format, TOML-like) let parseEnvFile (content: string) : Map<string, string> = - content.Split([|'\n'; '\r'|], StringSplitOptions.RemoveEmptyEntries) + content.Split([| '\n'; '\r' |], StringSplitOptions.RemoveEmptyEntries) |> Array.choose (fun line -> let line = line.Trim() - if line = "" || line.StartsWith "#" then None + + if line = "" || line.StartsWith "#" then + None elif line.Contains "=" then - let parts = 
line.Split([|'='|], 2) - Some (parts.[0].Trim(), parts.[1].Trim().Trim('"')) - else None - ) + let parts = line.Split([| '=' |], 2) + Some(parts.[0].Trim(), parts.[1].Trim().Trim('"')) + else + None) |> Map.ofArray let private mergeInto (source: Map<string, string>) (target: Map<string, string>) = @@ -25,14 +29,21 @@ let private mergeInto (source: Map<string, string>) (target: Map<string, string> /// 3. Named environment file (.napenv.{name}) /// 4. Base .napenv /// 5. [vars] block in the .nap file -let loadEnvironment (dir: string) (envName: string option) (cliVars: Map<string, string>) (fileVars: Map<string, string>) : Map<string, string> = +let loadEnvironment + (dir: string) + (envName: string option) + (cliVars: Map<string, string>) + (fileVars: Map<string, string>) + : Map<string, string> = let readIfExists path = if File.Exists path then File.ReadAllText path |> parseEnvFile - else Map.empty + else + Map.empty let baseEnv = readIfExists (Path.Combine(dir, ".napenv")) Logger.debug $"Loaded .napenv: {baseEnv.Count} vars" + let namedEnv = match envName with | Some name -> @@ -40,6 +51,7 @@ let loadEnvironment (dir: string) (envName: string option) (cliVars: Map<string, Logger.debug $"Loaded .napenv.{name}: {env.Count} vars" env | None -> Map.empty + let localEnv = readIfExists (Path.Combine(dir, ".napenv.local")) Logger.debug $"Loaded .napenv.local: {localEnv.Count} vars" @@ -53,17 +65,23 @@ let loadEnvironment (dir: string) (envName: string option) (cliVars: Map<string, let resolveVars (vars: Map<string, string>) (input: string) : string = let sb = System.Text.StringBuilder() let mutable i = 0 + while i < input.Length do if i + 3 < input.Length && input.[i] = '{' && input.[i + 1] = '{' then let start = i + 2 let mutable j = start - while j < input.Length && input.[j] <> '}' && Char.IsLetterOrDigit(input.[j]) || input.[j] = '_' do + + while j < input.Length && input.[j] <> '}' && Char.IsLetterOrDigit(input.[j]) + || input.[j] = '_' do j <- j + 1 + if j + 1 < 
input.Length && input.[j] = '}' && input.[j + 1] = '}' && j > start then let key = input.Substring(start, j - start) + match Map.tryFind key vars with | Some v -> sb.Append(v) |> ignore | None -> sb.Append(input, i, j + 2 - i) |> ignore + i <- j + 2 else sb.Append(input.[i]) |> ignore @@ -71,31 +89,55 @@ let resolveVars (vars: Map<string, string>) (input: string) : string = else sb.Append(input.[i]) |> ignore i <- i + 1 + sb.ToString() +/// Detect available environment names by scanning a directory for .napenv.{name} files. +/// Excludes .napenv (base) and .napenv.local (secrets). Returns sorted unique names. +let detectEnvironmentNames (dir: string) : string list = + if not (Directory.Exists dir) then + [] + else + let prefix = ".napenv." + let localSuffix = ".local" + + Directory.GetFiles(dir, ".napenv.*") + |> Array.choose (fun path -> + let fileName = Path.GetFileName(path) + + if fileName = ".napenv" then + None + elif fileName.EndsWith(localSuffix) then + None + elif fileName.StartsWith(prefix) then + Some(fileName.Substring(prefix.Length)) + else + None) + |> Array.distinct + |> Array.sort + |> Array.toList + /// Resolve all variables in a NapFile's request let resolveNapFile (vars: Map<string, string>) (napFile: NapFile) : NapFile = let resolve = resolveVars vars + { napFile with - Request = { - napFile.Request with + Request = + { napFile.Request with Url = resolve napFile.Request.Url Headers = napFile.Request.Headers |> Map.map (fun _ v -> resolve v) Body = napFile.Request.Body - |> Option.map (fun b -> { b with Content = resolve b.Content }) - } + |> Option.map (fun b -> { b with Content = resolve b.Content }) } Assertions = - napFile.Assertions |> List.map (fun a -> + napFile.Assertions + |> List.map (fun a -> { a with Op = match a.Op with - | Equals v -> Equals (resolve v) - | Contains v -> Contains (resolve v) - | Matches v -> Matches (resolve v) - | LessThan v -> LessThan (resolve v) - | GreaterThan v -> GreaterThan (resolve v) - | Exists -> Exists - 
} - ) - } + | Equals v -> Equals(resolve v) + | Contains v -> Contains(resolve v) + | Matches v -> Matches(resolve v) + | LessThan v -> LessThan(resolve v) + | GreaterThan v -> GreaterThan(resolve v) + | Exists -> Exists }) } diff --git a/src/Napper.Core/HttpToNapConverter.fs b/src/Napper.Core/HttpToNapConverter.fs new file mode 100644 index 0000000..329a292 --- /dev/null +++ b/src/Napper.Core/HttpToNapConverter.fs @@ -0,0 +1,162 @@ +module Napper.Core.HttpToNapConverter + +open System +open System.Text.Json +open DotHttp +open Napper.Core.OpenApiTypes + +type ConvertWarning = + { RequestName: string option + Message: string } + +type ConvertResult = + { GeneratedFiles: (string * string) list + Warnings: ConvertWarning list } + +[<Literal>] +let private ScriptWarningPrefix = "Script block not converted: " + +[<Literal>] +let private EnvParseError = "Failed to parse environment JSON" + +let private slugify (text: string) : string = + text.ToLowerInvariant() + |> Seq.map (fun c -> if Char.IsLetterOrDigit c then c else '-') + |> String.Concat + |> fun s -> s.Trim('-') + +let private requestSlug (req: HttpRequest) : string = + match req.Name with + | Some name -> slugify name + | None -> + let urlPart = + let uri = req.Url.TrimStart('/') + let noQuery = uri.Split([| '?' 
|], 2).[0] + slugify noQuery + + sprintf "%s-%s" (req.Method.ToLowerInvariant()) urlPart + +let private padIndex (idx: int) (total: int) : string = + let digits = + if total >= PadLargeThreshold then + PadDigitsLarge + else + PadDigitsDefault + + (string (idx + 1)).PadLeft(digits, '0') + +let private buildMeta (req: HttpRequest) : string list = + match req.Name with + | Some name -> [ SectionMeta; sprintf "%s = %s" KeyName name; "" ] + | None -> [] + +let private buildVars (fileVars: (string * string) list) : string list = + if List.isEmpty fileVars then + [] + else + let lines = fileVars |> List.map (fun (k, v) -> sprintf "%s = \"%s\"" k v) + [ SectionVars ] @ lines @ [ "" ] + +let private buildRequest (req: HttpRequest) : string list = + [ SectionRequest; sprintf "%s %s" (req.Method.ToUpperInvariant()) req.Url; "" ] + +let private buildHeaders (req: HttpRequest) : string list = + if List.isEmpty req.Headers then + [] + else + let lines = req.Headers |> List.map (fun (k, v) -> sprintf "%s = %s" k v) + [ SectionRequestHeaders ] @ lines @ [ "" ] + +let private buildBody (req: HttpRequest) : string list = + match req.Body with + | None -> [] + | Some body -> + let contentType = + req.Headers + |> List.tryFind (fun (k, _) -> String.Equals(k, HeaderContentType, StringComparison.OrdinalIgnoreCase)) + |> Option.map snd + |> Option.defaultValue ContentTypeJson + + [ SectionRequestBody + sprintf "content-type = %s" contentType + TripleQuote + body + TripleQuote + "" ] + +let private buildComments (req: HttpRequest) : string list = + if List.isEmpty req.Comments then + [] + else + req.Comments |> List.map (sprintf "# %s") + +let private buildNapContent (req: HttpRequest) (fileVars: (string * string) list) : string = + (buildComments req + @ buildMeta req + @ buildVars fileVars + @ buildRequest req + @ buildHeaders req + @ buildBody req) + |> String.concat "\n" + +let private checkWarnings (req: HttpRequest) : ConvertWarning list = + let warnings = 
ResizeArray<ConvertWarning>() + + match req.PreScript with + | Some s -> + warnings.Add + { RequestName = req.Name + Message = sprintf "%s%s" ScriptWarningPrefix (s.Substring(0, min 50 s.Length)) } + | None -> () + + match req.PostScript with + | Some s -> + warnings.Add + { RequestName = req.Name + Message = sprintf "%s%s" ScriptWarningPrefix (s.Substring(0, min 50 s.Length)) } + | None -> () + + Seq.toList warnings + +let convert (httpFile: HttpFile) : ConvertResult = + let total = httpFile.Requests.Length + + let files = + httpFile.Requests + |> List.mapi (fun i req -> + let prefix = padIndex i total + let slug = requestSlug req + let fileName = sprintf "%s_%s%s" prefix slug NapExtension + let content = buildNapContent req httpFile.FileVariables + (fileName, content)) + + let warnings = httpFile.Requests |> List.collect checkWarnings + + { GeneratedFiles = files + Warnings = warnings } + +let convertEnvJson (json: string) (isPrivate: bool) : Result<(string * string) list, string> = + try + let doc = JsonDocument.Parse(json) + + let files = + [ for prop in doc.RootElement.EnumerateObject() do + let envName = prop.Name + + let vars = + [ for v in prop.Value.EnumerateObject() do + sprintf "%s = \"%s\"" v.Name (v.Value.GetString()) ] + + let content = String.Join("\n", vars) + "\n" + + let fileName = + if isPrivate then + sprintf "%s.local" NapenvExtension + else + sprintf "%s.%s" NapenvExtension envName + + (fileName, content) ] + + Ok files + with ex -> + Error(sprintf "%s: %s" EnvParseError ex.Message) diff --git a/src/Nap.Core/Logger.fs b/src/Napper.Core/Logger.fs similarity index 77% rename from src/Nap.Core/Logger.fs rename to src/Napper.Core/Logger.fs index cc3caaa..d2ff2f6 100644 --- a/src/Nap.Core/Logger.fs +++ b/src/Napper.Core/Logger.fs @@ -1,4 +1,5 @@ -module Nap.Core.Logger +// Specs: cli-verbose +module Napper.Core.Logger open System open System.IO @@ -13,21 +14,22 @@ type LogLevel = let private levelTag (level: LogLevel) : string = match level with | 
Debug -> "DEBUG" - | Info -> "INFO" - | Warn -> "WARN" + | Info -> "INFO" + | Warn -> "WARN" | Error -> "ERROR" -let mutable private minLevel : LogLevel = Info -let mutable private writer : StreamWriter option = None +let mutable private minLevel: LogLevel = Info +let mutable private writer: StreamWriter option = None let private formatLine (level: LogLevel) (message: string) : string = let ts = DateTime.UtcNow.ToString("yyyy-MM-ddTHH:mm:ss.fffZ") $"[{ts}] [{levelTag level}] {message}" -/// Initialize the logger: creates timestamped log file in binary dir +/// Initialize the logger: creates timestamped log file in base directory let init (verbose: bool) : unit = minLevel <- if verbose then Debug else Info let dir = AppContext.BaseDirectory + Directory.CreateDirectory(dir) |> ignore let ts = DateTime.UtcNow.ToString("yyyy-MM-ddTHH-mm-ss") let fileName = $"napper-{ts}.log" let filePath = Path.Combine(dir, fileName) @@ -42,8 +44,8 @@ let log (level: LogLevel) (message: string) : unit = | None -> () let debug msg = log Debug msg -let info msg = log Info msg -let warn msg = log Warn msg +let info msg = log Info msg +let warn msg = log Warn msg let error msg = log Error msg /// Flush and close the log file diff --git a/src/Nap.Core/Nap.Core.fsproj b/src/Napper.Core/Napper.Core.fsproj similarity index 74% rename from src/Nap.Core/Nap.Core.fsproj rename to src/Napper.Core/Napper.Core.fsproj index 42cc1fd..8203d04 100644 --- a/src/Nap.Core/Nap.Core.fsproj +++ b/src/Napper.Core/Napper.Core.fsproj @@ -13,6 +13,9 @@ <Compile Include="Runner.fs" /> <Compile Include="Output.fs" /> <Compile Include="OpenApiGenerator.fs" /> + <Compile Include="HttpToNapConverter.fs" /> + <Compile Include="CurlGenerator.fs" /> + <Compile Include="SectionScanner.fs" /> </ItemGroup> <ItemGroup> @@ -20,4 +23,8 @@ <PackageReference Include="Microsoft.OpenApi" Version="3.3.1" /> </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\DotHttp\DotHttp.fsproj" /> + </ItemGroup> + </Project> diff --git 
a/src/Nap.Core/OpenApiGenerator.fs b/src/Napper.Core/OpenApiGenerator.fs similarity index 69% rename from src/Nap.Core/OpenApiGenerator.fs rename to src/Napper.Core/OpenApiGenerator.fs index 79ce3fe..449197c 100644 --- a/src/Nap.Core/OpenApiGenerator.fs +++ b/src/Napper.Core/OpenApiGenerator.fs @@ -1,31 +1,33 @@ -module Nap.Core.OpenApiGenerator +// Specs: openapi-generate, openapi-input, openapi-oas3, openapi-swagger2, openapi-nap-gen, +// openapi-tag-dirs, openapi-naplist-gen, openapi-napenv-gen, openapi-baseurl, +// openapi-params, openapi-body-gen, openapi-assert-gen, openapi-query-params, +// openapi-auth, openapi-error-gen, openapi-ref, openapi-meta-flag +module Napper.Core.OpenApiGenerator open System open System.Net.Http open System.Text.Json open Microsoft.OpenApi -open Nap.Core.OpenApiTypes +open Napper.Core.OpenApiTypes -// Type aliases so tests/callers can use: open Nap.Core.OpenApiGenerator +// Type aliases so tests/callers can use: open Napper.Core.OpenApiGenerator type GeneratedFile = OpenApiTypes.GeneratedFile type GenerationResult = OpenApiTypes.GenerationResult // --- Internal types --- [<NoComparison; NoEquality>] -type private EndpointInfo = { - Method: string - UrlPath: string - Operation: OpenApiOperation - QueryParams: string list - AuthHeaders: AuthHeader list -} +type private EndpointInfo = + { Method: string + UrlPath: string + Operation: OpenApiOperation + QueryParams: string list + AuthHeaders: AuthHeader list } [<NoComparison; NoEquality>] -type private TagGroup = { - Tag: string option - Endpoints: EndpointInfo list -} +type private TagGroup = + { Tag: string option + Endpoints: EndpointInfo list } // --- Null-safe helpers --- @@ -46,24 +48,31 @@ let private safeDict (dict: Collections.Generic.IDictionary<'K, 'V>) : ('K * 'V) // --- HTTP method order --- -let private methodOrder = [ - HttpMethod.Get; HttpMethod.Post; HttpMethod.Put - HttpMethod.Patch; HttpMethod.Delete; HttpMethod.Head; HttpMethod.Options -] +let private methodOrder = 
+ [ HttpMethod.Get + HttpMethod.Post + HttpMethod.Put + HttpMethod.Patch + HttpMethod.Delete + HttpMethod.Head + HttpMethod.Options ] // --- Pure text helpers --- let private convertPathParams (urlPath: string) : string = let sb = Text.StringBuilder() + for c in urlPath do if c = '{' then sb.Append("{{") |> ignore elif c = '}' then sb.Append("}}") |> ignore else sb.Append(c) |> ignore + sb.ToString() let private splitOnDelimiters (text: string) : string list = let parts = Collections.Generic.List<string>() let current = Text.StringBuilder() + for c in text do if c = '/' || c = '{' || c = '}' || c = ' ' then if current.Length > 0 then @@ -71,8 +80,10 @@ let private splitOnDelimiters (text: string) : string list = current.Clear() |> ignore else current.Append(c) |> ignore + if current.Length > 0 then parts.Add(current.ToString().ToLowerInvariant()) + Seq.toList parts let private pathToSlug (method: string) (urlPath: string) : string = @@ -94,26 +105,38 @@ let rec private generateExample (schema: IOpenApiSchema) (w: Utf8JsonWriter) : u and private writeByType (schema: IOpenApiSchema) (w: Utf8JsonWriter) : unit = let t = schema.Type - if not t.HasValue then w.WriteNullValue() + + if not t.HasValue then + w.WriteNullValue() else let v = t.Value - if v.HasFlag(JsonSchemaType.String) then w.WriteStringValue(SchemaExampleString) - elif v.HasFlag(JsonSchemaType.Number) then w.WriteNumberValue(0) - elif v.HasFlag(JsonSchemaType.Integer) then w.WriteNumberValue(0) - elif v.HasFlag(JsonSchemaType.Boolean) then w.WriteBooleanValue(true) + + if v.HasFlag(JsonSchemaType.String) then + w.WriteStringValue(SchemaExampleString) + elif v.HasFlag(JsonSchemaType.Number) then + w.WriteNumberValue(0) + elif v.HasFlag(JsonSchemaType.Integer) then + w.WriteNumberValue(0) + elif v.HasFlag(JsonSchemaType.Boolean) then + w.WriteBooleanValue(true) elif v.HasFlag(JsonSchemaType.Array) then w.WriteStartArray() + match box schema.Items with | null -> () | _ -> generateExample schema.Items w + 
w.WriteEndArray() elif v.HasFlag(JsonSchemaType.Object) then w.WriteStartObject() + for k, propSchema in safeDict schema.Properties do w.WritePropertyName(k) generateExample propSchema w + w.WriteEndObject() - else w.WriteNullValue() + else + w.WriteNullValue() let private schemaToJson (schema: IOpenApiSchema) : string = use stream = new IO.MemoryStream() @@ -137,10 +160,10 @@ let private extractRequestBody (op: OpenApiOperation) : string option = | null -> match box media.Schema with | null -> None - | _ -> Some (schemaToJson media.Schema) + | _ -> Some(schemaToJson media.Schema) | _ -> let opts = JsonSerializerOptions(WriteIndented = true) - Some (media.Example.ToJsonString(opts)) + Some(media.Example.ToJsonString(opts)) | _ -> None // --- Status code helpers --- @@ -167,6 +190,7 @@ let private extractResponseSchema (responses: OpenApiResponses) : IOpenApiSchema | _ when responses.Count = 0 -> None | _ -> let code = string (findSuccessStatus responses) + match responses.TryGetValue(code) with | true, resp -> match box resp.Content with @@ -186,15 +210,19 @@ let private extractPathParams (urlPath: string) : string list = let result = Collections.Generic.List<string>() let current = Text.StringBuilder() let mutable inside = false + for c in urlPath do if c = '{' then inside <- true current.Clear() |> ignore elif c = '}' && inside then inside <- false - if current.Length > 0 then result.Add(current.ToString()) + + if current.Length > 0 then + result.Add(current.ToString()) elif inside then current.Append(c) |> ignore + Seq.toList result let private extractQueryParams (op: OpenApiOperation) : string list = @@ -208,19 +236,31 @@ let private resolveScheme (scheme: OpenApiSecuritySchemeReference) : AuthHeader match box scheme with | null -> None | _ -> - if not scheme.Type.HasValue then None + if not scheme.Type.HasValue then + None else match scheme.Type.Value with | SecuritySchemeType.Http -> if scheme.Scheme = BearerScheme then - Some { HeaderName = AuthHeaderName; 
HeaderValue = sprintf "%s{{token}}" AuthBearerPrefix; VarName = "token" } + Some + { HeaderName = AuthHeaderName + HeaderValue = sprintf "%s{{token}}" AuthBearerPrefix + VarName = "token" } elif scheme.Scheme = BasicScheme then - Some { HeaderName = AuthHeaderName; HeaderValue = sprintf "%s{{basicAuth}}" AuthBasicPrefix; VarName = "basicAuth" } - else None + Some + { HeaderName = AuthHeaderName + HeaderValue = sprintf "%s{{basicAuth}}" AuthBasicPrefix + VarName = "basicAuth" } + else + None | SecuritySchemeType.ApiKey when scheme.In.HasValue && scheme.In.Value = ParameterLocation.Header -> if not (String.IsNullOrEmpty(scheme.Name)) then - Some { HeaderName = scheme.Name; HeaderValue = "{{apiKey}}"; VarName = "apiKey" } - else None + Some + { HeaderName = scheme.Name + HeaderValue = "{{apiKey}}" + VarName = "apiKey" } + else + None | _ -> None let private resolveAuth (doc: OpenApiDocument) (op: OpenApiOperation) : AuthHeader list = @@ -228,6 +268,7 @@ let private resolveAuth (doc: OpenApiDocument) (op: OpenApiOperation) : AuthHead match box doc.Components with | null -> null | _ -> doc.Components.SecuritySchemes + match box schemes with | null -> [] | _ when schemes.Count = 0 -> [] @@ -235,8 +276,9 @@ let private resolveAuth (doc: OpenApiDocument) (op: OpenApiOperation) : AuthHead let opSec = safeList op.Security let globalSec = safeList doc.Security let reqs = if not (List.isEmpty opSec) then opSec else globalSec - reqs |> List.collect (fun req -> - req |> Seq.choose (fun kv -> resolveScheme kv.Key) |> Seq.toList) + + reqs + |> List.collect (fun req -> req |> Seq.choose (fun kv -> resolveScheme kv.Key) |> Seq.toList) // --- Base URL extraction --- @@ -245,56 +287,88 @@ let private extractBaseUrl (doc: OpenApiDocument) : string = | first :: _ when not (String.IsNullOrEmpty(first.Url)) -> first.Url | _ -> DefaultBaseUrl -let private methodHasBody (m: string) : bool = - m = "post" || m = "put" || m = "patch" +let private methodHasBody (m: string) : bool = m = "post" 
|| m = "put" || m = "patch" let private padIndex (idx: int) (total: int) : string = - let digits = if total >= PadLargeThreshold then PadDigitsLarge else PadDigitsDefault + let digits = + if total >= PadLargeThreshold then + PadDigitsLarge + else + PadDigitsDefault + (string (idx + 1)).PadLeft(digits, '0') // --- .nap content builders --- let private buildMeta (ep: EndpointInfo) : string list = let name = - if not (String.IsNullOrEmpty(ep.Operation.Summary)) then ep.Operation.Summary - elif not (String.IsNullOrEmpty(ep.Operation.OperationId)) then ep.Operation.OperationId - else pathToSlug ep.Method ep.UrlPath - let lines = [ SectionMeta; sprintf "%s = %s" KeyName name; sprintf "%s = %s" KeyGenerated ValueTrue ] + if not (String.IsNullOrEmpty(ep.Operation.Summary)) then + ep.Operation.Summary + elif not (String.IsNullOrEmpty(ep.Operation.OperationId)) then + ep.Operation.OperationId + else + pathToSlug ep.Method ep.UrlPath + + let lines = + [ SectionMeta + sprintf "%s = %s" KeyName name + sprintf "%s = %s" KeyGenerated ValueTrue ] + if not (String.IsNullOrEmpty(ep.Operation.Description)) then lines @ [ sprintf "%s = %s" KeyDescription ep.Operation.Description; "" ] - else lines @ [ "" ] + else + lines @ [ "" ] let private buildVars (ep: EndpointInfo) : string list = let pathP = extractPathParams ep.UrlPath let authV = ep.AuthHeaders |> List.map (fun a -> a.VarName) let all = pathP @ ep.QueryParams @ authV - if List.isEmpty all then [] + + if List.isEmpty all then + [] else let seen = Collections.Generic.HashSet<string>() let unique = all |> List.filter (fun v -> seen.Add(v)) - [ SectionVars ] @ (unique |> List.map (fun v -> sprintf "%s = \"%s\"" v VarsPlaceholder)) @ [ "" ] + + [ SectionVars ] + @ (unique |> List.map (fun v -> sprintf "%s = \"%s\"" v VarsPlaceholder)) + @ [ "" ] let private buildQuery (qp: string list) : string = - if List.isEmpty qp then "" - else sprintf "?%s" (qp |> List.map (fun p -> sprintf "%s={{%s}}" p p) |> String.concat "&") + if 
List.isEmpty qp then + "" + else + sprintf "?%s" (qp |> List.map (fun p -> sprintf "%s={{%s}}" p p) |> String.concat "&") let private buildRequest (ep: EndpointInfo) : string list = - let url = sprintf "%s%s%s" BaseUrlVar (convertPathParams ep.UrlPath) (buildQuery ep.QueryParams) + let url = + sprintf "%s%s%s" BaseUrlVar (convertPathParams ep.UrlPath) (buildQuery ep.QueryParams) + [ SectionRequest; sprintf "%s %s" (ep.Method.ToUpperInvariant()) url; "" ] let private buildHeaders (ep: EndpointInfo) : string list = let hasBody = methodHasBody ep.Method let hasAuth = not (List.isEmpty ep.AuthHeaders) - if not hasBody && not hasAuth then [] + + if not hasBody && not hasAuth then + [] else let body = - if hasBody then [ sprintf "%s = %s" HeaderContentType ContentTypeJson; sprintf "%s = %s" HeaderAccept ContentTypeJson ] - else [] - let auth = ep.AuthHeaders |> List.map (fun a -> sprintf "%s = %s" a.HeaderName a.HeaderValue) + if hasBody then + [ sprintf "%s = %s" HeaderContentType ContentTypeJson + sprintf "%s = %s" HeaderAccept ContentTypeJson ] + else + [] + + let auth = + ep.AuthHeaders + |> List.map (fun a -> sprintf "%s = %s" a.HeaderName a.HeaderValue) + [ SectionRequestHeaders ] @ body @ auth @ [ "" ] let private buildBody (ep: EndpointInfo) : string list = - if not (methodHasBody ep.Method) then [] + if not (methodHasBody ep.Method) then + [] else match extractRequestBody ep.Operation with | None -> [] @@ -302,16 +376,23 @@ let private buildBody (ep: EndpointInfo) : string list = let private buildAssertions (op: OpenApiOperation) : string list = let status = sprintf "%s%d" AssertStatusPrefix (findSuccessStatus op.Responses) + let bodyAsserts = match extractResponseSchema op.Responses with | None -> [] | Some schema -> safeDict schema.Properties |> List.map (fun (k, _) -> sprintf "%s%s%s" AssertBodyPrefix k AssertBodyExistsSuffix) + [ SectionAssert; status ] @ bodyAsserts @ [ "" ] let private buildNapContent (ep: EndpointInfo) : string = - (buildMeta ep @ 
buildVars ep @ buildRequest ep @ buildHeaders ep @ buildBody ep @ buildAssertions ep.Operation) + (buildMeta ep + @ buildVars ep + @ buildRequest ep + @ buildHeaders ep + @ buildBody ep + @ buildAssertions ep.Operation) |> String.concat "\n" // --- Collectors --- @@ -323,80 +404,115 @@ let private collectEndpoints (doc: OpenApiDocument) : EndpointInfo list = doc.Paths |> Seq.collect (fun pathKv -> let pathItem = pathKv.Value + match box pathItem.Operations with | null -> Seq.empty | _ -> - methodOrder |> Seq.choose (fun httpMethod -> + methodOrder + |> Seq.choose (fun httpMethod -> match pathItem.Operations.TryGetValue(httpMethod) with | true, op -> let method = httpMethod.Method.ToLowerInvariant() - Some { - Method = method; UrlPath = pathKv.Key; Operation = op - QueryParams = extractQueryParams op; AuthHeaders = resolveAuth doc op - } + + Some + { Method = method + UrlPath = pathKv.Key + Operation = op + QueryParams = extractQueryParams op + AuthHeaders = resolveAuth doc op } | _ -> None)) |> Seq.toList let private groupByTag (eps: EndpointInfo list) : TagGroup list = let groups = Collections.Generic.Dictionary<string, EndpointInfo list>() + for ep in eps do let tag = match safeSeq ep.Operation.Tags with | first :: _ when not (String.IsNullOrEmpty(first.Name)) -> first.Name | _ -> "" + match groups.TryGetValue(tag) with | true, existing -> groups.[tag] <- existing @ [ ep ] | _ -> groups.[tag] <- [ ep ] - [ for kv in groups -> { Tag = (if kv.Key = "" then None else Some kv.Key); Endpoints = kv.Value } ] + + [ for kv in groups -> + { Tag = (if kv.Key = "" then None else Some kv.Key) + Endpoints = kv.Value } ] let private genGroupFiles (group: TagGroup) (idx: int ref) (total: int) : GeneratedFile list = - group.Endpoints |> List.map (fun ep -> + group.Endpoints + |> List.map (fun ep -> let slug = - if not (String.IsNullOrEmpty(ep.Operation.OperationId)) then ep.Operation.OperationId - else pathToSlug ep.Method ep.UrlPath + if not 
(String.IsNullOrEmpty(ep.Operation.OperationId)) then + ep.Operation.OperationId + else + pathToSlug ep.Method ep.UrlPath + let prefix = padIndex idx.Value total idx.Value <- idx.Value + 1 let baseName = sprintf "%s_%s%s" prefix slug NapExtension - let fileName = match group.Tag with Some t -> sprintf "%s/%s" (titleToSlug t) baseName | None -> baseName - { FileName = fileName; Content = buildNapContent ep }) + + let fileName = + match group.Tag with + | Some t -> sprintf "%s/%s" (titleToSlug t) baseName + | None -> baseName + + { FileName = fileName + Content = buildNapContent ep }) let private buildPlaylist (title: string) (files: string list) : string = - ([ SectionMeta; sprintf "%s = %s" KeyName title; ""; SectionSteps ] @ (files |> List.map (sprintf "./%s")) @ [ "" ]) + ([ SectionMeta; sprintf "%s = %s" KeyName title; ""; SectionSteps ] + @ (files |> List.map (sprintf "./%s")) + @ [ "" ]) |> String.concat "\n" -let private buildEnv (baseUrl: string) : string = - sprintf "%s = %s\n" BaseUrlKey baseUrl +let private buildEnv (baseUrl: string) : string = sprintf "%s = %s\n" BaseUrlKey baseUrl // --- Main entry point --- let generate (jsonText: string) : Result<GenerationResult, string> = try let result = OpenApiDocument.Parse(jsonText) + match box result.Document with | null -> Error ParseError | _ -> let doc = result.Document + match box doc.Paths with | null -> Error InvalidSpecError | _ -> let endpoints = collectEndpoints doc - if List.isEmpty endpoints then Error NoEndpointsError + + if List.isEmpty endpoints then + Error NoEndpointsError else let baseUrl = extractBaseUrl doc + let title = match box doc.Info with | null -> DefaultTitle | _ when String.IsNullOrEmpty(doc.Info.Title) -> DefaultTitle | _ -> doc.Info.Title + let idx = ref 0 + let napFiles = groupByTag endpoints |> List.collect (fun g -> genGroupFiles g idx endpoints.Length) + let playlist = { FileName = sprintf "%s%s" (titleToSlug title) NaplistExtension Content = buildPlaylist title (napFiles |> 
List.map (fun f -> f.FileName)) } + let environment = { FileName = NapenvExtension Content = buildEnv baseUrl } - Ok { NapFiles = napFiles; Playlist = playlist; Environment = environment } - with _ -> Error ParseError + + Ok + { NapFiles = napFiles + Playlist = playlist + Environment = environment } + with _ -> + Error ParseError diff --git a/src/Nap.Core/OpenApiTypes.fs b/src/Napper.Core/OpenApiTypes.fs similarity index 81% rename from src/Nap.Core/OpenApiTypes.fs rename to src/Napper.Core/OpenApiTypes.fs index 88a56ee..4d15b87 100644 --- a/src/Nap.Core/OpenApiTypes.fs +++ b/src/Napper.Core/OpenApiTypes.fs @@ -1,4 +1,5 @@ -module Nap.Core.OpenApiTypes +// Specs: openapi-generate, openapi-nap-gen, openapi-meta-flag +module Napper.Core.OpenApiTypes // --- String constants for .nap file generation (single location) --- @@ -124,27 +125,21 @@ let BasicScheme = "basic" // --- Auth descriptor --- -type AuthHeader = { - HeaderName: string - HeaderValue: string - VarName: string -} +type AuthHeader = + { HeaderName: string + HeaderValue: string + VarName: string } // --- Output types --- -type GeneratedFile = { - FileName: string - Content: string -} - -type GenerationResult = { - NapFiles: GeneratedFile list - Playlist: GeneratedFile - Environment: GeneratedFile -} - -type GenerateSummary = { - FileCount: int - Files: string list - PlaylistPath: string -} +type GeneratedFile = { FileName: string; Content: string } + +type GenerationResult = + { NapFiles: GeneratedFile list + Playlist: GeneratedFile + Environment: GeneratedFile } + +type GenerateSummary = + { FileCount: int + Files: string list + PlaylistPath: string } diff --git a/src/Nap.Core/Output.fs b/src/Napper.Core/Output.fs similarity index 91% rename from src/Nap.Core/Output.fs rename to src/Napper.Core/Output.fs index 85cede6..2c87b77 100644 --- a/src/Nap.Core/Output.fs +++ b/src/Napper.Core/Output.fs @@ -1,9 +1,10 @@ -module Nap.Core.Output +// Specs: output-pretty, output-junit, output-json, output-ndjson 
+module Napper.Core.Output open System open System.Text open System.Xml -open Nap.Core +open Napper.Core /// Pretty-print a NapResult to the console let formatPretty (result: NapResult) : string = @@ -17,8 +18,7 @@ let formatPretty (result: NapResult) : string = appendLine $"\x1b[{statusColor}m[{status}]\x1b[0m {fileName}" match result.Error with - | Some err -> - appendLine $" Error: {err}" + | Some err -> appendLine $" Error: {err}" | None -> () match result.Response with @@ -27,12 +27,15 @@ let formatPretty (result: NapResult) : string = if resp.StatusCode >= 200 && resp.StatusCode < 300 then "32" elif resp.StatusCode >= 400 then "31" else "33" - appendLine $" \x1b[{statusColor}m{resp.StatusCode}\x1b[0m {result.Request.Method} {result.Request.Url} ({resp.Duration.TotalMilliseconds:F0}ms)" + + appendLine + $" \x1b[{statusColor}m{resp.StatusCode}\x1b[0m {result.Request.Method} {result.Request.Url} ({resp.Duration.TotalMilliseconds:F0}ms)" // Assertions for a in result.Assertions do let icon = if a.Passed then "\x1b[32m✓\x1b[0m" else "\x1b[31m✗\x1b[0m" let target = a.Assertion.Target + let opStr = match a.Assertion.Op with | Equals v -> $"= {v}" @@ -41,6 +44,7 @@ let formatPretty (result: NapResult) : string = | Matches v -> $"matches \"{v}\"" | LessThan v -> $"< {v}" | GreaterThan v -> $"> {v}" + if a.Passed then appendLine $" {icon} {target} {opStr}" else @@ -70,19 +74,24 @@ let formatJUnit (results: NapResult list) : string = let totalTests = results.Length let failures = results |> List.filter (fun r -> not r.Passed) |> List.length + let totalTime = results |> List.choose (fun r -> r.Response |> Option.map (fun resp -> resp.Duration.TotalSeconds)) |> List.sum - sb.AppendLine($"<testsuites tests=\"{totalTests}\" failures=\"{failures}\" time=\"{totalTime:F3}\">") |> ignore - sb.AppendLine($" <testsuite name=\"nap\" tests=\"{totalTests}\" failures=\"{failures}\" time=\"{totalTime:F3}\">") |> ignore + sb.AppendLine($"<testsuites tests=\"{totalTests}\" 
failures=\"{failures}\" time=\"{totalTime:F3}\">") + |> ignore + + sb.AppendLine($" <testsuite name=\"nap\" tests=\"{totalTests}\" failures=\"{failures}\" time=\"{totalTime:F3}\">") + |> ignore for result in results do let name = result.File |> System.IO.Path.GetFileNameWithoutExtension |> System.Security.SecurityElement.Escape + let time = result.Response |> Option.map (fun r -> r.Duration.TotalSeconds) @@ -92,16 +101,17 @@ let formatJUnit (results: NapResult list) : string = sb.AppendLine($" <testcase name=\"{name}\" time=\"{time:F3}\" />") |> ignore else sb.AppendLine($" <testcase name=\"{name}\" time=\"{time:F3}\">") |> ignore + let failureMsg = match result.Error with | Some err -> err | None -> result.Assertions |> List.filter (fun a -> not a.Passed) - |> List.map (fun a -> - $"{a.Assertion.Target}: expected {a.Expected}, got {a.Actual}") + |> List.map (fun a -> $"{a.Assertion.Target}: expected {a.Expected}, got {a.Actual}") |> String.concat "; " |> System.Security.SecurityElement.Escape + sb.AppendLine($" <failure message=\"{failureMsg}\" />") |> ignore sb.AppendLine(" </testcase>") |> ignore @@ -125,8 +135,10 @@ let formatJson (result: NapResult) : string = writer.WriteString("requestMethod", string result.Request.Method) writer.WriteString("requestUrl", result.Request.Url) writer.WriteStartObject("requestHeaders") + for kv in result.Request.Headers do writer.WriteString(kv.Key, kv.Value) + writer.WriteEndObject() match result.Request.Body with @@ -142,12 +154,15 @@ let formatJson (result: NapResult) : string = writer.WriteNumber("bodyLength", resp.Body.Length) writer.WriteString("body", resp.Body) writer.WriteStartObject("headers") + for kv in resp.Headers do writer.WriteString(kv.Key, kv.Value) + writer.WriteEndObject() | None -> () writer.WriteStartArray("assertions") + for a in result.Assertions do writer.WriteStartObject() writer.WriteString("target", a.Assertion.Target) @@ -155,12 +170,15 @@ let formatJson (result: NapResult) : string = 
writer.WriteString("expected", a.Expected) writer.WriteString("actual", a.Actual) writer.WriteEndObject() + writer.WriteEndArray() if result.Log.Length > 0 then writer.WriteStartArray("log") + for line in result.Log do writer.WriteStringValue(line) + writer.WriteEndArray() writer.WriteEndObject() @@ -172,9 +190,11 @@ let formatJsonArray (results: NapResult list) : string = use stream = new System.IO.MemoryStream() use writer = new System.Text.Json.Utf8JsonWriter(stream) writer.WriteStartArray() + for result in results do let json = formatJson result writer.WriteRawValue(json) + writer.WriteEndArray() writer.Flush() Encoding.UTF8.GetString(stream.ToArray()) diff --git a/src/Nap.Core/Parser.fs b/src/Napper.Core/Parser.fs similarity index 54% rename from src/Nap.Core/Parser.fs rename to src/Napper.Core/Parser.fs index 1bad839..7ef909e 100644 --- a/src/Nap.Core/Parser.fs +++ b/src/Napper.Core/Parser.fs @@ -1,22 +1,26 @@ -module Nap.Core.Parser +// Specs: nap-file, nap-minimal, nap-full, nap-meta, nap-vars, nap-request, nap-headers, nap-body, +// nap-assert, nap-script, nap-comments, http-methods, env-interpolation, +// naplist-file, naplist-meta, naplist-vars, naplist-steps +module Napper.Core.Parser open FParsec -open Nap.Core +open Napper.Core // ─── Utility parsers ─────────────────────────────────────────── -let private ws : Parser<unit, unit> = spaces -let private ws1 : Parser<unit, unit> = spaces1 -let private lineEnd : Parser<unit, unit> = skipNewline <|> eof -let private commentLine : Parser<unit, unit> = pchar '#' >>. skipRestOfLine true -let private blankOrComment : Parser<unit, unit> = ws >>. optional commentLine >>. ws -let private skipCommentsAndBlanks : Parser<unit, unit> = +let private ws: Parser<unit, unit> = spaces +let private ws1: Parser<unit, unit> = spaces1 +let private commentLine: Parser<unit, unit> = pchar '#' >>. skipRestOfLine true + +let private skipCommentsAndBlanks: Parser<unit, unit> = skipMany (attempt (ws >>. commentLine)) >>. 
ws + let private quotedString = between (pchar '"') (pchar '"') (manySatisfy (fun c -> c <> '"')) + let private unquotedValue = - many1Satisfy (fun c -> c <> '\n' && c <> '\r' && c <> '#') - |>> fun s -> s.Trim() + many1Satisfy (fun c -> c <> '\n' && c <> '\r' && c <> '#') |>> fun s -> s.Trim() + let private value = quotedString <|> unquotedValue // ─── Section header ──────────────────────────────────────────── @@ -28,38 +32,38 @@ let private sectionHeader name = let private keyValue = notFollowedBy (pstring "\"\"\"") - >>. ws >>. many1Satisfy (fun c -> c <> '=' && c <> '\n' && c <> '\r' && c <> '[' && c <> '#') + >>. ws + >>. many1Satisfy (fun c -> c <> '=' && c <> '\n' && c <> '\r' && c <> '[' && c <> '#') |>> fun s -> s.Trim() .>>. (pchar '=' >>. ws >>. value .>> skipRestOfLine true) // ─── Shorthand parser (GET https://...) ──────────────────────── let private httpMethodStr = - choice [ - stringCIReturn "GET" GET - stringCIReturn "POST" POST - stringCIReturn "PUT" PUT - stringCIReturn "PATCH" PATCH - stringCIReturn "DELETE" DELETE - stringCIReturn "HEAD" HEAD - stringCIReturn "OPTIONS" OPTIONS - ] - -let private shorthandParser : Parser<NapFile, unit> = + choice + [ stringCIReturn "GET" GET + stringCIReturn "POST" POST + stringCIReturn "PUT" PUT + stringCIReturn "PATCH" PATCH + stringCIReturn "DELETE" DELETE + stringCIReturn "HEAD" HEAD + stringCIReturn "OPTIONS" OPTIONS ] + +let private shorthandParser: Parser<NapFile, unit> = ws >>. httpMethodStr .>> ws1 .>>. 
restOfLine true |>> fun (method, url) -> - { - Meta = { Name = None; Description = None; Tags = [] } - Vars = Map.empty - Request = { - Method = method - Url = url.Trim() - Headers = Map.empty - Body = None - } - Assertions = [] - Script = { Pre = None; Post = None } - } + { Meta = + { Name = None + Description = None + Tags = [] } + Vars = Map.empty + Request = + { Method = method + Url = url.Trim() + Headers = Map.empty + Body = None } + Assertions = [] + Script = { Pre = None; Post = None } } // ─── Meta block ──────────────────────────────────────────────── @@ -67,24 +71,22 @@ let private metaBlock = sectionHeader "meta" >>. many (keyValue .>> ws) |>> fun kvs -> let m = Map.ofList kvs - { - Name = Map.tryFind "name" m - Description = Map.tryFind "description" m - Tags = - match Map.tryFind "tags" m with - | Some t -> - t.Trim('[', ']').Split(',') - |> Array.map (fun s -> s.Trim().Trim('"')) - |> Array.filter (fun s -> s <> "") - |> Array.toList - | None -> [] - } + + { Name = Map.tryFind "name" m + Description = Map.tryFind "description" m + Tags = + match Map.tryFind "tags" m with + | Some t -> + t.Trim('[', ']').Split(',') + |> Array.map (fun s -> s.Trim().Trim('"')) + |> Array.filter (fun s -> s <> "") + |> Array.toList + | None -> [] } // ─── Vars block ──────────────────────────────────────────────── let private varsBlock = - sectionHeader "vars" >>. many (keyValue .>> ws) - |>> Map.ofList + sectionHeader "vars" >>. many (keyValue .>> ws) |>> Map.ofList // ─── Request block ───────────────────────────────────────────── @@ -92,6 +94,7 @@ let private requestBlock = sectionHeader "request" >>. 
many (keyValue .>> ws) |>> fun kvs -> let m = Map.ofList kvs + let method = match Map.tryFind "method" m with | Some "GET" -> GET @@ -103,17 +106,18 @@ let private requestBlock = | Some "OPTIONS" -> OPTIONS | Some other -> failwithf "Unknown HTTP method: %s" other | None -> GET + let url = match Map.tryFind "url" m with | Some u -> u | None -> failwith "Missing 'url' in [request] block" + method, url // ─── Request headers block ───────────────────────────────────── let private requestHeadersBlock = - sectionHeader "request.headers" >>. many (keyValue .>> ws) - |>> Map.ofList + sectionHeader "request.headers" >>. many (keyValue .>> ws) |>> Map.ofList // ─── Request body block ──────────────────────────────────────── @@ -125,12 +129,21 @@ let private requestBodyBlock = .>>. opt (ws >>. tripleQuoted .>> ws) |>> fun (kvs, body) -> let m = Map.ofList kvs - let contentType = Map.tryFind "content-type" m |> Option.defaultValue "application/json" + + let contentType = + Map.tryFind "content-type" m |> Option.defaultValue "application/json" + match body with - | Some content -> Some { ContentType = contentType; Content = content.Trim() } + | Some content -> + Some + { ContentType = contentType + Content = content.Trim() } | None -> match Map.tryFind "content" m with - | Some content -> Some { ContentType = contentType; Content = content } + | Some content -> + Some + { ContentType = contentType + Content = content } | None -> None // ─── Assert block ────────────────────────────────────────────── @@ -139,27 +152,32 @@ let private assertionLine = ws >>. 
many1Satisfy (fun c -> c <> '\n' && c <> '\r' && c <> '#' && c <> '[') |>> fun line -> let line = line.Trim() - if line = "" then None + + if line = "" then + None else - let parts = line.Split([|' '|], 3, System.StringSplitOptions.RemoveEmptyEntries) + let parts = line.Split([| ' ' |], 3, System.StringSplitOptions.RemoveEmptyEntries) + match parts with - | [| target; "exists" |] -> - Some { Target = target; Op = Exists } - | [| target; "="; value |] -> - Some { Target = target; Op = Equals value } + | [| target; "exists" |] -> Some { Target = target; Op = Exists } + | [| target; "="; value |] -> Some { Target = target; Op = Equals value } | [| target; "contains"; value |] -> - Some { Target = target; Op = Contains (value.Trim('"')) } + Some + { Target = target + Op = Contains(value.Trim('"')) } | [| target; "matches"; value |] -> - Some { Target = target; Op = Matches (value.Trim('"')) } - | [| target; "<"; value |] -> - Some { Target = target; Op = LessThan value } + Some + { Target = target + Op = Matches(value.Trim('"')) } + | [| target; "<"; value |] -> Some { Target = target; Op = LessThan value } | [| target; ">"; value |] -> - Some { Target = target; Op = GreaterThan value } + Some + { Target = target + Op = GreaterThan value } | _ -> None let private assertBlock = - sectionHeader "assert" >>. many (assertionLine .>> ws) - |>> List.choose id + sectionHeader "assert" >>. many (assertionLine .>> ws) |>> List.choose id // ─── Script block ────────────────────────────────────────────── @@ -167,59 +185,75 @@ let private scriptBlock = sectionHeader "script" >>. many (keyValue .>> ws) |>> fun kvs -> let m = Map.ofList kvs - { Pre = Map.tryFind "pre" m; Post = Map.tryFind "post" m } + + { Pre = Map.tryFind "pre" m + Post = Map.tryFind "post" m } // ─── Full .nap parser ────────────────────────────────────────── -let private skip : Parser<unit, unit> = skipCommentsAndBlanks - -let private fullParser : Parser<NapFile, unit> = - skip - >>. 
opt (attempt metaBlock) .>> skip - .>>. opt (attempt varsBlock) .>> skip - .>>. requestBlock .>> skip - .>>. opt (attempt requestHeadersBlock) .>> skip - .>>. opt (attempt requestBodyBlock) .>> skip - .>>. opt (attempt assertBlock) .>> skip - .>>. opt (attempt scriptBlock) .>> skip +let private skip: Parser<unit, unit> = skipCommentsAndBlanks + +let private fullParser: Parser<NapFile, unit> = + skip >>. opt (attempt metaBlock) .>> skip .>>. opt (attempt varsBlock) .>> skip + .>>. requestBlock + .>> skip + .>>. opt (attempt requestHeadersBlock) + .>> skip + .>>. opt (attempt requestBodyBlock) + .>> skip + .>>. opt (attempt assertBlock) + .>> skip + .>>. opt (attempt scriptBlock) + .>> skip .>> eof |>> fun ((((((meta, vars), (method, url)), headers), body), asserts), script) -> - { - Meta = meta |> Option.defaultValue { Name = None; Description = None; Tags = [] } - Vars = vars |> Option.defaultValue Map.empty - Request = { - Method = method - Url = url - Headers = headers |> Option.defaultValue Map.empty - Body = body |> Option.defaultWith (fun () -> None) - } - Assertions = asserts |> Option.defaultValue [] - Script = script |> Option.defaultValue { Pre = None; Post = None } - } + { Meta = + meta + |> Option.defaultValue + { Name = None + Description = None + Tags = [] } + Vars = vars |> Option.defaultValue Map.empty + Request = + { Method = method + Url = url + Headers = headers |> Option.defaultValue Map.empty + Body = body |> Option.defaultWith (fun () -> None) } + Assertions = asserts |> Option.defaultValue [] + Script = script |> Option.defaultValue { Pre = None; Post = None } } // ─── Public API ──────────────────────────────────────────────── let parseNapFile (input: string) : Result<NapFile, string> = - // Try shorthand first (just "GET https://...") - let shortResult = run shorthandParser input - match shortResult with - | Success(result, _, _) -> Result.Ok result - | Failure _ -> - // Try full format - let fullResult = run fullParser input - match 
fullResult with + try + // Try shorthand first (just "GET https://...") + let shortResult = run shorthandParser input + + match shortResult with | Success(result, _, _) -> Result.Ok result - | Failure(msg, _, _) -> Result.Error msg + | Failure _ -> + // Try full format + let fullResult = run fullParser input + + match fullResult with + | Success(result, _, _) -> Result.Ok result + | Failure(msg, _, _) -> Result.Error msg + with ex -> + Result.Error ex.Message /// Parse a .naplist file let parseNapList (input: string) : Result<NapPlaylist, string> = let lines = - input.Split([|'\n'; '\r'|], System.StringSplitOptions.RemoveEmptyEntries) + input.Split([| '\n'; '\r' |], System.StringSplitOptions.RemoveEmptyEntries) |> Array.map (fun s -> s.Trim()) |> Array.filter (fun s -> s <> "" && not (s.StartsWith "#")) |> Array.toList - let mutable meta = { Name = None; Description = None; Tags = [] } + let mutable meta = + { Name = None + Description = None + Tags = [] } + let mutable env = None let mutable vars = Map.empty let mutable steps = [] @@ -229,16 +263,17 @@ let parseNapList (input: string) : Result<NapPlaylist, string> = if line.StartsWith "[" then currentSection <- line.Trim('[', ']').ToLowerInvariant() elif currentSection = "meta" && line.Contains "=" then - let parts = line.Split([|'='|], 2) + let parts = line.Split([| '=' |], 2) let key = parts[0].Trim() let value = parts[1].Trim().Trim('"') + match key with | "name" -> meta <- { meta with Name = Some value } | "description" -> meta <- { meta with Description = Some value } | "env" -> env <- Some value | _ -> () elif currentSection = "vars" && line.Contains "=" then - let parts = line.Split([|'='|], 2) + let parts = line.Split([| '=' |], 2) vars <- vars |> Map.add (parts[0].Trim()) (parts[1].Trim().Trim('"')) elif currentSection = "steps" then let step = @@ -247,12 +282,12 @@ let parseNapList (input: string) : Result<NapPlaylist, string> = elif line.EndsWith ".fsx" then ScriptStep line elif line.EndsWith ".csx" 
then ScriptStep line elif not (line.Contains ".") then FolderRef line - else NapFileStep line // default to nap file - steps <- steps @ [step] - - Result.Ok { - Meta = meta - Env = env - Vars = vars - Steps = steps - } + else NapFileStep line // default to nap file + + steps <- steps @ [ step ] + + Result.Ok + { Meta = meta + Env = env + Vars = vars + Steps = steps } diff --git a/src/Napper.Core/Runner.fs b/src/Napper.Core/Runner.fs new file mode 100644 index 0000000..f547182 --- /dev/null +++ b/src/Napper.Core/Runner.fs @@ -0,0 +1,336 @@ +// Specs: cli-run, nap-assert, assert-status, assert-equals, assert-exists, assert-contains, +// assert-matches, assert-lt, assert-gt, script-fsx, script-csx, script-dispatch, +// env-interpolation, collection-folder, collection-sort, naplist-steps, +// naplist-nap-step, naplist-folder-step, naplist-nested, naplist-script-step, naplist-var-scope +module Napper.Core.Runner + +open System +open System.Diagnostics +open System.Net.Http +open System.Text +open System.Text.Json +open System.Text.RegularExpressions +open Napper.Core + +let private httpClient = new HttpClient() + +/// Execute an HTTP request from a resolved NapRequest +let executeRequest (request: NapRequest) : Async<NapResponse> = + async { + Logger.info $"HTTP {request.Method} {request.Url}" + Logger.debug $"Request headers: {request.Headers.Count} headers" + let msg = new HttpRequestMessage(request.Method.ToNetMethod(), request.Url) + + // Add headers + for kv in request.Headers do + // Content headers need to go on the content object + if kv.Key.Equals("Content-Type", StringComparison.OrdinalIgnoreCase) then + () + else + msg.Headers.TryAddWithoutValidation(kv.Key, kv.Value) |> ignore + + // Add body if present + match request.Body with + | Some body -> msg.Content <- new StringContent(body.Content, Encoding.UTF8, body.ContentType) + | None -> () + + let sw = Stopwatch.StartNew() + let! response = httpClient.SendAsync(msg) |> Async.AwaitTask + sw.Stop() + + let! 
body = response.Content.ReadAsStringAsync() |> Async.AwaitTask + Logger.info $"HTTP {int response.StatusCode} in {sw.Elapsed.TotalMilliseconds:F0}ms" + Logger.debug $"Response body length: {body.Length}" + + let headers = + response.Headers + |> Seq.append response.Content.Headers + |> Seq.map (fun kv -> kv.Key, kv.Value |> String.concat ", ") + |> Map.ofSeq + + return + { StatusCode = int response.StatusCode + Headers = headers + Body = body + Duration = sw.Elapsed } + } + +/// Walk a dot-delimited path into a JSON body and return the leaf value as a string. +/// e.g. tryGetJsonPath "user.name" body → Some "Alice" +/// Returns None if the path doesn't exist or the body isn't valid JSON. +let private tryGetJsonPath (path: string) (body: string) : string option = + try + let doc = JsonDocument.Parse(body) + let parts = path.Split('.') + let mutable current = doc.RootElement + let mutable found = true + + for part in parts do + if found then + match current.ValueKind with + | JsonValueKind.Object -> + match current.TryGetProperty(part) with + | true, prop -> current <- prop + | false, _ -> found <- false + | _ -> found <- false + + if found then + match current.ValueKind with + | JsonValueKind.String -> Some(current.GetString()) + | JsonValueKind.Number -> Some(current.GetRawText()) + | JsonValueKind.True -> Some "true" + | JsonValueKind.False -> Some "false" + | JsonValueKind.Null -> Some "null" + | _ -> Some(current.GetRawText()) + else + None + with _ -> + None + +/// Resolve an assertion target (e.g. "status", "body.id", "headers.Content-Type") +/// to the actual string value from the HTTP response. +/// Returns None when the target doesn't exist in the response. +let private resolveTarget (response: NapResponse) (target: string) : string option = + if target = "status" then + Some(string response.StatusCode) + elif target = "duration" then + Some(sprintf "%.0fms" response.Duration.TotalMilliseconds) + elif target.StartsWith "headers." 
then + let headerName = target.Substring(8) + + response.Headers + |> Map.tryFind headerName + |> Option.orElseWith (fun () -> + response.Headers + |> Map.tryPick (fun k v -> + if k.Equals(headerName, StringComparison.OrdinalIgnoreCase) then + Some v + else + None)) + elif target.StartsWith "body." then + tryGetJsonPath (target.Substring(5)) response.Body + elif target = "body" then + Some response.Body + else + None + +/// Parse a numeric value from a string, stripping a trailing "ms" duration suffix. +/// e.g. "500ms" → Some 500.0, "42" → Some 42.0, "abc" → None +let private parseNum (s: string) : float option = + let s = s.TrimEnd('m', 's') + + match Double.TryParse(s) with + | true, v -> Some v + | _ -> None + +/// Compare two numeric values (actual vs expected) using the given comparator. +/// Returns false if either value is missing or non-numeric. +let private compareNumeric (cmp: float -> float -> bool) (actual: string option) (expected: string) : bool = + match actual with + | Some a -> + match parseNum a, parseNum expected with + | Some av, Some ev -> cmp av ev + | _ -> false + | None -> false + +/// Convert a glob pattern (using * and ? wildcards) to a regex and test a value against it. +let private globMatch (pattern: string) (value: string) : bool = + let regexPattern = + pattern.ToCharArray() + |> Array.map (fun c -> + match c with + | '*' -> ".*" + | '?' -> "." + | c when ".+^${}()|[]\\".Contains(c) -> $"\\{c}" + | c -> string c) + |> String.concat "" + + Regex.IsMatch(value, $"^{regexPattern}$") + +/// Build an AssertionResult from an assertion, its pass/fail state, and display strings. +let private makeResult + (assertion: Assertion) + (passed: bool) + (expected: string) + (actual: string option) + : AssertionResult = + { Assertion = assertion + Passed = passed + Expected = expected + Actual = actual |> Option.defaultValue "<missing>" } + +/// Evaluate a single assertion operator against the resolved actual value. 
+let private evaluateOp (assertion: Assertion) (actual: string option) : AssertionResult = + match assertion.Op with + | Equals expected -> + let passed = + actual |> Option.map (fun a -> a = expected) |> Option.defaultValue false + + makeResult assertion passed expected actual + | Exists -> + let passed = actual.IsSome + + { Assertion = assertion + Passed = passed + Expected = "exists" + Actual = if actual.IsSome then "exists" else "<missing>" } + | Contains expected -> + let passed = + actual + |> Option.map (fun a -> a.Contains(expected, StringComparison.OrdinalIgnoreCase)) + |> Option.defaultValue false + + makeResult assertion passed $"contains \"{expected}\"" actual + | Matches pattern -> + let passed = + actual |> Option.map (fun a -> globMatch pattern a) |> Option.defaultValue false + + { Assertion = assertion + Passed = passed + Expected = $"matches \"{pattern}\"" + Actual = actual |> Option.defaultValue "<missing>" } + | LessThan expected -> makeResult assertion (compareNumeric (<) actual expected) $"< {expected}" actual + | GreaterThan expected -> makeResult assertion (compareNumeric (>) actual expected) $"> {expected}" actual + +/// Evaluate all assertions against an HTTP response. +/// Each assertion's target is resolved to the actual response value, +/// then the operator (=, exists, contains, matches, <, >) is applied. 
+let evaluateAssertions (assertions: Assertion list) (response: NapResponse) : AssertionResult list = + assertions + |> List.map (fun assertion -> resolveTarget response assertion.Target |> evaluateOp assertion) + +/// Determine the dotnet CLI arguments for a script file +let private scriptArgs (scriptPath: string) : string = + if scriptPath.EndsWith ".csx" then + $"script \"{scriptPath}\"" + else + $"fsi \"{scriptPath}\"" + +/// Run a script (.fsx or .csx) and capture its output +let runScript (scriptPath: string) : Async<NapResult> = + async { + Logger.info $"Script start: {scriptPath}" + let psi = ProcessStartInfo() + psi.FileName <- "dotnet" + psi.Arguments <- scriptArgs scriptPath + psi.WorkingDirectory <- System.IO.Path.GetDirectoryName(scriptPath) + psi.RedirectStandardOutput <- true + psi.RedirectStandardError <- true + psi.UseShellExecute <- false + psi.CreateNoWindow <- true + + let sw = Stopwatch.StartNew() + + try + use proc = Process.Start(psi) + let! stdout = proc.StandardOutput.ReadToEndAsync() |> Async.AwaitTask + let! stderr = proc.StandardError.ReadToEndAsync() |> Async.AwaitTask + do! 
proc.WaitForExitAsync() |> Async.AwaitTask + sw.Stop() + + let logLines = + stdout.Split('\n') + |> Array.map (fun l -> l.TrimEnd('\r')) + |> Array.filter (fun l -> l.Length > 0) + |> Array.toList + + let passed = proc.ExitCode = 0 + Logger.info $"Script exit code: {proc.ExitCode}" + + let error = + if passed then None + elif stderr.Length > 0 then Some stderr + else Some $"Script exited with code {proc.ExitCode}" + + return + { File = scriptPath + Request = + { Method = GET + Url = "" + Headers = Map.empty + Body = None } + Response = None + Assertions = [] + Passed = passed + Error = error + Log = logLines } + with ex -> + sw.Stop() + Logger.error $"Script failed: {ex.Message}" + + return + { File = scriptPath + Request = + { Method = GET + Url = "" + Headers = Map.empty + Body = None } + Response = None + Assertions = [] + Passed = false + Error = Some $"Script failed to start: {ex.Message}" + Log = [] } + } + +/// Run a single .nap file end-to-end +let runNapFile (filePath: string) (vars: Map<string, string>) (envName: string option) : Async<NapResult> = + async { + Logger.info $"File: {filePath}" + let dir = System.IO.Path.GetDirectoryName(filePath) + let content = System.IO.File.ReadAllText(filePath) + + match Parser.parseNapFile content with + | Error msg -> + Logger.error $"Parse error in {filePath}: {msg}" + + return + { File = filePath + Request = + { Method = GET + Url = "" + Headers = Map.empty + Body = None } + Response = None + Assertions = [] + Passed = false + Error = Some $"Parse error: {msg}" + Log = [] } + | Ok napFile -> + // Resolve variables + let allVars = Environment.loadEnvironment dir envName vars napFile.Vars + Logger.debug $"Resolved {allVars.Count} variables" + let resolved = Environment.resolveNapFile allVars napFile + + try + let! 
response = executeRequest resolved.Request + let assertionResults = evaluateAssertions resolved.Assertions response + let passed = assertionResults |> List.filter (fun r -> r.Passed) |> List.length + let total = assertionResults.Length + Logger.info $"Assertions: {passed}/{total} passed" + + for a in assertionResults do + let status = if a.Passed then "PASS" else "FAIL" + Logger.debug $"Assertion {a.Assertion.Target}: {status}" + + let allPassed = assertionResults |> List.forall (fun r -> r.Passed) + + return + { File = filePath + Request = resolved.Request + Response = Some response + Assertions = assertionResults + Passed = allPassed + Error = None + Log = [] } + with ex -> + Logger.error $"Request failed: {ex.Message}" + + return + { File = filePath + Request = resolved.Request + Response = None + Assertions = [] + Passed = false + Error = Some $"Request failed: {ex.Message}" + Log = [] } + } diff --git a/src/Napper.Core/SectionScanner.fs b/src/Napper.Core/SectionScanner.fs new file mode 100644 index 0000000..6c8d00a --- /dev/null +++ b/src/Napper.Core/SectionScanner.fs @@ -0,0 +1,96 @@ +/// Scan .nap and .naplist files for section headers and their line positions. +/// Complements Parser.fs — the parser gives you the data, this gives you the positions. +/// Used by the LSP for document symbols / outline navigation. 
+module Napper.Core.SectionScanner + +/// A located section header with its line number (0-based) and name +type SectionLocation = + { Name: string + Line: int + EndLine: int } + +/// Known .nap section names +let private napSections = + Set.ofList + [ "meta" + "vars" + "request" + "request.headers" + "request.body" + "assert" + "script" ] + +/// Known .naplist section names +let private naplistSections = Set.ofList [ "meta"; "vars"; "steps" ] + +let private isSectionHeader (line: string) : string option = + let trimmed = line.Trim() + + if trimmed.StartsWith "[" && trimmed.EndsWith "]" then + Some(trimmed.TrimStart('[').TrimEnd(']').ToLowerInvariant()) + else + None + +let private isShorthandRequest (line: string) : bool = + let methods = [ "GET"; "POST"; "PUT"; "PATCH"; "DELETE"; "HEAD"; "OPTIONS" ] + let trimmed = line.TrimStart() + methods |> List.exists (fun m -> trimmed.StartsWith(m + " ")) + +/// Scan a .nap file for section locations. Returns sections in file order. +/// Also detects shorthand requests (e.g. "GET https://...") as a synthetic "request" section. +let scanNapSections (content: string) : SectionLocation list = + let lines = content.Split([| '\n' |]) + let mutable sections: SectionLocation list = [] + let mutable lastSectionStart = -1 + let mutable lastName = "" + + let closeSection (endLine: int) = + if lastSectionStart >= 0 then + sections <- + sections + @ [ { Name = lastName + Line = lastSectionStart + EndLine = endLine } ] + + for i in 0 .. lines.Length - 1 do + let line = lines[i] + + match isSectionHeader line with + | Some name when napSections.Contains name -> + closeSection (i - 1) + lastSectionStart <- i + lastName <- name + | _ -> + if i = 0 && isShorthandRequest line then + closeSection (i - 1) + lastSectionStart <- 0 + lastName <- "request" + + closeSection (lines.Length - 1) + sections + +/// Scan a .naplist file for section locations. Returns sections in file order. 
+let scanNaplistSections (content: string) : SectionLocation list = + let lines = content.Split([| '\n' |]) + let mutable sections: SectionLocation list = [] + let mutable lastSectionStart = -1 + let mutable lastName = "" + + let closeSection (endLine: int) = + if lastSectionStart >= 0 then + sections <- + sections + @ [ { Name = lastName + Line = lastSectionStart + EndLine = endLine } ] + + for i in 0 .. lines.Length - 1 do + match isSectionHeader lines[i] with + | Some name when naplistSections.Contains name -> + closeSection (i - 1) + lastSectionStart <- i + lastName <- name + | _ -> () + + closeSection (lines.Length - 1) + sections diff --git a/src/Napper.Core/Types.fs b/src/Napper.Core/Types.fs new file mode 100644 index 0000000..670e613 --- /dev/null +++ b/src/Napper.Core/Types.fs @@ -0,0 +1,110 @@ +// Specs: nap-file, nap-meta, nap-vars, nap-request, nap-headers, nap-body, nap-assert, nap-script, +// http-methods, env-interpolation, assert-status, assert-equals, assert-exists, assert-contains, +// assert-matches, assert-lt, assert-gt, naplist-file, naplist-steps, naplist-nap-step, +// naplist-folder-step, naplist-nested, naplist-script-step +namespace Napper.Core + +open System +open System.Net.Http + +/// Assertion operators used in [assert] blocks +type AssertOp = + | Equals of string + | Exists + | Contains of string + | Matches of string + | LessThan of string + | GreaterThan of string + +/// A single assertion line, e.g. status = 200, body.id exists +type Assertion = + { Target: string // e.g. 
"status", "body.id", "headers.Content-Type", "duration" + Op: AssertOp } + +/// HTTP method +type HttpMethod = + | GET + | POST + | PUT + | PATCH + | DELETE + | HEAD + | OPTIONS + + member this.ToNetMethod() = + match this with + | GET -> System.Net.Http.HttpMethod.Get + | POST -> System.Net.Http.HttpMethod.Post + | PUT -> System.Net.Http.HttpMethod.Put + | PATCH -> System.Net.Http.HttpMethod.Patch + | DELETE -> System.Net.Http.HttpMethod.Delete + | HEAD -> System.Net.Http.HttpMethod.Head + | OPTIONS -> System.Net.Http.HttpMethod.Options + +/// Script references (pre/post hooks) +type ScriptRef = + { Pre: string option + Post: string option } + +/// Metadata block [meta] +type NapMeta = + { Name: string option + Description: string option + Tags: string list } + +/// Request body +type RequestBody = + { ContentType: string; Content: string } + +/// The request definition from a .nap file +type NapRequest = + { Method: HttpMethod + Url: string + Headers: Map<string, string> + Body: RequestBody option } + +/// A fully parsed .nap file +type NapFile = + { Meta: NapMeta + Vars: Map<string, string> + Request: NapRequest + Assertions: Assertion list + Script: ScriptRef } + +/// Result of evaluating a single assertion +type AssertionResult = + { Assertion: Assertion + Passed: bool + Expected: string + Actual: string } + +/// The HTTP response captured after running a request +type NapResponse = + { StatusCode: int + Headers: Map<string, string> + Body: string + Duration: TimeSpan } + +/// Overall result of running a single .nap file +type NapResult = + { File: string + Request: NapRequest + Response: NapResponse option + Assertions: AssertionResult list + Passed: bool + Error: string option + Log: string list } + +/// A step in a .naplist playlist +type PlaylistStep = + | NapFileStep of string // path to a .nap file + | PlaylistRef of string // path to another .naplist + | FolderRef of string // path to a folder + | ScriptStep of string // path to an .fsx or .csx 
orchestration script + +/// A parsed .naplist file +type NapPlaylist = + { Meta: NapMeta + Env: string option + Vars: Map<string, string> + Steps: PlaylistStep list } diff --git a/src/Napper.Lsp.Tests/LspClient.fs b/src/Napper.Lsp.Tests/LspClient.fs new file mode 100644 index 0000000..8140fb5 --- /dev/null +++ b/src/Napper.Lsp.Tests/LspClient.fs @@ -0,0 +1,142 @@ +/// Test client that launches napper-lsp and communicates via JSON-RPC over stdio. +/// This is the exact same protocol VSCode and Zed use. +module Napper.Lsp.Tests.LspClient + +open System +open System.Diagnostics +open System.IO +open System.Text +open System.Text.Json.Nodes +open System.Threading +open System.Threading.Tasks +open Xunit + +let private lspBinaryPath = + let baseDir = AppContext.BaseDirectory + let repoRoot = DirectoryInfo(baseDir).Parent.Parent.Parent.Parent.Parent.FullName + Path.Combine(repoRoot, "src", "Napper.Lsp", "bin", "Debug", "net10.0", "napper-lsp") + +/// Encode a JSON-RPC message with Content-Length header (LSP wire format) +let private encodeMessage (json: string) : byte[] = + let body = Encoding.UTF8.GetBytes(json) + let header = $"Content-Length: {body.Length}\r\n\r\n" + Array.append (Encoding.UTF8.GetBytes(header)) body + +/// Read a single LSP response from the stream (Content-Length header + body) +let private readMessage (reader: StreamReader) (ct: CancellationToken) : Task<JsonNode option> = + task { + let mutable contentLength = 0 + let mutable headerLine = "" + + let! firstLine = reader.ReadLineAsync(ct) + headerLine <- firstLine + + while not (String.IsNullOrEmpty(headerLine)) do + if headerLine.StartsWith("Content-Length:", StringComparison.OrdinalIgnoreCase) then + contentLength <- headerLine.Substring(15).Trim() |> int + + let! nextLine = reader.ReadLineAsync(ct) + headerLine <- nextLine + + if contentLength = 0 then + return None + else + let buffer = Array.zeroCreate<char> contentLength + let! 
_read = reader.ReadBlockAsync(buffer, 0, contentLength) + let json = String(buffer) + return Some(JsonNode.Parse(json)) + } + +/// Helper: create a JsonValue from a string +let str (s: string) : JsonNode = JsonValue.Create(s) + +/// Helper: create a JsonValue from an int +let num (n: int) : JsonNode = JsonValue.Create(n) + +/// A running LSP server process for integration testing +type LspServerProcess() = + let proc = new Process() + let mutable started = false + + member this.Start() : unit = + Assert.True(File.Exists(lspBinaryPath), $"LSP binary not found at {lspBinaryPath}") + proc.StartInfo.FileName <- lspBinaryPath + proc.StartInfo.UseShellExecute <- false + proc.StartInfo.RedirectStandardInput <- true + proc.StartInfo.RedirectStandardOutput <- true + proc.StartInfo.RedirectStandardError <- true + proc.StartInfo.CreateNoWindow <- true + let ok = proc.Start() + Assert.True(ok, "Failed to start napper-lsp process") + started <- true + + member this.SendRequest(method: string, id: int, ?paramObj: JsonNode) : Task<JsonNode> = + task { + let request = JsonObject() + request["jsonrpc"] <- str "2.0" + request["id"] <- num id + request["method"] <- str method + + match paramObj with + | Some p -> request["params"] <- p + | None -> () + + let json = request.ToJsonString() + let bytes = encodeMessage json + do! proc.StandardInput.BaseStream.WriteAsync(bytes, 0, bytes.Length) + do! proc.StandardInput.BaseStream.FlushAsync() + + use cts = new CancellationTokenSource(TimeSpan.FromSeconds(10.0)) + let reader = proc.StandardOutput + let mutable result: JsonNode option = None + + while result.IsNone do + let! 
msg = readMessage reader cts.Token + + match msg with + | Some node when node["id"] <> null && node["id"].GetValue<int>() = id -> result <- Some node + | Some _ -> () + | None -> failwith "Stream ended before response received" + + return result.Value + } + + member this.SendNotification(method: string, ?paramObj: JsonNode) : Task = + task { + let notification = JsonObject() + notification["jsonrpc"] <- str "2.0" + notification["method"] <- str method + + match paramObj with + | Some p -> notification["params"] <- p + | None -> () + + let json = notification.ToJsonString() + let bytes = encodeMessage json + do! proc.StandardInput.BaseStream.WriteAsync(bytes, 0, bytes.Length) + do! proc.StandardInput.BaseStream.FlushAsync() + } + + member this.SendRaw(data: byte[]) : Task = + task { + do! proc.StandardInput.BaseStream.WriteAsync(data, 0, data.Length) + do! proc.StandardInput.BaseStream.FlushAsync() + } + + member _.IsRunning: bool = started && not proc.HasExited + + member _.ReadStdErr() : string = + if proc.HasExited then + proc.StandardError.ReadToEnd() + else + "" + + member this.Kill() : unit = + if started && not proc.HasExited then + proc.Kill() + proc.WaitForExit(3000) |> ignore + + interface IDisposable with + member this.Dispose() = + this.Kill() + proc.Dispose() diff --git a/src/Napper.Lsp.Tests/LspIntegrationTests.fs b/src/Napper.Lsp.Tests/LspIntegrationTests.fs new file mode 100644 index 0000000..37b1da4 --- /dev/null +++ b/src/Napper.Lsp.Tests/LspIntegrationTests.fs @@ -0,0 +1,413 @@ +/// Integration tests for napper-lsp. +/// Every test launches the real binary and talks JSON-RPC over stdio — +/// the exact same protocol VSCode and Zed use. 
/// End-to-end integration tests for the napper-lsp server binary.
/// Each test boots a real server process (LspServerProcess, from LspClient.fs)
/// and speaks raw JSON-RPC over stdio, asserting on the returned JSON and on
/// whether the process survived the exchange (IsRunning).
module Napper.Lsp.Tests.LspIntegrationTests

// NOTE(review): `open System.Text` appears unused in this file — confirm
// (the stdio framing helpers may live in LspClient.fs instead).
open System.Text
open System.Text.Json.Nodes
open System.Threading.Tasks
open Xunit
open Napper.Lsp.Tests.LspClient

/// Build the standard initialize params
let private initializeParams () : JsonNode =
    let p = JsonObject()
    p["processId"] <- num 1
    p["capabilities"] <- JsonObject()
    p["rootUri"] <- str "file:///tmp/test-workspace"
    p :> JsonNode

/// Run a full initialize handshake (initialize request + initialized notification)
let private handshake (server: LspServerProcess) : Task<JsonNode> =
    task {
        let! response = server.SendRequest("initialize", 1, initializeParams ())
        do! server.SendNotification("initialized", JsonObject())
        return response
    }

/// Build a textDocument/didOpen params object
let private didOpenParams (uri: string) (version: int) (text: string) : JsonNode =
    let p = JsonObject()
    let td = JsonObject()
    td["uri"] <- str uri
    td["languageId"] <- str "nap"
    td["version"] <- num version
    td["text"] <- str text
    p["textDocument"] <- td
    p :> JsonNode

[<Fact>]
let ``initialize handshake returns capabilities`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()

        let! response = server.SendRequest("initialize", 1, initializeParams ())

        Assert.NotNull(response["result"])
        Assert.Null(response["error"])

        let result = response["result"]
        Assert.NotNull(result["capabilities"])

        // TextDocumentSync must be Full (1 = Full in LSP spec)
        let sync = result["capabilities"]["textDocumentSync"]
        Assert.NotNull(sync)
        Assert.Equal(1, sync.GetValue<int>())

        // Server info
        let serverInfo = result["serverInfo"]
        Assert.NotNull(serverInfo)
        Assert.Equal("napper-lsp", serverInfo["name"].GetValue<string>())
        Assert.NotNull(serverInfo["version"])

        Assert.True(server.IsRunning, "Server died after initialize")
    }

[<Fact>]
let ``initialized notification accepted without error`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()

        let! _initResponse = server.SendRequest("initialize", 1, initializeParams ())
        do! server.SendNotification("initialized", JsonObject())
        // Notifications produce no response; the delay gives the process time
        // to die if the notification crashed it before we check IsRunning.
        do! Task.Delay(200)

        Assert.True(server.IsRunning, "Server died after initialized notification")
    }

[<Fact>]
let ``textDocument/didOpen tracks document`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        let napContent = "[request]\nmethod = GET\nurl = https://example.com\n"
        do! server.SendNotification("textDocument/didOpen", didOpenParams "file:///tmp/test.nap" 1 napContent)
        do! Task.Delay(200)

        Assert.True(server.IsRunning, "Server died after didOpen")
    }

[<Fact>]
let ``textDocument/didChange updates document`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        // Open
        do!
            server.SendNotification(
                "textDocument/didOpen",
                didOpenParams "file:///tmp/test.nap" 1 "[request]\nmethod = GET\nurl = https://example.com\n"
            )

        // Change — full-document sync, so contentChanges carries one whole-text change
        let changeParams = JsonObject()
        let versionedDoc = JsonObject()
        versionedDoc["uri"] <- str "file:///tmp/test.nap"
        versionedDoc["version"] <- num 2
        changeParams["textDocument"] <- versionedDoc

        let change = JsonObject()
        change["text"] <- str "[request]\nmethod = POST\nurl = https://example.com/users\n"
        let changes = JsonArray()
        changes.Add(change)
        changeParams["contentChanges"] <- changes

        do! server.SendNotification("textDocument/didChange", changeParams)
        do! Task.Delay(200)

        Assert.True(server.IsRunning, "Server died after didChange")
    }

[<Fact>]
let ``textDocument/didClose removes document`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        do!
            server.SendNotification(
                "textDocument/didOpen",
                didOpenParams "file:///tmp/test.nap" 1 "GET https://example.com\n"
            )

        let closeParams = JsonObject()
        let closeDoc = JsonObject()
        closeDoc["uri"] <- str "file:///tmp/test.nap"
        closeParams["textDocument"] <- closeDoc

        do! server.SendNotification("textDocument/didClose", closeParams)
        do! Task.Delay(200)

        Assert.True(server.IsRunning, "Server died after didClose")
    }

[<Fact>]
let ``shutdown and exit clean lifecycle`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        let! shutdownResponse = server.SendRequest("shutdown", 2)
        // Shutdown returns result (may be null for void) with no error
        Assert.Null(shutdownResponse["error"])
        Assert.True(server.IsRunning, "Server died before exit notification")

        do! server.SendNotification("exit")
        // Longer grace period here: process teardown is slower than message handling.
        do! Task.Delay(1000)

        Assert.False(server.IsRunning, "Server should have exited after exit notification")
    }

[<Fact>]
let ``malformed request with unknown params does not crash server`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        // Send a valid JSON-RPC request with a bogus method and garbage params
        let bogusParams = JsonObject()
        bogusParams["garbage"] <- str "nonsense"
        bogusParams["moreGarbage"] <- num 42
        let! response = server.SendRequest("textDocument/totallyBogusMethod", 999, bogusParams)

        // Should return an error, not crash
        Assert.NotNull(response["error"])
        Assert.True(server.IsRunning, "Server crashed on malformed request")

        // Verify it still responds to a valid request after the bogus one
        let! shutdownResponse = server.SendRequest("shutdown", 100)
        Assert.Null(shutdownResponse["error"])
    }

[<Fact>]
let ``unknown method returns LSP error`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        let! response = server.SendRequest("textDocument/somethingThatDoesNotExist", 42)

        Assert.NotNull(response["error"])
        Assert.True(server.IsRunning, "Server crashed on unknown method")
    }

// ─── Document Symbols ──────────────────────────────────

/// Build a textDocument/documentSymbol params object
let private docSymbolParams (uri: string) : JsonNode =
    let p = JsonObject()
    let td = JsonObject()
    td["uri"] <- str uri
    p["textDocument"] <- td
    p :> JsonNode

[<Fact>]
let ``documentSymbol returns sections for nap file`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        let uri = "file:///tmp/test.nap"

        let content =
            "[meta]\nname = \"Test\"\n\n[request]\nmethod = GET\nurl = https://example.com\n\n[assert]\nstatus = 200\n"

        do! server.SendNotification("textDocument/didOpen", didOpenParams uri 1 content)

        let! response = server.SendRequest("textDocument/documentSymbol", 10, docSymbolParams uri)

        Assert.Null(response["error"])
        Assert.NotNull(response["result"])

        let symbols = response["result"] :?> JsonArray
        Assert.True(symbols.Count >= 3, $"Expected at least 3 symbols (meta, request, assert), got {symbols.Count}")

        // Check section names
        let names = symbols |> Seq.map (fun s -> s["name"].GetValue<string>()) |> Seq.toList
        Assert.Contains("[meta]", names)
        Assert.Contains("[request]", names)
        Assert.Contains("[assert]", names)
    }

[<Fact>]
let ``documentSymbol returns sections for naplist file`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        let uri = "file:///tmp/test.naplist"

        let content =
            "[meta]\nname = \"Smoke tests\"\n\n[steps]\nauth/login.nap\nusers/get-user.nap\n"

        do! server.SendNotification("textDocument/didOpen", didOpenParams uri 1 content)

        let! response = server.SendRequest("textDocument/documentSymbol", 11, docSymbolParams uri)

        Assert.Null(response["error"])
        Assert.NotNull(response["result"])

        let symbols = response["result"] :?> JsonArray
        Assert.True(symbols.Count >= 2, $"Expected at least 2 symbols (meta, steps), got {symbols.Count}")

        let names = symbols |> Seq.map (fun s -> s["name"].GetValue<string>()) |> Seq.toList
        Assert.Contains("[meta]", names)
        Assert.Contains("[steps]", names)
    }

// ─── Code Lens ─────────────────────────────────────────

/// Build a textDocument/codeLens params object
let private codeLensParams (uri: string) : JsonNode =
    let p = JsonObject()
    let td = JsonObject()
    td["uri"] <- str uri
    p["textDocument"] <- td
    p :> JsonNode

[<Fact>]
let ``codeLens returns lenses for nap file with request section`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        let uri = "file:///tmp/test.nap"
        let content = "[request]\nmethod = GET\nurl = https://example.com\n"
        do! server.SendNotification("textDocument/didOpen", didOpenParams uri 1 content)

        let! response = server.SendRequest("textDocument/codeLens", 12, codeLensParams uri)

        Assert.Null(response["error"])
        Assert.NotNull(response["result"])

        let lenses = response["result"] :?> JsonArray
        Assert.True(lenses.Count >= 1, $"Expected at least 1 code lens, got {lenses.Count}")

        // First lens should be on line 0 (where [request] is)
        let firstLens = lenses[0]
        Assert.NotNull(firstLens["range"])
        let rangeNode = firstLens["range"]
        let startNode = rangeNode["start"]
        let startLine = startNode["line"].GetValue<int>()
        Assert.Equal(0, startLine)
    }

// ─── Execute Command: requestInfo ──────────────────────

/// Build a workspace/executeCommand params object with one string argument
let private executeCommandParams (command: string) (arg: string) : JsonNode =
    let p = JsonObject()
    p["command"] <- str command
    let args = JsonArray()
    args.Add(str arg)
    p["arguments"] <- args
    p :> JsonNode

[<Fact>]
let ``executeCommand requestInfo returns method and URL`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        let uri = "file:///tmp/test.nap"
        let content = "[request]\nmethod = POST\nurl = https://api.example.com/users\n"
        do! server.SendNotification("textDocument/didOpen", didOpenParams uri 1 content)

        let! response =
            server.SendRequest("workspace/executeCommand", 20, executeCommandParams "napper.requestInfo" uri)

        Assert.Null(response["error"])
        Assert.NotNull(response["result"])

        let result = response["result"]
        Assert.Equal("POST", result["method"].GetValue<string>())
        Assert.Equal("https://api.example.com/users", result["url"].GetValue<string>())
    }

// ─── Execute Command: copyCurl ─────────────────────────

[<Fact>]
let ``executeCommand copyCurl returns curl string`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        let uri = "file:///tmp/test.nap"
        let content = "[request]\nmethod = GET\nurl = https://example.com/api\n"
        do! server.SendNotification("textDocument/didOpen", didOpenParams uri 1 content)

        let! response = server.SendRequest("workspace/executeCommand", 21, executeCommandParams "napper.copyCurl" uri)

        Assert.Null(response["error"])
        Assert.NotNull(response["result"])

        let curl = response["result"].GetValue<string>()
        Assert.Contains("curl", curl)
        Assert.Contains("GET", curl)
        Assert.Contains("https://example.com/api", curl)
    }

// ─── Execute Command: listEnvironments ─────────────────

[<Fact>]
let ``executeCommand listEnvironments returns env names`` () : Task =
    task {
        use server = new LspServerProcess()
        server.Start()
        let! _ = handshake server

        // Create temp .napenv files
        let tmpDir =
            System.IO.Path.Combine(System.IO.Path.GetTempPath(), $"napper-lsp-test-{System.Guid.NewGuid()}")

        System.IO.Directory.CreateDirectory(tmpDir) |> ignore
        System.IO.File.WriteAllText(System.IO.Path.Combine(tmpDir, ".napenv"), "baseUrl = https://example.com")

        System.IO.File.WriteAllText(
            System.IO.Path.Combine(tmpDir, ".napenv.staging"),
            "baseUrl = https://staging.example.com"
        )

        System.IO.File.WriteAllText(
            System.IO.Path.Combine(tmpDir, ".napenv.production"),
            "baseUrl = https://prod.example.com"
        )

        System.IO.File.WriteAllText(System.IO.Path.Combine(tmpDir, ".napenv.local"), "secret = hunter2")

        try
            let rootUri = $"file://{tmpDir}"

            let! response =
                server.SendRequest(
                    "workspace/executeCommand",
                    22,
                    executeCommandParams "napper.listEnvironments" rootUri
                )

            Assert.Null(response["error"])
            Assert.NotNull(response["result"])

            let envs = response["result"] :?> JsonArray
            let envNames = envs |> Seq.map (fun e -> e.GetValue<string>()) |> Seq.toList

            // Should find staging and production, NOT base (.napenv) or local (.napenv.local)
            Assert.Contains("staging", envNames)
            Assert.Contains("production", envNames)
            Assert.DoesNotContain("local", envNames)
            Assert.Equal(2, envs.Count)
        finally
            System.IO.Directory.Delete(tmpDir, true)
    }
diff --git a/src/Napper.Lsp.Tests/Napper.Lsp.Tests.fsproj b/src/Napper.Lsp.Tests/Napper.Lsp.Tests.fsproj
new file mode 100644
index 0000000..5b390f2
--- /dev/null
+++ b/src/Napper.Lsp.Tests/Napper.Lsp.Tests.fsproj
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <IsPackable>false</IsPackable>
    <NuGetAuditMode>direct</NuGetAuditMode>
  </PropertyGroup>

  <ItemGroup>
    <Compile Include="LspClient.fs" />
    <Compile Include="LspIntegrationTests.fs" />
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="coverlet.collector" Version="8.0.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="18.3.0" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="3.1.5" />
  </ItemGroup>

</Project>
diff --git a/src/Napper.Lsp.Tests/coverage.runsettings b/src/Napper.Lsp.Tests/coverage.runsettings
new file mode 100644
index 0000000..62c7520
--- /dev/null
+++ b/src/Napper.Lsp.Tests/coverage.runsettings
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<RunSettings>
  <DataCollectionRunSettings>
    <DataCollectors>
      <DataCollector friendlyName="XPlat Code Coverage">
        <Configuration>
          <Format>cobertura,lcov</Format>
          <Include>[Napper.Lsp]*</Include>
          <IncludeTestAssembly>false</IncludeTestAssembly>
        </Configuration>
      </DataCollector>
</DataCollectors> + </DataCollectionRunSettings> +</RunSettings> diff --git a/src/Napper.Lsp/Client.fs b/src/Napper.Lsp/Client.fs new file mode 100644 index 0000000..1035e67 --- /dev/null +++ b/src/Napper.Lsp/Client.fs @@ -0,0 +1,35 @@ +namespace Napper.Lsp + +open Ionide.LanguageServerProtocol +open Ionide.LanguageServerProtocol.JsonRpc + +/// Wraps the LSP client connection for sending notifications back to the IDE +type Client(notificationSender: Server.ClientNotificationSender, requestSender: Server.ClientRequestSender) = + inherit LspClient() + + member this.LogDebug(message: string) : Async<unit> = + this.WindowLogMessage( + { Type = Types.MessageType.Debug + Message = message } + ) + + member this.LogInfo(message: string) : Async<unit> = + this.WindowLogMessage( + { Type = Types.MessageType.Info + Message = message } + ) + + override this.WindowLogMessage p = + match box p with + | null -> async { () } + | value -> notificationSender "window/logMessage" value |> Async.Ignore + + override this.WindowShowMessage p = + match box p with + | null -> async { () } + | value -> notificationSender "window/showMessage" value |> Async.Ignore + + override this.WindowShowMessageRequest p = + match box p with + | null -> async { return Result.Error(Error.InternalError("Parameter was null")) } + | value -> requestSender.Send "window/showMessageRequest" value diff --git a/src/Napper.Lsp/Napper.Lsp.fsproj b/src/Napper.Lsp/Napper.Lsp.fsproj new file mode 100644 index 0000000..07a9722 --- /dev/null +++ b/src/Napper.Lsp/Napper.Lsp.fsproj @@ -0,0 +1,24 @@ +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <OutputType>Exe</OutputType> + <AssemblyName>napper-lsp</AssemblyName> + <NuGetAuditMode>direct</NuGetAuditMode> + </PropertyGroup> + + <ItemGroup> + <Compile Include="Workspace.fs" /> + <Compile Include="Client.fs" /> + <Compile Include="Server.fs" /> + <Compile Include="Program.fs" /> + </ItemGroup> + + <ItemGroup> + <PackageReference 
Include="Ionide.LanguageServerProtocol" Version="0.7.0" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\Napper.Core\Napper.Core.fsproj" />
  </ItemGroup>

</Project>
diff --git a/src/Napper.Lsp/Program.fs b/src/Napper.Lsp/Program.fs
new file mode 100644
index 0000000..508f72e
--- /dev/null
+++ b/src/Napper.Lsp/Program.fs
@@ -0,0 +1,85 @@
/// Entry point for the napper-lsp language server.
/// LSP takes over stdio — do NOT read/write to stdin/stdout directly.
module Napper.Lsp.Program

open System
open System.Threading.Tasks
open Ionide.LanguageServerProtocol
open Ionide.LanguageServerProtocol.JsonUtils
open Napper.Lsp
open Newtonsoft.Json
open StreamJsonRpc

/// Newtonsoft formatter configured for Ionide's LSP layer: nulls dropped,
/// F#-friendly converters, camelCase + Option contract resolver.
// NOTE(review): this binding is never referenced in this file — startServer
// lets Server.start build its own handler and createRpc does not use it.
// Confirm it is wired elsewhere or remove it; with FS1182 elevated to error
// (.editorconfig) an unused binding may fail the build.
let private defaultJsonRpcFormatter () =
    let fmt = new JsonMessageFormatter()
    fmt.JsonSerializer.NullValueHandling <- NullValueHandling.Ignore
    fmt.JsonSerializer.ConstructorHandling <- ConstructorHandling.AllowNonPublicDefaultConstructor
    fmt.JsonSerializer.MissingMemberHandling <- MissingMemberHandling.Ignore
    fmt.JsonSerializer.Converters.Add(StrictNumberConverter())
    fmt.JsonSerializer.Converters.Add(StrictStringConverter())
    fmt.JsonSerializer.Converters.Add(StrictBoolConverter())
    fmt.JsonSerializer.Converters.Add(SingleCaseUnionConverter())
    fmt.JsonSerializer.Converters.Add(OptionConverter())
    fmt.JsonSerializer.Converters.Add(ErasedUnionConverter())
    fmt.JsonSerializer.ContractResolver <- OptionAndCamelCasePropertyNamesContractResolver()
    fmt

/// Build the StreamJsonRpc connection with napper-specific fatal-exception
/// classification and error-detail mapping layered over the JsonRpc defaults.
let private createRpc (handler: IJsonRpcMessageHandler) : JsonRpc =
    // Exceptions that should be reported to the peer but must NOT tear down
    // the connection. Recurses into AggregateException via its first inner
    // exception only.
    let rec (|HandleableException|_|) (e: exn) =
        match e with
        | :? LocalRpcException -> Some()
        | :? TaskCanceledException -> Some()
        | :? OperationCanceledException -> Some()
        | :? JsonSerializationException -> Some()
        | :? AggregateException as aex -> aex.InnerExceptions |> Seq.tryHead |> Option.bind (|HandleableException|_|)
        | _ -> None

    let strategy = ActivityTracingStrategy()

    { new JsonRpc(handler, ActivityTracingStrategy = strategy) with
        // Only exceptions outside the handleable set kill the connection.
        member _.IsFatalException(ex: Exception) =
            match ex with
            | HandleableException -> false
            | _ -> true

        // Map serialization failures to a JSON-RPC ParseError instead of the
        // generic error the base implementation would produce.
        member this.CreateErrorDetails(request: Protocol.JsonRpcRequest, ex: Exception) =
            match ex with
            | :? JsonSerializationException as jex ->
                let isSerializable = this.ExceptionStrategy = ExceptionProcessing.ISerializable

                let data: obj =
                    if isSerializable then
                        (jex :> obj)
                    else
                        Protocol.CommonErrorData(jex)

                Protocol.JsonRpcError.ErrorDetail(
                    Code = Protocol.JsonRpcErrorCode.ParseError,
                    Message = jex.Message,
                    Data = data
                )
            | _ -> base.CreateErrorDetails(request, ex) }

/// Wire stdio to the Ionide server loop and block until the session ends.
let private startServer () =
    let input = Console.OpenStandardInput()
    let output = Console.OpenStandardOutput()

    let requestHandlings: Map<string, Mappings.ServerRequestHandling<_>> =
        Server.defaultRequestHandlings ()

    Server.start
        requestHandlings
        input
        output
        (fun (notifier, requester) -> new Client(notifier, requester))
        (fun client -> new NapLspServer(client))
        createRpc

[<EntryPoint>]
let main _args =
    // Crashes go to stderr (stdout belongs to the protocol) and exit non-zero.
    try
        let result = startServer ()
        int result
    with ex ->
        eprintfn $"napper-lsp crashed: %A{ex}"
        1
diff --git a/src/Napper.Lsp/Server.fs b/src/Napper.Lsp/Server.fs
new file mode 100644
index 0000000..6f7d5e5
--- /dev/null
+++ b/src/Napper.Lsp/Server.fs
@@ -0,0 +1,288 @@
namespace Napper.Lsp

open Ionide.LanguageServerProtocol
open Ionide.LanguageServerProtocol.Types
open Napper.Core
open Newtonsoft.Json.Linq

/// LSP server — lifecycle, document sync, symbols, code lens, and commands.
/// All domain logic lives in Napper.Core. This file is protocol glue only.
type NapLspServer(client: Client) =
    inherit LspServer()

    let serverName = "napper-lsp"
    let serverVersion = "0.1.0"

    // Command identifiers advertised in ExecuteCommandProvider below; the
    // editor invokes them via workspace/executeCommand.
    let commandCopyCurl = "napper.copyCurl"
    let commandListEnvs = "napper.listEnvironments"
    let commandRequestInfo = "napper.requestInfo"

    // Full-document sync only; document symbols, code lens (no resolve step),
    // and the three napper.* commands.
    let capabilities: ServerCapabilities =
        { ServerCapabilities.Default with
            TextDocumentSync = Some(U2.C2 TextDocumentSyncKind.Full)
            DocumentSymbolProvider = Some(U2.C1 true)
            CodeLensProvider =
                Some
                    { ResolveProvider = Some false
                      WorkDoneProgress = None }
            ExecuteCommandProvider =
                Some
                    { Commands = [| commandCopyCurl; commandListEnvs; commandRequestInfo |]
                      WorkDoneProgress = None } }

    // ─── Helpers ─────────────────────────────────────────────

    // NOTE(review): String.EndsWith(string) is culture-sensitive in .NET;
    // StringComparison.Ordinal would be safer for extension checks — confirm.
    let isNapFile (uri: string) : bool = uri.EndsWith ".nap"
    let isNaplistFile (uri: string) : bool = uri.EndsWith ".naplist"

    /// Map a nap/naplist section name to the closest LSP SymbolKind.
    let symbolKindForSection (name: string) : SymbolKind =
        match name with
        | "meta" -> SymbolKind.Namespace
        | "request" -> SymbolKind.Function
        | "request.headers" -> SymbolKind.Struct
        | "request.body" -> SymbolKind.Struct
        | "assert" -> SymbolKind.Function
        | "script" -> SymbolKind.Function
        | "vars" -> SymbolKind.Variable
        | "steps" -> SymbolKind.Array
        | _ -> SymbolKind.Key

    /// Convert a scanned section into a DocumentSymbol whose range spans the
    /// section header line through its last line (column 0 on both ends).
    let sectionToSymbol (section: SectionScanner.SectionLocation) : DocumentSymbol =
        let range =
            { Start =
                { Line = uint32 section.Line
                  Character = 0u }
              End =
                { Line = uint32 section.EndLine
                  Character = 0u } }

        { Name = $"[{section.Name}]"
          Detail = None
          Kind = symbolKindForSection section.Name
          Tags = None
          Deprecated = None
          Range = range
          SelectionRange = range
          Children = None }

    /// Full text of a document currently tracked by the workspace, if any.
    let getDocumentText (uri: string) : string option =
        Workspace.tryGetDocument uri |> Option.map _.Text

    let uriToFilePath (uri: string) : string =
        if uri.StartsWith "file://" then
            System.Uri(uri).LocalPath
        else
            uri

    // NOTE(review): this helper is never called in this file (the
    // listEnvironments command uses uriToFilePath directly). With FS1182
    // elevated to error in .editorconfig this may fail the build — confirm
    // it is needed or remove it.
    let uriToDirectoryPath (uri: string) : string =
        uriToFilePath uri |> System.IO.Path.GetDirectoryName

    /// Parse the tracked document at `uri` and return its request section,
    /// or None if the document is unknown or fails to parse.
    let parseRequestFromUri (uri: string) : NapRequest option =
        getDocumentText uri
        |> Option.bind (fun text ->
            match Parser.parseNapFile text with
            | Result.Ok napFile -> Some napFile.Request
            | Result.Error _ -> None)

    let methodString (m: HttpMethod) : string =
        match m with
        | GET -> "GET"
        | POST -> "POST"
        | PUT -> "PUT"
        | PATCH -> "PATCH"
        | DELETE -> "DELETE"
        | HEAD -> "HEAD"
        | OPTIONS -> "OPTIONS"

    // ─── Lifecycle ───────────────────────────────────────────

    override _.Initialize(_param) =
        async {
            Logger.info $"{serverName} initializing"
            do! client.LogInfo $"{serverName} v{serverVersion} initializing"

            return
                Result.Ok
                    { InitializeResult.Capabilities = capabilities
                      ServerInfo =
                        Some
                            { InitializeResultServerInfo.Name = serverName
                              Version = Some serverVersion } }
        }

    override _.Initialized(_param) =
        async {
            Logger.info $"{serverName} initialized"
            do! client.LogInfo $"{serverName} ready"
        }

    override _.Shutdown() =
        async {
            Logger.info $"{serverName} shutting down"
            return Result.Ok()
        }

    override _.Exit() =
        async { Logger.info $"{serverName} exiting" }

    // ─── Document Sync ───────────────────────────────────────

    override _.TextDocumentDidOpen(param) =
        async {
            let doc = param.TextDocument
            Workspace.openDocument doc.Uri (int doc.Version) doc.Text
            do! client.LogDebug $"Opened {doc.Uri}"
        }

    override _.TextDocumentDidChange(param) =
        async {
            let doc = param.TextDocument

            // Sync kind is Full, so a conforming client sends exactly one
            // whole-document change (the U2.C2 case). Anything else is logged
            // and dropped.
            match param.ContentChanges with
            | [| U2.C2 { Text = newText } |] ->
                Workspace.changeDocument doc.Uri (int doc.Version) newText
                do! client.LogDebug $"Changed {doc.Uri}"
            | _ -> Logger.warn "Received unsupported partial/multi change"
        }

    override _.TextDocumentDidClose(param) =
        async {
            let doc = param.TextDocument
            Workspace.closeDocument doc.Uri
            do! client.LogDebug $"Closed {doc.Uri}"
        }

    // ─── Document Symbols ────────────────────────────────────
    // Replaces: extractHttpMethod, parsePlaylistStepPaths, CodeLens section detection in TS

    override _.TextDocumentDocumentSymbol(param) =
        async {
            let uri = param.TextDocument.Uri

            match getDocumentText uri with
            | None -> return Result.Ok None
            | Some text ->
                // Section scanner chosen by file extension; other extensions
                // yield no symbols.
                let sections =
                    if isNapFile uri then
                        SectionScanner.scanNapSections text
                    elif isNaplistFile uri then
                        SectionScanner.scanNaplistSections text
                    else
                        []

                let symbols = sections |> List.map sectionToSymbol |> Array.ofList

                Logger.debug $"documentSymbol: {uri} -> {symbols.Length} symbols"
                return Result.Ok(Some(U2.C2 symbols))
        }

    // ─── Code Lens ───────────────────────────────────────────
    // Replaces: codeLensProvider.ts section scanning + method extraction in TS

    override _.TextDocumentCodeLens(param) =
        async {
            let uri = param.TextDocument.Uri

            match getDocumentText uri with
            | None -> return Result.Ok None
            | Some text when isNapFile uri ->
                let sections = SectionScanner.scanNapSections text

                // One lens per [request] section, anchored to the header line.
                let lenses =
                    sections
                    |> List.choose (fun s ->
                        if s.Name = "request" then
                            let range =
                                { Start = { Line = uint32 s.Line; Character = 0u }
                                  End = { Line = uint32 s.Line; Character = 0u } }

                            // Extract method + URL for display
                            let detail =
                                match Parser.parseNapFile text with
                                | Result.Ok nap -> Some $"{methodString nap.Request.Method} {nap.Request.Url}"
                                | Result.Error _ -> None

                            Some
                                { Range = range
                                  Command = None
                                  Data = detail |> Option.map (fun d -> JValue(d) :> JToken) }
                        else
                            None)
                    |> Array.ofList

                Logger.debug $"codeLens: {uri} -> {lenses.Length} lenses"
                return Result.Ok(Some lenses)
            | Some text when isNaplistFile uri ->
                let sections = SectionScanner.scanNaplistSections text

                // Naplist files get a bare lens on the [meta] header only.
                let lenses =
                    sections
                    |> List.choose (fun s ->
                        if s.Name = "meta" then
                            let range =
                                { Start = { Line = uint32 s.Line; Character = 0u }
                                  End = { Line = uint32 s.Line; Character = 0u } }

                            Some
                                { Range = range
                                  Command = None
                                  Data = None }
                        else
                            None)
                    |> Array.ofList

                return Result.Ok(Some lenses)
            | _ -> return Result.Ok None
        }

    // ─── Execute Command ─────────────────────────────────────
    // Replaces: parseMethodAndUrl, detectEnvironments, curl generation in TS

    override _.WorkspaceExecuteCommand(param) =
        // Every napper.* command takes a single string argument (a URI);
        // a missing argument degrades to "" and the lookups below return None.
        let extractedArg =
            param.Arguments
            |> Option.bind Array.tryHead
            |> Option.map (fun (t: JToken) -> t.ToObject<string>())
            |> Option.defaultValue ""

        async {
            match param.Command with
            | cmd when cmd = commandRequestInfo ->
                let uri = extractedArg

                match parseRequestFromUri uri with
                | Some request ->
                    let result = JObject()
                    result["method"] <- JValue(methodString request.Method)
                    result["url"] <- JValue(request.Url)
                    let headers = JObject()
                    request.Headers |> Map.iter (fun k v -> headers[k] <- JValue(v))
                    result["headers"] <- headers
                    Logger.debug $"requestInfo: {uri} -> {methodString request.Method} {request.Url}"
                    return Result.Ok(Some(result :> JToken))
                | None -> return Result.Ok None

            | cmd when cmd = commandCopyCurl ->
                let uri = extractedArg

                match parseRequestFromUri uri with
                | Some request ->
                    let curl = CurlGenerator.toCurl request
                    Logger.debug $"copyCurl: {uri} -> {curl}"
                    return Result.Ok(Some(JValue(curl) :> JToken))
                | None -> return Result.Ok None

            | cmd when cmd = commandListEnvs ->
                let rootUri = extractedArg
                let dir = uriToFilePath rootUri
                let envNames = Environment.detectEnvironmentNames dir
                Logger.debug $"listEnvironments: {dir} -> {envNames.Length} envs"
                let arr = JArray(envNames |> List.map (fun n -> JValue(n) :> JToken))
                return Result.Ok(Some(arr :> JToken))

            | _ ->
                Logger.warn $"Unknown command: {param.Command}"
                return Result.Ok None
        }

    override _.Dispose() = ()
diff --git a/src/Napper.Lsp/Workspace.fs b/src/Napper.Lsp/Workspace.fs
new file mode 100644
index 0000000..3b5ff6c
---
/dev/null
+++ b/src/Napper.Lsp/Workspace.fs
@@ -0,0 +1,51 @@
module Napper.Lsp.Workspace

open System.Collections.Concurrent

/// A tracked document: version + full text content
type TrackedDocument =
    { Version: int
      Text: string
      Uri: string }

/// In-memory store for all open documents synced from the IDE
let private store = ConcurrentDictionary<string, TrackedDocument>()

/// Assemble a TrackedDocument record from its parts.
let private mkDoc (uri: string) (version: int) (text: string) : TrackedDocument =
    { Version = version
      Text = text
      Uri = uri }

/// Track a newly opened document
let openDocument (uri: string) (version: int) (text: string) : unit =
    let fresh = mkDoc uri version text
    // An open always wins — it replaces whatever was previously tracked.
    store.AddOrUpdate(uri, fresh, (fun _ _ -> fresh)) |> ignore
    Napper.Core.Logger.debug $"Workspace: opened {uri} (v{version})"

/// Update an existing document with new content
let changeDocument (uri: string) (version: int) (text: string) : unit =
    let fresh = mkDoc uri version text

    // Keep whichever entry carries the higher version; stale updates are dropped.
    let keepNewest _ (existing: TrackedDocument) =
        if version > existing.Version then fresh else existing

    store.AddOrUpdate(uri, fresh, keepNewest) |> ignore
    Napper.Core.Logger.debug $"Workspace: changed {uri} (v{version})"

/// Remove a closed document
let closeDocument (uri: string) : unit =
    store.TryRemove(uri) |> ignore
    Napper.Core.Logger.debug $"Workspace: closed {uri}"

/// Get a tracked document by URI
let tryGetDocument (uri: string) : TrackedDocument option =
    match store.TryGetValue(uri) with
    | true, found -> Some found
    | _ -> None

/// Get all currently tracked document URIs
let trackedUris () : string list = store.Keys |> List.ofSeq

/// Number of currently tracked documents
let documentCount () : int = store.Count
diff --git a/src/Nap.VsCode/.c8rc.json b/src/Napper.VsCode/.c8rc.json
similarity index 100%
rename from src/Nap.VsCode/.c8rc.json
rename to src/Napper.VsCode/.c8rc.json
diff --git a/src/Napper.VsCode/.prettierrc b/src/Napper.VsCode/.prettierrc
new file mode 100644
index 0000000..9af3519
--- /dev/null
+++ b/src/Napper.VsCode/.prettierrc
@@ -0,0 +1,7 @@
{
  "semi": true,
  "singleQuote":
true, + "trailingComma": "all", + "tabWidth": 2, + "printWidth": 100 +} diff --git a/src/Nap.VsCode/.vscode-test.mjs b/src/Napper.VsCode/.vscode-test.mjs similarity index 90% rename from src/Nap.VsCode/.vscode-test.mjs rename to src/Napper.VsCode/.vscode-test.mjs index fcffeaa..40464c3 100644 --- a/src/Nap.VsCode/.vscode-test.mjs +++ b/src/Napper.VsCode/.vscode-test.mjs @@ -9,7 +9,7 @@ const __dirname = dirname(fileURLToPath(import.meta.url)); const testWorkspace = mkdtempSync(join(tmpdir(), "napper-test-")); cpSync("./src/test/fixtures/workspace", testWorkspace, { recursive: true }); -// No custom settings needed — CLI resolves from extension path via context.extensionPath +// CLI resolves from extension bin/ dir via checkVersionMatch in extension.ts const userDataDir = resolve(__dirname, ".vscode-test/user-data"); diff --git a/src/Nap.VsCode/.vscodeignore b/src/Napper.VsCode/.vscodeignore similarity index 100% rename from src/Nap.VsCode/.vscodeignore rename to src/Napper.VsCode/.vscodeignore diff --git a/src/Nap.VsCode/LICENSE b/src/Napper.VsCode/LICENSE similarity index 100% rename from src/Nap.VsCode/LICENSE rename to src/Napper.VsCode/LICENSE diff --git a/src/Nap.VsCode/README.md b/src/Napper.VsCode/README.md similarity index 72% rename from src/Nap.VsCode/README.md rename to src/Napper.VsCode/README.md index 9b316b7..f44a341 100644 --- a/src/Nap.VsCode/README.md +++ b/src/Napper.VsCode/README.md @@ -20,12 +20,12 @@ Napper is a free, open-source API testing tool that runs from the command line a Everything you need for API testing. Nothing you don't. -- **CLI First** -- The command line is the product. Run requests, execute test suites, and integrate with CI/CD pipelines from your terminal. -- **VS Code Native** -- Full extension with syntax highlighting, request explorer, environment switching, and Test Explorer integration. Never leave your editor. -- **F# and C# Scripting** -- Full power of F# and C# for pre/post request hooks. 
Extract tokens, build dynamic payloads, orchestrate complex flows with the entire .NET ecosystem. -- **Declarative Assertions** -- Assert on status codes, JSON paths, headers, and response times with a clean, readable syntax. No scripting required for simple checks. -- **Composable Playlists** -- Chain requests into test suites with `.naplist` files. Nest playlists, reference folders, pass variables between steps. -- **Plain Text, Git Friendly** -- Every request is a `.nap` file. Every environment is a `.napenv` file. Version control everything. No binary blobs, no lock-in. +- **CLI First** (`cli-run`) -- The command line is the product. Run requests, execute test suites, and integrate with CI/CD pipelines from your terminal. +- **VS Code Native** (`vscode-extension`) -- Full extension with syntax highlighting (`vscode-syntax`), request explorer (`vscode-explorer`), environment switching (`vscode-env-switcher`), and Test Explorer integration (`vscode-test-explorer`). Never leave your editor. +- **F# and C# Scripting** (`script-fsx`, `script-csx`) -- Full power of F# and C# for pre/post request hooks. Extract tokens, build dynamic payloads, orchestrate complex flows with the entire .NET ecosystem. +- **Declarative Assertions** (`nap-assert`) -- Assert on status codes (`assert-status`), JSON paths (`assert-equals`, `assert-exists`), headers (`assert-contains`), and response times (`assert-lt`) with a clean, readable syntax. No scripting required for simple checks. +- **Composable Playlists** (`naplist-file`) -- Chain requests into test suites with `.naplist` files. Nest playlists (`naplist-nested`), reference folders (`naplist-folder-step`), pass variables between steps (`naplist-var-scope`). +- **Plain Text, Git Friendly** (`nap-file`) -- Every request is a `.nap` file. Every environment is a `.napenv` file (`env-file`). Version control everything. No binary blobs, no lock-in. 
## Quick Start @@ -179,14 +179,17 @@ Variable priority (highest wins): ``` Usage: - napper run <file|folder> Run a .nap file, .naplist playlist, or folder - napper check <file> Validate a .nap or .naplist file - napper help Show this help + napper run <file|folder> Run a .nap file, .naplist playlist, or folder (cli-run) + napper check <file> Validate a .nap or .naplist file (cli-check) + napper generate openapi <spec> --output-dir <dir> Generate .nap files from OpenAPI spec (cli-generate) + napper help Show this help Options: - --env <name> Environment name (loads .napenv.<name>) - --var <key=value> Variable override (repeatable) - --output <format> Output: pretty (default), junit, json + --env <name> Environment name (loads .napenv.<name>) (cli-env) + --var <key=value> Variable override (repeatable) (cli-var) + --output <format> Output: pretty, junit, json, ndjson (cli-output) + --output-dir <dir> Output directory for generate command (cli-output-dir) + --verbose Enable debug-level logging (cli-verbose) ``` | Exit Code | Meaning | @@ -204,7 +207,7 @@ Options: | Git-friendly files | Yes | JSON blobs | Yes | Yes | | Assertions | Declarative + scripts | JS scripts | JS scripts | None | | Full scripting language | F# + C# (.fsx/.csx) | Sandboxed JS | Sandboxed JS | None | -| CI/CD output formats | JUnit, TAP, JSON | Via Newman | Via CLI | None | +| CI/CD output formats | JUnit, JSON, NDJSON | Via Newman | Via CLI | None | | Test Explorer | Native | No | No | No | | Free & open source | Yes | Freemium | Yes | Yes | | No account required | Yes | Account needed | Yes | Yes | diff --git a/src/Nap.VsCode/eslint-rules.cjs b/src/Napper.VsCode/eslint-rules.cjs similarity index 100% rename from src/Nap.VsCode/eslint-rules.cjs rename to src/Napper.VsCode/eslint-rules.cjs diff --git a/src/Nap.VsCode/eslint.config.mjs b/src/Napper.VsCode/eslint.config.mjs similarity index 100% rename from src/Nap.VsCode/eslint.config.mjs rename to src/Napper.VsCode/eslint.config.mjs diff --git 
a/src/Nap.VsCode/language-configuration.json b/src/Napper.VsCode/language-configuration.json similarity index 100% rename from src/Nap.VsCode/language-configuration.json rename to src/Napper.VsCode/language-configuration.json diff --git a/src/Nap.VsCode/media/logo.png b/src/Napper.VsCode/media/logo.png similarity index 100% rename from src/Nap.VsCode/media/logo.png rename to src/Napper.VsCode/media/logo.png diff --git a/src/Nap.VsCode/media/napper-icon.png b/src/Napper.VsCode/media/napper-icon.png similarity index 100% rename from src/Nap.VsCode/media/napper-icon.png rename to src/Napper.VsCode/media/napper-icon.png diff --git a/src/Nap.VsCode/media/screenshot.png b/src/Napper.VsCode/media/screenshot.png similarity index 100% rename from src/Nap.VsCode/media/screenshot.png rename to src/Napper.VsCode/media/screenshot.png diff --git a/src/Nap.VsCode/package-lock.json b/src/Napper.VsCode/package-lock.json similarity index 99% rename from src/Nap.VsCode/package-lock.json rename to src/Napper.VsCode/package-lock.json index 3c7be9a..492e7c9 100644 --- a/src/Nap.VsCode/package-lock.json +++ b/src/Napper.VsCode/package-lock.json @@ -20,6 +20,7 @@ "eslint": "^10.0.2", "glob": "^13.0.6", "mocha": "^11.7.5", + "prettier": "^3.8.1", "ts-loader": "^9.5.4", "typescript": "^5.9.3", "typescript-eslint": "^8.56.1", @@ -5807,6 +5808,22 @@ "node": ">= 0.8.0" } }, + "node_modules/prettier": { + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.8.1.tgz", + "integrity": "sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", diff --git 
a/src/Nap.VsCode/package.json b/src/Napper.VsCode/package.json similarity index 92% rename from src/Nap.VsCode/package.json rename to src/Napper.VsCode/package.json index add9565..1b4d484 100644 --- a/src/Nap.VsCode/package.json +++ b/src/Napper.VsCode/package.json @@ -2,7 +2,7 @@ "name": "napper", "displayName": "Napper", "description": "CLI-first, test-oriented HTTP API testing tool. Send requests, run assertions, manage environments.", - "version": "0.9.0", + "version": "0.6.0", "publisher": "nimblesite", "license": "MIT", "repository": { @@ -34,7 +34,8 @@ "activationEvents": [ "workspaceContains:**/*.nap", "workspaceContains:**/*.naplist", - "workspaceContains:**/*.napenv" + "workspaceContains:**/*.napenv", + "workspaceContains:**/*.http" ], "main": "./dist/extension.js", "contributes": { @@ -189,6 +190,15 @@ "command": "napper.enrichWithAi", "title": "Enrich with AI", "icon": "$(sparkle)" + }, + { + "command": "napper.convertHttpFile", + "title": "Napper: Convert .http to .nap", + "icon": "$(file-add)" + }, + { + "command": "napper.convertHttpDirectory", + "title": "Napper: Convert .http Directory to .nap" } ], "menus": { @@ -270,6 +280,13 @@ "when": "view == napperExplorer && viewItem =~ /scriptFile|playlist/", "group": "9_delete" } + ], + "explorer/context": [ + { + "command": "napper.convertHttpFile", + "when": "resourceExtname == .http || resourceExtname == .rest", + "group": "navigation" + } ] }, "configuration": { @@ -317,6 +334,8 @@ "test": "vscode-test", "test:unit": "npm run compile:tests && c8 mocha out/test/unit/**/*.test.js --ui tdd --timeout 5000", "lint": "eslint src", + "format:check": "prettier --check \"src/**/*.ts\"", + "format": "prettier --write \"src/**/*.ts\"", "package": "vsce package" }, "devDependencies": { @@ -331,6 +350,7 @@ "eslint": "^10.0.2", "glob": "^13.0.6", "mocha": "^11.7.5", + "prettier": "^3.8.1", "ts-loader": "^9.5.4", "typescript": "^5.9.3", "typescript-eslint": "^8.56.1", diff --git 
a/src/Napper.VsCode/src/cliInstaller.ts b/src/Napper.VsCode/src/cliInstaller.ts
new file mode 100644
index 0000000..1e57522
--- /dev/null
+++ b/src/Napper.VsCode/src/cliInstaller.ts
@@ -0,0 +1,347 @@
// Specs: vscode-impl
// CLI Installer — downloads the platform-matching release binary with SHA-256
// checksum verification, and falls back to a global dotnet tool when the
// binary route is unavailable.
// Decoupled from the vscode SDK — all configuration arrives as parameters.

import * as crypto from 'crypto';
import * as fs from 'fs';
import * as https from 'https';
import * as os from 'os';
import * as path from 'path';
import { execFile } from 'child_process';
import { type Result, err, ok } from './types';
import {
  CLI_ARCH_ARM64,
  CLI_ARCH_X64,
  CLI_ASSET_PREFIX,
  CLI_BINARY_NAME,
  CLI_BIN_DIR,
  CLI_CHECKSUM_MISMATCH_MSG,
  CLI_CHECKSUM_NOT_FOUND_MSG,
  CLI_CHECKSUMS_FILE,
  CLI_DOTNET_CMD,
  CLI_DOTNET_FALLBACK_MSG,
  CLI_DOTNET_INSTALL_ERROR_PREFIX,
  CLI_DOTNET_TOOL_INSTALL_TIMEOUT,
  CLI_DOWNLOAD_BASE_URL,
  CLI_DOWNLOAD_ERROR_PREFIX,
  CLI_FILE_MODE_EXECUTABLE,
  CLI_MAX_REDIRECTS,
  CLI_PLATFORM_DARWIN,
  CLI_PLATFORM_LINUX,
  CLI_PLATFORM_WIN32,
  CLI_REDIRECT_ERROR,
  CLI_RID_LINUX_X64,
  CLI_RID_OSX_ARM64,
  CLI_RID_OSX_X64,
  CLI_RID_WIN_X64,
  CLI_TOO_MANY_REDIRECTS,
  CLI_TOOL_ARG,
  CLI_TOOL_GLOBAL_FLAG,
  CLI_TOOL_INSTALL_ARG,
  CLI_TOOL_LIST_ARG,
  CLI_TOOL_UPDATE_ARG,
  CLI_TOOL_VERSION_FLAG,
  CLI_UNSUPPORTED_PLATFORM_MSG,
  CLI_VERSION_CHECK_ERROR,
  CLI_VERSION_CHECK_TIMEOUT,
  CLI_VERSION_FLAG,
  CLI_WIN_EXE_SUFFIX,
} from './constants';

// ── Platform detection ──────────────────────────────────────────────

// "platform-arch" (as reported by Node's os module) → .NET runtime identifier.
const PLATFORM_RID_MAP: ReadonlyMap<string, string> = new Map([
  [`${CLI_PLATFORM_DARWIN}-${CLI_ARCH_ARM64}`, CLI_RID_OSX_ARM64],
  [`${CLI_PLATFORM_DARWIN}-${CLI_ARCH_X64}`, CLI_RID_OSX_X64],
  [`${CLI_PLATFORM_LINUX}-${CLI_ARCH_X64}`, CLI_RID_LINUX_X64],
  [`${CLI_PLATFORM_WIN32}-${CLI_ARCH_X64}`, CLI_RID_WIN_X64],
]);

// Resolves the current machine to a runtime identifier, or an error for
// combinations no binary is published for (e.g. linux-arm64).
const platformToRid = (): Result<string, string> => {
  const key = `${os.platform()}-${os.arch()}`;
  const rid = PLATFORM_RID_MAP.get(key);
  if (rid === undefined) {
    return err(`${CLI_UNSUPPORTED_PLATFORM_MSG}${key}`);
  }
  return ok(rid);
};

// Release asset filename for a runtime identifier (Windows carries ".exe").
const assetName = (rid: string): string => {
  const suffix = rid === CLI_RID_WIN_X64 ? CLI_WIN_EXE_SUFFIX : '';
  return `${CLI_ASSET_PREFIX}${rid}${suffix}`;
};

// Filename of the binary once installed on this machine.
const localBinaryName = (): string => {
  const suffix = os.platform() === CLI_PLATFORM_WIN32 ? CLI_WIN_EXE_SUFFIX : '';
  return `${CLI_BINARY_NAME}${suffix}`;
};

// ── Version check ───────────────────────────────────────────────────

// Runs `<cli> --version` and resolves with the trimmed stdout.
// Missing binary, timeout, or non-zero exit surface as a Result error.
export const getCliVersion = async (cliPath: string): Promise<Result<string, string>> =>
  new Promise((resolve) => {
    const onExit = (error: Error | null, stdout: string): void => {
      if (error === null) {
        resolve(ok(stdout.trim()));
      } else {
        resolve(err(`${CLI_VERSION_CHECK_ERROR}${error.message}`));
      }
    };
    execFile(cliPath, [CLI_VERSION_FLAG], { timeout: CLI_VERSION_CHECK_TIMEOUT }, onExit);
  });

// ── HTTPS download with redirect following ──────────────────────────

import type * as http from 'http';

type ResultResolver = (value: Result<Buffer, string>) => void;

// Buffers an entire response body and resolves with it (or the stream error).
const collectBody = (response: http.IncomingMessage, resolve: ResultResolver): void => {
  const pieces: Buffer[] = [];
  response.on('data', (piece: Buffer) => {
    pieces.push(piece);
  });
  response.on('end', () => {
    resolve(ok(Buffer.concat(pieces)));
  });
  response.on('error', (streamError) => {
    resolve(err(streamError.message));
  });
};

interface HttpGetResult {
  readonly response: http.IncomingMessage;
  readonly status: number;
}

// Performs a single GET (no redirect handling) and reports the status code.
const httpsGetOnce = async (url: string): Promise<Result<HttpGetResult, string>> =>
  new Promise((resolve) => {
    const request = https.get(url, { headers: { 'User-Agent': CLI_BINARY_NAME } }, (response) => {
      const status = response.statusCode ?? 0;
      resolve(ok({ response, status }));
    });
    request.on('error', (requestError) => {
      resolve(err(requestError.message));
    });
  });
0 })); + }) + .on('error', (e) => { + resolve(err(e.message)); + }); + }); + +const resolveRedirect = (response: http.IncomingMessage): Result<string, string> => { + response.resume(); + const { location } = response.headers; + return location !== undefined && location !== '' ? ok(location) : err(CLI_REDIRECT_ERROR); +}; + +const handleNon200 = ( + response: http.IncomingMessage, + status: number, +): Result<http.IncomingMessage, string> => { + response.resume(); + return err(`${CLI_DOWNLOAD_ERROR_PREFIX}HTTP ${String(status)}`); +}; + +const followRedirects = async ( + url: string, + depth: number, +): Promise<Result<http.IncomingMessage, string>> => { + if (depth > CLI_MAX_REDIRECTS) { + return err(CLI_TOO_MANY_REDIRECTS); + } + const result = await httpsGetOnce(url); + if (!result.ok) { + return err(result.error); + } + const { response, status } = result.value; + if (status >= 300 && status < 400) { + const loc = resolveRedirect(response); + return loc.ok ? followRedirects(loc.value, depth + 1) : err(loc.error); + } + return status === 200 ? ok(response) : handleNon200(response, status); +}; + +const downloadFile = async (url: string): Promise<Result<Buffer, string>> => { + const result = await followRedirects(url, 0); + if (!result.ok) { + return err(result.error); + } + return new Promise((resolve) => { + collectBody(result.value, resolve); + }); +}; + +// ── Checksum verification ─────────────────────────────────────────── + +const verifyChecksum = ( + data: Buffer, + checksumFileContent: string, + asset: string, +): Result<void, string> => { + const line = checksumFileContent.split('\n').find((l) => l.includes(asset)); + + if (line === undefined) { + return err(CLI_CHECKSUM_NOT_FOUND_MSG); + } + + const expectedHash = line.split(/\s+/)[0]?.toLowerCase() ?? '', + actualHash = crypto.createHash('sha256').update(data).digest('hex'); + + return actualHash === expectedHash + ? 
ok(undefined) + : err(`${CLI_CHECKSUM_MISMATCH_MSG} — expected ${expectedHash}, got ${actualHash}`); +}; + +// ── Binary download + verify ──────────────────────────────────────── + +const buildDownloadUrls = ( + version: string, + rid: string, +): { readonly binaryUrl: string; readonly checksumUrl: string; readonly asset: string } => { + const asset = assetName(rid), + tag = `v${version}`; + return { + binaryUrl: `${CLI_DOWNLOAD_BASE_URL}/${tag}/${asset}`, + checksumUrl: `${CLI_DOWNLOAD_BASE_URL}/${tag}/${CLI_CHECKSUMS_FILE}`, + asset, + }; +}; + +const writeBinaryToDisk = (destPath: string, data: Buffer): void => { + const dir = path.dirname(destPath); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } + fs.writeFileSync(destPath, data); + if (os.platform() !== CLI_PLATFORM_WIN32) { + fs.chmodSync(destPath, CLI_FILE_MODE_EXECUTABLE); + } +}; + +const downloadPair = async ( + version: string, + rid: string, +): Promise< + Result<{ readonly binary: Buffer; readonly checksum: Buffer; readonly asset: string }, string> +> => { + const { binaryUrl, checksumUrl, asset } = buildDownloadUrls(version, rid), + [binaryResult, checksumResult] = await Promise.all([ + downloadFile(binaryUrl), + downloadFile(checksumUrl), + ]); + if (!binaryResult.ok) { + return err(`${CLI_DOWNLOAD_ERROR_PREFIX}${binaryResult.error}`); + } + if (!checksumResult.ok) { + return err(`${CLI_DOWNLOAD_ERROR_PREFIX}checksums: ${checksumResult.error}`); + } + return ok({ binary: binaryResult.value, checksum: checksumResult.value, asset }); +}; + +const fetchAndVerify = async ( + version: string, + rid: string, +): Promise<Result<{ readonly data: Buffer; readonly asset: string }, string>> => { + const dlResult = await downloadPair(version, rid); + if (!dlResult.ok) { + return err(dlResult.error); + } + const { binary, checksum, asset } = dlResult.value, + verifyResult = verifyChecksum(binary, checksum.toString('utf-8'), asset); + return verifyResult.ok ? 
ok({ data: binary, asset }) : err(verifyResult.error); +}; + +const downloadAndVerifyBinary = async ( + version: string, + destPath: string, +): Promise<Result<void, string>> => { + const ridResult = platformToRid(); + if (!ridResult.ok) { + return err(ridResult.error); + } + const fetchResult = await fetchAndVerify(version, ridResult.value); + if (!fetchResult.ok) { + return err(fetchResult.error); + } + writeBinaryToDisk(destPath, fetchResult.value.data); + return ok(undefined); +}; + +// ── Dotnet tool fallback ──────────────────────────────────────────── + +const parseToolVersion = (stdout: string): Result<string, string> => { + const line = stdout.split('\n').find((l) => l.toLowerCase().startsWith(CLI_BINARY_NAME)); + if (line === undefined) { + return err('not installed'); + } + const parts = line.split(/\s+/); + return ok(parts[1] ?? ''); +}; + +const isToolInstalled = async (): Promise<Result<string, string>> => + new Promise((resolve) => { + execFile( + CLI_DOTNET_CMD, + [CLI_TOOL_ARG, CLI_TOOL_LIST_ARG, CLI_TOOL_GLOBAL_FLAG], + { timeout: CLI_VERSION_CHECK_TIMEOUT }, + (error: Error | null, stdout: string) => { + if (error !== null) { + resolve(err(error.message)); + return; + } + resolve(parseToolVersion(stdout)); + }, + ); + }); + +const runDotnetTool = async (action: string, version: string): Promise<Result<void, string>> => + new Promise((resolve) => { + execFile( + CLI_DOTNET_CMD, + [CLI_TOOL_ARG, action, CLI_TOOL_GLOBAL_FLAG, CLI_BINARY_NAME, CLI_TOOL_VERSION_FLAG, version], + { timeout: CLI_DOTNET_TOOL_INSTALL_TIMEOUT }, + (error: Error | null, _stdout: string, stderr: string) => { + if (error !== null) { + resolve(err(`${CLI_DOTNET_INSTALL_ERROR_PREFIX}${stderr || error.message}`)); + return; + } + resolve(ok(undefined)); + }, + ); + }); + +const installViaDotnetTool = async (version: string): Promise<Result<void, string>> => { + const existing = await isToolInstalled(), + action = existing.ok ? 
CLI_TOOL_UPDATE_ARG : CLI_TOOL_INSTALL_ARG; + return runDotnetTool(action, version); +}; + +// ── Public API ────────────────────────────────────────────────────── + +export interface DownloadBinaryParams { + readonly version: string; + readonly storageDir: string; + readonly log: (msg: string) => void; +} + +export const installedBinaryPath = (dir: string): string => + path.join(dir, CLI_BIN_DIR, localBinaryName()); + +export const downloadBinary = async ( + params: DownloadBinaryParams, +): Promise<Result<string, string>> => { + const destPath = installedBinaryPath(params.storageDir); + params.log(`Downloading binary v${params.version}...`); + const downloadResult = await downloadAndVerifyBinary(params.version, destPath); + if (!downloadResult.ok) { + return err(downloadResult.error); + } + params.log(`Binary written to ${destPath}`); + return ok(destPath); +}; + +export const installDotnetTool = async ( + params: DownloadBinaryParams, +): Promise<Result<void, string>> => { + params.log(CLI_DOTNET_FALLBACK_MSG); + return installViaDotnetTool(params.version); +}; diff --git a/src/Napper.VsCode/src/cliRunner.ts b/src/Napper.VsCode/src/cliRunner.ts new file mode 100644 index 0000000..2997283 --- /dev/null +++ b/src/Napper.VsCode/src/cliRunner.ts @@ -0,0 +1,221 @@ +// Specs: vscode-impl +// Runs the Napper CLI as a subprocess and parses JSON results +// Decoupled from vscode SDK — takes config values as parameters + +import { execFile, spawn } from 'child_process'; +import { + CLI_CMD_CHECK, + CLI_CMD_RUN, + CLI_FLAG_ENV, + CLI_FLAG_OUTPUT, + CLI_OUTPUT_JSON, + CLI_OUTPUT_NDJSON, + CLI_PARSE_FAILED_PREFIX, + CLI_SPAWN_FAILED_PREFIX, + DEFAULT_CLI_PATH, +} from './constants'; +import { type Result, type RunResult, err, ok } from './types'; + +const MAX_PREVIEW_LENGTH = 200; + +interface RunOptions { + readonly cliPath: string; + readonly filePath: string; + readonly env?: string | undefined; + readonly vars?: readonly string[]; + readonly cwd: string; +} + +const 
appendEnvArgs = (args: string[], env: string | undefined): void => { + if (env !== undefined && env !== '') { + args.push(CLI_FLAG_ENV, env); + } + }, + buildArgs = (options: RunOptions): readonly string[] => { + const args: string[] = [CLI_CMD_RUN, options.filePath, CLI_FLAG_OUTPUT, CLI_OUTPUT_JSON]; + appendEnvArgs(args, options.env); + return args; + }, + parseJsonOutput = (stdout: string): Result<readonly RunResult[], string> => { + try { + const parsed: unknown = JSON.parse(stdout); + if (Array.isArray(parsed)) { + // validated: JSON.parse produced an array; elements typed at consumption + return ok(parsed); + } + return ok([parsed as RunResult]); + } catch { + return err(`${CLI_PARSE_FAILED_PREFIX}${stdout.slice(0, MAX_PREVIEW_LENGTH)}`); + } + }, + formatSpawnError = (cliPath: string, error: Error, stderr: string): string => { + const code = 'code' in error ? ` (${String(error.code)})` : '', + stderrSuffix = stderr.length > 0 ? ` — ${stderr}` : ''; + return `${CLI_SPAWN_FAILED_PREFIX}${cliPath}${code}${stderrSuffix}`; + }, + spawnCli = async ( + cliPath: string, + args: readonly string[], + cwd: string, + ): Promise<Result<readonly RunResult[], string>> => + new Promise((resolve) => { + execFile( + cliPath, + [...args], + { cwd, timeout: 30_000, env: { ...process.env } }, + (error, stdout, stderr) => { + if (error !== null && stdout.length === 0) { + resolve(err(formatSpawnError(cliPath, error, stderr))); + return; + } + resolve(parseJsonOutput(stdout)); + }, + ); + }), + resolveCliPath = (cliPath: string): string => (cliPath.length > 0 ? 
cliPath : DEFAULT_CLI_PATH); + +export const runCli = async ( + options: RunOptions, +): Promise<Result<readonly RunResult[], string>> => { + const cliPath = resolveCliPath(options.cliPath), + args = buildArgs(options); + return spawnCli(cliPath, args, options.cwd); +}; + +interface StreamOptions { + readonly cliPath: string; + readonly filePath: string; + readonly env?: string | undefined; + readonly cwd: string; + readonly onResult: (result: RunResult) => void; + readonly onDone: (error?: string) => void; +} + +const buildStreamArgs = (options: StreamOptions): readonly string[] => { + const args: string[] = [CLI_CMD_RUN, options.filePath, CLI_FLAG_OUTPUT, CLI_OUTPUT_NDJSON]; + appendEnvArgs(args, options.env); + return args; + }, + parseLine = (line: string): Result<RunResult, string> => { + try { + return ok(JSON.parse(line)); + } catch { + return err(`${CLI_PARSE_FAILED_PREFIX}${line.slice(0, MAX_PREVIEW_LENGTH)}`); + } + }, + emitParsedLine = (trimmed: string, onResult: (result: RunResult) => void): void => { + const parsed = parseLine(trimmed); + if (parsed.ok) { + onResult(parsed.value); + } + }, + processChunk = (buffer: string, chunk: Buffer, onResult: (result: RunResult) => void): string => { + const combined = buffer + chunk.toString(), + lines = combined.split('\n'), + remainder = lines.pop() ?? ''; + for (const line of lines) { + const trimmed = line.trim(); + if (trimmed.length > 0) { + emitParsedLine(trimmed, onResult); + } + } + return remainder; + }; + +interface FlushContext { + readonly buffer: string; + readonly onResult: (result: RunResult) => void; + readonly stderrOutput: string; + readonly onDone: (error?: string) => void; +} + +const flushAndFinish = (ctx: FlushContext): void => { + const remaining = ctx.buffer.trim(); + if (remaining.length > 0) { + emitParsedLine(remaining, ctx.onResult); + } + ctx.onDone(ctx.stderrOutput.length > 0 ? 
ctx.stderrOutput : undefined); +}; + +interface StreamState { + buffer: string; + stderrOutput: string; + finished: boolean; +} + +interface StreamListenerContext { + readonly child: ReturnType<typeof spawn>; + readonly state: StreamState; + readonly options: StreamOptions; + readonly cliPath: string; +} + +const attachDataListeners = (ctx: StreamListenerContext): void => { + ctx.child.stdout?.on('data', (chunk: Buffer) => { + ctx.state.buffer = processChunk(ctx.state.buffer, chunk, ctx.options.onResult); + }); + ctx.child.stderr?.on('data', (chunk: Buffer) => { + ctx.state.stderrOutput += chunk.toString(); + }); + }, + handleClose = (ctx: StreamListenerContext): void => { + if (ctx.state.finished) { + return; + } + ctx.state.finished = true; + flushAndFinish({ + buffer: ctx.state.buffer, + onResult: ctx.options.onResult, + stderrOutput: ctx.state.stderrOutput, + onDone: ctx.options.onDone, + }); + }, + handleError = (ctx: StreamListenerContext, error: Error): void => { + if (ctx.state.finished) { + return; + } + ctx.state.finished = true; + ctx.options.onDone(`${CLI_SPAWN_FAILED_PREFIX}${ctx.cliPath} — ${error.message}`); + }, + attachLifecycleListeners = (ctx: StreamListenerContext): void => { + ctx.child.on('close', () => { + handleClose(ctx); + }); + ctx.child.on('error', (error) => { + handleError(ctx, error); + }); + }; + +export const streamCli = (options: StreamOptions): void => { + const cliPath = resolveCliPath(options.cliPath), + args = buildStreamArgs(options), + child = spawn(cliPath, [...args], { + cwd: options.cwd, + env: { ...process.env }, + }), + state: StreamState = { buffer: '', stderrOutput: '', finished: false }, + ctx: StreamListenerContext = { child, state, options, cliPath }; + attachDataListeners(ctx); + attachLifecycleListeners(ctx); +}; + +export const checkFile = async ( + cliPath: string, + filePath: string, + cwd: string, +): Promise<Result<string, string>> => + new Promise((resolve) => { + const cmd = resolveCliPath(cliPath); + 
diff --git a/src/Napper.VsCode/src/codeLensProvider.ts b/src/Napper.VsCode/src/codeLensProvider.ts
new file mode 100644
index 0000000..18dabe7
--- /dev/null
+++ b/src/Napper.VsCode/src/codeLensProvider.ts
@@ -0,0 +1,109 @@
// Specs: vscode-codelens, vscode-commands
// CodeLens provider for .nap and .naplist files.
// Surfaces "Run" / "Copy as curl" above request sections, "Run Playlist"
// above [meta] in playlists, and a convert action on .http/.rest files.

import * as vscode from 'vscode';
import {
  CMD_CONVERT_HTTP_FILE,
  CMD_COPY_CURL,
  CMD_RUN_FILE,
  CONVERT_HTTP_CODELENS_TITLE,
  HTTP_FILE_EXTENSION,
  HTTP_METHODS,
  NAPLIST_EXTENSION,
  NAP_EXTENSION,
  REST_FILE_EXTENSION,
  SECTION_META,
  SECTION_REQUEST,
} from './constants';

const RUN_LENS_TITLE = '$(play) Run';
const COPY_CURL_TITLE = '$(clippy) Copy as curl';
const RUN_PLAYLIST_TITLE = '$(play) Run Playlist';

// Lens that runs the file containing the range.
const makeRunLens = (range: vscode.Range, uri: vscode.Uri): vscode.CodeLens =>
  new vscode.CodeLens(range, {
    title: RUN_LENS_TITLE,
    command: CMD_RUN_FILE,
    arguments: [uri],
  });

// Lens that copies the request as a curl command.
const makeCurlLens = (range: vscode.Range, uri: vscode.Uri): vscode.CodeLens =>
  new vscode.CodeLens(range, {
    title: COPY_CURL_TITLE,
    command: CMD_COPY_CURL,
    arguments: [uri],
  });

// True when the line opens with an HTTP verb followed by a space ("GET ...").
const isShorthandMethod = (line: string): boolean =>
  HTTP_METHODS.some((verb) => line.startsWith(`${verb} `));

// Run + curl lens pairs: one pair for a shorthand first line, one pair per
// [request] section header.
const buildRequestLenses = (document: vscode.TextDocument): vscode.CodeLens[] => {
  const targets: vscode.Range[] = [];
  const firstLine = document.lineAt(0).text.trim();

  if (isShorthandMethod(firstLine)) {
    targets.push(new vscode.Range(0, 0, 0, firstLine.length));
  }

  for (let row = 0; row < document.lineCount; row++) {
    const text = document.lineAt(row).text.trim();
    if (text === SECTION_REQUEST) {
      targets.push(new vscode.Range(row, 0, row, text.length));
    }
  }

  return targets.flatMap((range) => [
    makeRunLens(range, document.uri),
    makeCurlLens(range, document.uri),
  ]);
};

const isHttpFile = (fileName: string): boolean =>
  fileName.endsWith(HTTP_FILE_EXTENSION) || fileName.endsWith(REST_FILE_EXTENSION);

// Single "convert" lens pinned to the top of an .http/.rest document.
const buildHttpLenses = (document: vscode.TextDocument): vscode.CodeLens[] => [
  new vscode.CodeLens(new vscode.Range(0, 0, 0, 0), {
    title: CONVERT_HTTP_CODELENS_TITLE,
    command: CMD_CONVERT_HTTP_FILE,
    arguments: [document.uri],
  }),
];

// "Run Playlist" lens above every [meta] section header.
const buildPlaylistLenses = (document: vscode.TextDocument): vscode.CodeLens[] => {
  const lenses: vscode.CodeLens[] = [];

  for (let row = 0; row < document.lineCount; row++) {
    const text = document.lineAt(row).text.trim();
    if (text !== SECTION_META) {
      continue;
    }
    lenses.push(
      new vscode.CodeLens(new vscode.Range(row, 0, row, text.length), {
        title: RUN_PLAYLIST_TITLE,
        command: CMD_RUN_FILE,
        arguments: [document.uri],
      }),
    );
  }

  return lenses;
};

export class CodeLensProvider implements vscode.CodeLensProvider {
  private readonly _onDidChangeCodeLenses = new vscode.EventEmitter<void>();
  readonly onDidChangeCodeLenses = this._onDidChangeCodeLenses.event;

  // Dispatches on file extension; unknown document types yield no lenses.
  provideCodeLenses(document: vscode.TextDocument): vscode.CodeLens[] {
    const { fileName } = document;
    if (fileName.endsWith(NAP_EXTENSION)) {
      return buildRequestLenses(document);
    }
    if (fileName.endsWith(NAPLIST_EXTENSION)) {
      return buildPlaylistLenses(document);
    }
    if (isHttpFile(fileName)) {
      return buildHttpLenses(document);
    }
    return [];
  }
}
diff --git a/src/Napper.VsCode/src/constants.ts b/src/Napper.VsCode/src/constants.ts
new file mode 100644
index 0000000..a43335e
--- /dev/null
+++ b/src/Napper.VsCode/src/constants.ts
@@ -0,0 +1,413 @@
// 
All string constants in one location — no literals elsewhere + +// File extensions +export const NAP_EXTENSION = '.nap'; +export const NAPLIST_EXTENSION = '.naplist'; +export const NAPENV_EXTENSION = '.napenv'; +export const NAPENV_LOCAL_SUFFIX = '.local'; +export const FSX_EXTENSION = '.fsx'; +export const CSX_EXTENSION = '.csx'; + +// Glob patterns +export const NAP_GLOB = '**/*.nap'; +export const NAPLIST_GLOB = '**/*.naplist'; +export const NAPENV_GLOB = '**/.napenv*'; + +// View IDs +export const VIEW_EXPLORER = 'napperExplorer'; + +// Command IDs +export const CMD_RUN_FILE = 'napper.runFile'; +export const CMD_RUN_ALL = 'napper.runAll'; +export const CMD_NEW_REQUEST = 'napper.newRequest'; +export const CMD_NEW_PLAYLIST = 'napper.newPlaylist'; +export const CMD_SWITCH_ENV = 'napper.switchEnvironment'; +export const CMD_COPY_CURL = 'napper.copyAsCurl'; +export const CMD_OPEN_RESPONSE = 'napper.openResponse'; +export const CMD_SAVE_REPORT = 'napper.savePlaylistReport'; + +// Config keys +export const CONFIG_SECTION = 'napper'; +export const CONFIG_DEFAULT_ENV = 'defaultEnvironment'; +export const CONFIG_AUTO_RUN = 'autoRunOnSave'; +export const CONFIG_SPLIT_LAYOUT = 'splitEditorLayout'; +export const CONFIG_MASK_SECRETS = 'maskSecretsInPreview'; +export const CONFIG_CLI_PATH = 'cliPath'; + +// CLI defaults +export const DEFAULT_CLI_PATH = 'napper'; +export const CLI_OUTPUT_JSON = 'json'; +export const CLI_OUTPUT_NDJSON = 'ndjson'; +export const CLI_CMD_RUN = 'run'; +export const CLI_CMD_CHECK = 'check'; +export const CLI_CMD_GENERATE = 'generate'; +export const CLI_SUBCMD_OPENAPI = 'openapi'; +export const CLI_FLAG_OUTPUT = '--output'; +export const CLI_FLAG_ENV = '--env'; +export const CLI_FLAG_VAR = '--var'; +export const CLI_FLAG_OUTPUT_DIR = '--output-dir'; + +// Context values for tree items +export const CONTEXT_REQUEST_FILE = 'requestFile'; +export const CONTEXT_PLAYLIST = 'playlist'; +export const CONTEXT_FOLDER = 'folder'; +export const 
CONTEXT_PLAYLIST_SECTION = 'playlistSection'; +export const CONTEXT_SCRIPT_FILE = 'scriptFile'; + +// Labels +export const PLAYLIST_SECTION_LABEL = 'Playlists'; + +// Icons +export const ICON_PLAYLIST_SECTION = 'list-tree'; +export const ICON_PLAYLIST_FILE = 'list-ordered'; +export const ICON_IDLE = 'circle-outline'; +export const ICON_RUNNING = 'loading~spin'; +export const ICON_PASSED = 'pass'; +export const ICON_FAILED = 'error'; +export const ICON_ERROR = 'warning'; +export const ICON_IMPORT_OPENAPI = 'cloud-download'; + +// Badge decorations (single-char for file decorations) +export const BADGE_PASSED = '\u2713'; +export const BADGE_FAILED = '\u2717'; +export const BADGE_ERROR = '!'; + +// Section headers in .nap files +export const SECTION_REQUEST = '[request]'; +export const SECTION_META = '[meta]'; +export const SECTION_STEPS = '[steps]'; + +// Status bar +export const STATUS_BAR_PREFIX = 'Napper: '; +export const STATUS_BAR_NO_ENV = 'No Environment'; +export const STATUS_BAR_PRIORITY = 100; + +// Theme colors for run state icons +export const THEME_COLOR_PASSED = 'testing.iconPassed'; +export const THEME_COLOR_FAILED = 'testing.iconFailed'; +export const THEME_COLOR_ERROR = 'problemsWarningIcon.foreground'; + +// Response panel +export const RESPONSE_PANEL_TITLE = 'Napper Response'; +export const RESPONSE_PANEL_VIEW_TYPE = 'napperResponse'; +export const SECTION_LABEL_REQUEST_HEADERS = 'Request Headers'; +export const SECTION_LABEL_RESPONSE_HEADERS = 'Response Headers'; +export const SECTION_LABEL_BODY = 'Body'; +export const SECTION_LABEL_ASSERTIONS = 'Assertions'; +export const SECTION_LABEL_OUTPUT = 'Output'; +export const SECTION_LABEL_ERROR = 'Error'; +export const SECTION_LABEL_REQUEST = 'Request'; +export const SECTION_LABEL_RESPONSE = 'Response'; +export const NO_REQUEST_HEADERS = 'No request headers'; +export const SECTION_LABEL_REQUEST_BODY = 'Request Body'; + +// Playlist panel +export const PLAYLIST_PANEL_TITLE = 'Napper Playlist'; +export 
const PLAYLIST_PANEL_VIEW_TYPE = 'napperPlaylist'; + +// Webview message types +export const MSG_ADD_RESULT = 'addResult'; +export const MSG_RUN_COMPLETE = 'runComplete'; +export const MSG_RUN_ERROR = 'runError'; +export const MSG_SAVE_REPORT = 'saveReport'; + +// Report +export const REPORT_FILE_EXTENSION = '.html'; +export const REPORT_FILE_SUFFIX = '-report'; +export const REPORT_SAVED_MSG = 'Report saved: '; + +// CLI error messages +export const CLI_SPAWN_FAILED_PREFIX = 'Failed to run CLI: '; +export const CLI_PARSE_FAILED_PREFIX = 'Failed to parse CLI JSON: '; +export const CLI_ERROR_PREFIX = 'Napper CLI error: '; + +// Status bar running +export const STATUS_RUNNING_ICON = '$(loading~spin) Running '; +export const STATUS_RUNNING_SUFFIX = '...'; + +// Curl +export const CURL_CMD_PREFIX = 'curl -X '; + +// File creation +export const REQUEST_NAME_SUFFIX = '-request'; + +// Nap file content formatting +export const NAP_NAME_KEY_PREFIX = 'name = "'; +export const NAP_NAME_KEY_SUFFIX = '"'; + +// Property keys +export const PROP_FILE_PATH = 'filePath'; + +// CLI installer (binary download) +export const CLI_BINARY_NAME = 'napper'; +export const CLI_BIN_DIR = 'bin'; +export const CLI_DOWNLOAD_REPO = 'MelbourneDeveloper/napper'; +export const CLI_DOWNLOAD_BASE_URL = + 'https://github.com/MelbourneDeveloper/napper/releases/download'; +export const CLI_CHECKSUMS_FILE = 'checksums-sha256.txt'; +export const CLI_ASSET_PREFIX = 'napper-'; +export const CLI_WIN_EXE_SUFFIX = '.exe'; +export const CLI_PLATFORM_DARWIN = 'darwin'; +export const CLI_PLATFORM_LINUX = 'linux'; +export const CLI_PLATFORM_WIN32 = 'win32'; +export const CLI_ARCH_ARM64 = 'arm64'; +export const CLI_ARCH_X64 = 'x64'; +export const CLI_RID_OSX_ARM64 = 'osx-arm64'; +export const CLI_RID_OSX_X64 = 'osx-x64'; +export const CLI_RID_LINUX_X64 = 'linux-x64'; +export const CLI_RID_WIN_X64 = 'win-x64'; +export const CLI_UNSUPPORTED_PLATFORM_MSG = 'Unsupported platform: '; +export const 
CLI_DOWNLOAD_ERROR_PREFIX = 'Binary download failed: '; +export const CLI_CHECKSUM_MISMATCH_MSG = 'SHA256 checksum mismatch'; +export const CLI_CHECKSUM_NOT_FOUND_MSG = 'Asset not found in checksums file'; +export const CLI_FILE_MODE_EXECUTABLE = 0o755; +export const CLI_MAX_REDIRECTS = 5; +export const CLI_TOO_MANY_REDIRECTS = 'Too many redirects'; +export const CLI_REDIRECT_ERROR = 'Redirect with no location header'; + +// CLI installer (dotnet tool fallback) +export const CLI_DOTNET_CMD = 'dotnet'; +export const CLI_TOOL_ARG = 'tool'; +export const CLI_TOOL_INSTALL_ARG = 'install'; +export const CLI_TOOL_UPDATE_ARG = 'update'; +export const CLI_TOOL_LIST_ARG = 'list'; +export const CLI_TOOL_GLOBAL_FLAG = '-g'; +export const CLI_TOOL_VERSION_FLAG = '--version'; +export const CLI_DOTNET_TOOL_INSTALL_TIMEOUT = 60000; +export const CLI_DOTNET_FALLBACK_MSG = 'Binary install failed, falling back to dotnet tool'; +export const CLI_DOTNET_INSTALL_ERROR_PREFIX = 'dotnet tool install failed: '; + +// CLI installer (shared) +export const CLI_INSTALL_MSG = 'Installing Napper CLI...'; +export const CLI_INSTALL_COMPLETE_MSG = 'Napper CLI installed successfully'; +export const CLI_INSTALL_FAILED_MSG = 'Failed to install Napper CLI: '; +export const CLI_VERSION_FLAG = '--version'; +export const CLI_VERSION_CHECK_TIMEOUT = 5000; +export const CLI_VERSION_CHECK_ERROR = 'Failed to check CLI version: '; +export const CLI_VERSION_MISMATCH_MSG = 'CLI version mismatch — re-installing'; + +// VSCode built-in commands +export const CMD_VSCODE_OPEN = 'vscode.open'; + +// Layout options +export const LAYOUT_BESIDE = 'beside'; +export const LAYOUT_BELOW = 'below'; + +// Encoding +export const ENCODING_UTF8 = 'utf-8'; + +// Language IDs +export const LANG_NAP = 'nap'; +export const LANG_NAPLIST = 'naplist'; + +// UI messages +export const MSG_NO_FILE_SELECTED = 'No .nap or .naplist file selected'; +export const MSG_COPIED = 'Copied to clipboard'; +export const MSG_NO_RESPONSE = 'No response 
to show. Run a request first.'; + +// UI prompts +export const PROMPT_SELECT_METHOD = 'Select HTTP method'; +export const PROMPT_ENTER_URL = 'Enter request URL'; +export const PROMPT_REQUEST_NAME = 'Request file name'; +export const PROMPT_PLAYLIST_NAME = 'Playlist name'; +export const PROMPT_SELECT_ENV = 'Select Napper environment'; + +// Default values +export const PLACEHOLDER_URL = 'https://api.example.com/resource'; +export const DEFAULT_PLAYLIST_NAME = 'new-playlist'; +export const DEFAULT_METHOD = 'GET'; + +// .nap file keys +export const NAP_KEY_METHOD = 'method'; +export const NAP_KEY_URL = 'url'; + +// HTTP methods +export const HTTP_METHODS = ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'HEAD', 'OPTIONS'] as const; + +// Branding +export const NAPPER_URL = 'https://napperapi.dev'; +export const NIMBLESITE_URL = 'https://nimblesite.co'; +export const REPORT_FOOTER_GENERATED_BY = 'Generated by'; +export const REPORT_FOOTER_MADE_BY = 'Made by'; + +// .nap file sections (additional) +export const SECTION_REQUEST_HEADERS = '[request.headers]'; +export const SECTION_REQUEST_BODY = '[request.body]'; +export const SECTION_ASSERT = '[assert]'; +export const SECTION_VARS = '[vars]'; + +// .nap file content +export const NAP_TRIPLE_QUOTE = '"""'; +export const HEADER_CONTENT_TYPE = 'Content-Type'; +export const HEADER_ACCEPT = 'Accept'; +export const CONTENT_TYPE_JSON = 'application/json'; +export const ASSERT_STATUS_PREFIX = 'status = '; +export const ASSERT_BODY_EXISTS_SUFFIX = ' exists'; +export const ASSERT_BODY_PREFIX = 'body.'; +export const NAP_KEY_NAME = 'name'; +export const NAP_KEY_DESCRIPTION = 'description'; +export const NAP_KEY_GENERATED = 'generated'; +export const NAP_VALUE_TRUE = 'true'; +export const BASE_URL_VAR = '{{baseUrl}}'; +export const BASE_URL_KEY = 'baseUrl'; +export const VARS_PLACEHOLDER = 'REPLACE_ME'; + +// OpenAPI generator — commands +export const CMD_IMPORT_OPENAPI_URL = 'napper.importOpenApiUrl'; +export const 
CMD_IMPORT_OPENAPI_FILE = 'napper.importOpenApiFile'; +export const OPENAPI_PICK_FILE = 'Select OpenAPI specification file'; +export const OPENAPI_PICK_FOLDER = 'Select output folder'; +export const OPENAPI_SUCCESS_PREFIX = 'Generated '; +export const OPENAPI_SUCCESS_SUFFIX = ' test files from OpenAPI spec'; +export const OPENAPI_ERROR_PREFIX = 'Failed to import OpenAPI: '; +export const OPENAPI_FILTER_LABEL = 'OpenAPI Spec'; +export const OPENAPI_FILE_EXTENSIONS = ['json', 'yaml', 'yml']; +export const OPENAPI_URL_PROMPT = 'Enter OpenAPI specification URL'; +export const OPENAPI_URL_PLACEHOLDER = 'https://petstore3.swagger.io/api/v3/openapi.json'; +export const OPENAPI_DOWNLOAD_FAILED_PREFIX = 'Failed to download spec: '; +export const OPENAPI_DOWNLOADING = 'Downloading OpenAPI spec...'; +export const ICON_IMPORT_OPENAPI_FILE = 'file-symlink-file'; + +// OpenAPI generator — validation +export const OPENAPI_INVALID_SPEC = 'Invalid OpenAPI specification: missing paths'; +export const OPENAPI_NO_ENDPOINTS = 'No endpoints found in specification'; +export const OPENAPI_PARSE_ERROR = 'Failed to parse JSON'; + +// OpenAPI generator — spec fields +export const HTTPS_SCHEME = 'https'; +export const DEFAULT_BASE_URL = 'https://api.example.com'; +export const OPENAPI_DEFAULT_TITLE = 'API Tests'; +export const PARAM_IN_BODY = 'body'; +export const PARAM_IN_QUERY = 'query'; +export const PARAM_IN_PATH = 'path'; +export const AUTH_BEARER_PREFIX = 'Authorization = Bearer '; +export const AUTH_BASIC_PREFIX = 'Authorization = Basic '; +export const SECURITY_TYPE_HTTP = 'http'; +export const SECURITY_SCHEME_BEARER = 'bearer'; +export const SECURITY_SCHEME_BASIC = 'basic'; +export const SECURITY_TYPE_API_KEY = 'apiKey'; +export const SECURITY_LOCATION_HEADER = 'header'; +export const SECURITY_LOCATION_QUERY = 'query'; + +// OpenAPI generator — HTTP methods (lowercase for spec parsing) +export const OPENAPI_HTTP_METHODS = [ + 'get', + 'post', + 'put', + 'patch', + 'delete', + 'head', 
+ 'options', +] as const; + +// JSON Schema types +export const SCHEMA_TYPE_STRING = 'string'; +export const SCHEMA_TYPE_NUMBER = 'number'; +export const SCHEMA_TYPE_INTEGER = 'integer'; +export const SCHEMA_TYPE_BOOLEAN = 'boolean'; +export const SCHEMA_TYPE_ARRAY = 'array'; +export const SCHEMA_TYPE_OBJECT = 'object'; +export const SCHEMA_EXAMPLE_STRING = 'example'; + +// Logging +export const LOG_CHANNEL_NAME = 'Napper'; +export const LOG_PREFIX_INFO = 'INFO'; +export const LOG_PREFIX_WARN = 'WARN'; +export const LOG_PREFIX_ERROR = 'ERROR'; +export const LOG_PREFIX_DEBUG = 'DEBUG'; +export const LOG_MSG_ACTIVATED = 'Extension activated'; +export const LOG_MSG_DEACTIVATED = 'Extension deactivated'; +export const LOG_MSG_RUN_FILE = 'Running file:'; +export const LOG_MSG_RUN_PLAYLIST = 'Running playlist:'; +export const LOG_MSG_CLI_RESULT_COUNT = 'CLI returned results:'; +export const LOG_MSG_CLI_SPAWN_ERROR = 'CLI spawn error:'; +export const LOG_MSG_STREAM_RESULT = 'Stream result:'; +export const LOG_MSG_STREAM_DONE = 'Stream completed'; +export const LOG_MSG_TREE_REFRESH = 'Explorer tree refresh'; +export const LOG_MSG_OPENAPI_IMPORT = 'OpenAPI import:'; +export const LOG_MSG_OPENAPI_URL_FETCH = 'OpenAPI URL fetch:'; +export const LOG_MSG_OPENAPI_URL_DOWNLOAD_OK = 'OpenAPI URL download succeeded, content length:'; +export const LOG_MSG_OPENAPI_URL_DOWNLOAD_FAIL = 'OpenAPI URL download failed:'; +export const LOG_MSG_OPENAPI_SPEC_SAVED = 'OpenAPI spec saved to:'; +export const LOG_MSG_OPENAPI_AI_CHOICE = 'OpenAPI AI choice:'; +export const LOG_MSG_OPENAPI_AI_NO_MODEL = 'No Copilot model available for AI enhancement'; +export const LOG_MSG_OPENAPI_AI_MODEL_SELECTED = 'Copilot model selected for AI enhancement:'; +export const LOG_MSG_OPENAPI_GENERATE_CLI = 'OpenAPI generate CLI call:'; +export const LOG_MSG_OPENAPI_GENERATE_RESULT = 'OpenAPI generate result:'; + +// AI enrichment +export const OPENAPI_AI_CHOICE_TITLE = 'How should tests be generated?'; +export 
const OPENAPI_AI_CHOICE_BASIC = 'Generate tests'; +export const OPENAPI_AI_CHOICE_ENHANCED = 'Generate with AI enhancement'; +export const OPENAPI_AI_PROGRESS_TITLE = 'Enhancing with AI...'; +export const OPENAPI_AI_NO_COPILOT = 'GitHub Copilot not available for AI enhancement'; +export const OPENAPI_AI_COPILOT_FAMILY = 'copilot-gpt-4o'; +export const OPENAPI_AI_ENRICHING_ASSERTIONS = 'Enriching assertions'; +export const OPENAPI_AI_ENRICHING_TEST_DATA = 'Enriching test data'; +export const OPENAPI_AI_REORDERING_PLAYLIST = 'Reordering playlist'; + +// Context menu command IDs +export const CMD_ADD_TO_PLAYLIST = 'napper.addToPlaylist'; +export const CMD_PERF_TEST = 'napper.performanceTest'; +export const CMD_DELETE_FILE = 'napper.deleteFile'; +export const CMD_ADD_NAP_TO_PLAYLIST = 'napper.addNapToPlaylist'; +export const CMD_ADD_SCRIPT_TO_PLAYLIST = 'napper.addScriptToPlaylist'; +export const CMD_DUPLICATE_PLAYLIST = 'napper.duplicatePlaylist'; +export const CMD_COPY_PATH = 'napper.copyPath'; +export const CMD_ENRICH_AI = 'napper.enrichWithAi'; + +// Context menu prompts +export const PROMPT_SELECT_PLAYLIST = 'Select a playlist to add this script to'; +export const PROMPT_SELECT_NAP_FILE = 'Select a .nap file to add'; +export const PROMPT_SELECT_SCRIPT_FILE = 'Select a script file to add'; +export const PROMPT_CONFIRM_DELETE_PREFIX = 'Are you sure you want to delete "'; +export const PROMPT_CONFIRM_DELETE_SUFFIX = '"?'; +export const PROMPT_DUPLICATE_NAME = 'Enter name for the duplicated playlist'; +export const CONFIRM_YES = 'Yes'; +export const CONFIRM_NO = 'No'; + +// Context menu messages +export const MSG_ADDED_TO_PLAYLIST = 'Added to playlist: '; +export const MSG_FILE_DELETED = 'Deleted: '; +export const MSG_PLAYLIST_DUPLICATED = 'Duplicated playlist: '; +export const MSG_PATH_COPIED = 'Path copied to clipboard'; +export const MSG_PERF_TEST_COMING_SOON = 'Performance Test: Coming soon'; +export const MSG_NO_PLAYLISTS = 'No .naplist files found in workspace'; 
+export const MSG_NO_NAP_FILES = 'No .nap files found in workspace'; +export const MSG_NO_SCRIPT_FILES = 'No script files found in workspace'; + +// Glob patterns for context menu pickers +export const SCRIPT_GLOB = '**/*.{fsx,csx}'; + +// Playlist duplication +export const DUPLICATE_SUFFIX = '-copy'; + +// .http file conversion +export const HTTP_FILE_EXTENSION = '.http'; +export const REST_FILE_EXTENSION = '.rest'; +export const HTTP_FILE_GLOB = '**/*.http'; +export const CLI_CMD_CONVERT = 'convert'; +export const CLI_SUBCMD_HTTP = 'http'; +export const CMD_CONVERT_HTTP_FILE = 'napper.convertHttpFile'; +export const CMD_CONVERT_HTTP_DIR = 'napper.convertHttpDirectory'; +export const CONVERT_HTTP_PICK_FILE = 'Select .http file to convert'; +export const CONVERT_HTTP_PICK_DIR = 'Select directory containing .http files'; +export const CONVERT_HTTP_FILTER_LABEL = 'HTTP Files'; +export const CONVERT_HTTP_FILE_EXTENSIONS = ['http', 'rest']; +export const CONVERT_HTTP_SUCCESS_PREFIX = 'Converted '; +export const CONVERT_HTTP_SUCCESS_SUFFIX = ' requests to .nap files'; +export const CONVERT_HTTP_ERROR_PREFIX = 'Failed to convert .http: '; +export const CONVERT_HTTP_NO_FILES = 'No .http or .rest files found'; +export const LOG_MSG_CONVERT_HTTP = 'Convert .http:'; +export const LOG_MSG_CONVERT_HTTP_RESULT = 'Convert .http result:'; +export const CONVERT_HTTP_CODELENS_TITLE = '$(file-add) Convert to .nap'; + +// Numeric thresholds +export const PERCENTAGE_MULTIPLIER = 100; +export const HTTP_STATUS_OK = 200; +export const HTTP_STATUS_REDIRECT_MIN = 300; +export const HTTP_STATUS_CLIENT_ERROR_MIN = 400; +export const JSON_INDENT_SIZE = 2; +export const PAD_DIGITS_DEFAULT = 2; +export const PAD_DIGITS_LARGE = 3; +export const PAD_LARGE_THRESHOLD = 100; diff --git a/src/Napper.VsCode/src/contextMenuCommands.ts b/src/Napper.VsCode/src/contextMenuCommands.ts new file mode 100644 index 0000000..1b47ee7 --- /dev/null +++ b/src/Napper.VsCode/src/contextMenuCommands.ts @@ -0,0 
+1,281 @@ +// Specs: vscode-commands, vscode-explorer, vscode-playlists +// Context menu command handlers for tree view items +// Scripts: Add to Playlist, Performance Test, Delete +// Playlists: Add .nap, Add Script, Delete, Duplicate, Copy Path + +import * as vscode from 'vscode'; +import * as path from 'path'; +import * as fs from 'fs'; +import type { ExplorerAdapter } from './explorerAdapter'; +import { appendStepToPlaylist, updatePlaylistName } from './explorerProvider'; +import { + CMD_ADD_NAP_TO_PLAYLIST, + CMD_ADD_SCRIPT_TO_PLAYLIST, + CMD_ADD_TO_PLAYLIST, + CMD_COPY_PATH, + CMD_DELETE_FILE, + CMD_DUPLICATE_PLAYLIST, + CMD_PERF_TEST, + CONFIRM_NO, + CONFIRM_YES, + DUPLICATE_SUFFIX, + ENCODING_UTF8, + MSG_ADDED_TO_PLAYLIST, + MSG_FILE_DELETED, + MSG_NO_NAP_FILES, + MSG_NO_PLAYLISTS, + MSG_NO_SCRIPT_FILES, + MSG_PATH_COPIED, + MSG_PERF_TEST_COMING_SOON, + MSG_PLAYLIST_DUPLICATED, + NAPLIST_EXTENSION, + NAPLIST_GLOB, + NAP_GLOB, + PROMPT_CONFIRM_DELETE_PREFIX, + PROMPT_CONFIRM_DELETE_SUFFIX, + PROMPT_DUPLICATE_NAME, + PROMPT_SELECT_NAP_FILE, + PROMPT_SELECT_PLAYLIST, + PROMPT_SELECT_SCRIPT_FILE, + SCRIPT_GLOB, +} from './constants'; + +interface FilePickItem extends vscode.QuickPickItem { + readonly uri: vscode.Uri; +} + +const workspaceRoot = (): string | undefined => vscode.workspace.workspaceFolders?.[0]?.uri.fsPath, + toPickItems = (uris: readonly vscode.Uri[], root: string): readonly FilePickItem[] => + uris.map((uri) => ({ + label: path.relative(root, uri.fsPath), + uri, + })), + writeStepToPlaylist = async ( + playlistPath: string, + pickedFilePath: string, + explorer: ExplorerAdapter, + ): Promise<void> => { + const playlistDir = path.dirname(playlistPath), + relStep = path.relative(playlistDir, pickedFilePath), + content = fs.readFileSync(playlistPath, ENCODING_UTF8), + updated = appendStepToPlaylist(content, relStep); + await vscode.workspace.fs.writeFile( + vscode.Uri.file(playlistPath), + Buffer.from(updated, ENCODING_UTF8), + ); + await 
vscode.window.showInformationMessage( + `${MSG_ADDED_TO_PLAYLIST}${path.basename(playlistPath)}`, + ); + explorer.refresh(); + }, + pickFileFromGlob = async ( + glob: string, + prompt: string, + emptyMsg: string, + ): Promise<FilePickItem | undefined> => { + const files = await vscode.workspace.findFiles(glob); + if (files.length === 0) { + await vscode.window.showInformationMessage(emptyMsg); + return undefined; + } + const root = workspaceRoot(); + if (root === undefined) { + return undefined; + } + return vscode.window.showQuickPick(toPickItems(files, root), { placeHolder: prompt }); + }, + addFileToPlaylist = async ({ + playlistPath, + glob, + prompt, + emptyMsg, + explorer, + }: { + readonly playlistPath: string; + readonly glob: string; + readonly prompt: string; + readonly emptyMsg: string; + readonly explorer: ExplorerAdapter; + }): Promise<void> => { + const picked = await pickFileFromGlob(glob, prompt, emptyMsg); + if (picked === undefined) { + return; + } + await writeStepToPlaylist(playlistPath, picked.uri.fsPath, explorer); + }; + +export const addToPlaylist = async (filePath: string, explorer: ExplorerAdapter): Promise<void> => { + const picked = await pickFileFromGlob(NAPLIST_GLOB, PROMPT_SELECT_PLAYLIST, MSG_NO_PLAYLISTS); + if (picked === undefined) { + return; + } + await writeStepToPlaylist(picked.uri.fsPath, filePath, explorer); +}; + +export const performanceTest = async (): Promise<void> => { + await vscode.window.showInformationMessage(MSG_PERF_TEST_COMING_SOON); +}; + +const confirmDelete = async (fileName: string): Promise<boolean> => { + const answer = await vscode.window.showWarningMessage( + `${PROMPT_CONFIRM_DELETE_PREFIX}${fileName}${PROMPT_CONFIRM_DELETE_SUFFIX}`, + CONFIRM_YES, + CONFIRM_NO, + ); + return answer === CONFIRM_YES; +}; + +export const deleteFile = async (filePath: string, explorer: ExplorerAdapter): Promise<void> => { + const fileName = path.basename(filePath), + confirmed = await confirmDelete(fileName); + if 
(!confirmed) { + return; + } + await vscode.workspace.fs.delete(vscode.Uri.file(filePath)); + await vscode.window.showInformationMessage(`${MSG_FILE_DELETED}${fileName}`); + explorer.refresh(); +}; + +export const addNapToPlaylist = async ( + playlistPath: string, + explorer: ExplorerAdapter, +): Promise<void> => { + await addFileToPlaylist({ + playlistPath, + glob: NAP_GLOB, + prompt: PROMPT_SELECT_NAP_FILE, + emptyMsg: MSG_NO_NAP_FILES, + explorer, + }); +}; + +export const addScriptToPlaylist = async ( + playlistPath: string, + explorer: ExplorerAdapter, +): Promise<void> => { + await addFileToPlaylist({ + playlistPath, + glob: SCRIPT_GLOB, + prompt: PROMPT_SELECT_SCRIPT_FILE, + emptyMsg: MSG_NO_SCRIPT_FILES, + explorer, + }); +}; + +interface DuplicateContext { + readonly newPath: string; + readonly content: string; + readonly newName: string; + readonly explorer: ExplorerAdapter; +} + +const writeDuplicate = async (ctx: DuplicateContext): Promise<void> => { + await vscode.workspace.fs.writeFile( + vscode.Uri.file(ctx.newPath), + Buffer.from(ctx.content, ENCODING_UTF8), + ); + const doc = await vscode.workspace.openTextDocument(ctx.newPath); + await vscode.window.showTextDocument(doc); + await vscode.window.showInformationMessage(`${MSG_PLAYLIST_DUPLICATED}${ctx.newName}`); + ctx.explorer.refresh(); +}; + +export const duplicatePlaylist = async ( + playlistPath: string, + explorer: ExplorerAdapter, +): Promise<void> => { + const baseName = path.basename(playlistPath, NAPLIST_EXTENSION), + newName = await vscode.window.showInputBox({ + prompt: PROMPT_DUPLICATE_NAME, + value: `${baseName}${DUPLICATE_SUFFIX}`, + }); + if (newName === undefined) { + return; + } + const newPath = path.join(path.dirname(playlistPath), `${newName}${NAPLIST_EXTENSION}`), + content = fs.readFileSync(playlistPath, ENCODING_UTF8), + updated = updatePlaylistName(content, newName); + await writeDuplicate({ newPath, content: updated, newName, explorer }); +}; + +export const copyPath = async 
(filePath: string): Promise<void> => { + await vscode.env.clipboard.writeText(filePath); + await vscode.window.showInformationMessage(MSG_PATH_COPIED); +}; + +interface NodeArg { + readonly filePath?: string; +} + +const withFilePath = + (handler: (fp: string) => Promise<void>): ((arg?: NodeArg) => Promise<void>) => + async (arg?: NodeArg): Promise<void> => { + const fp = arg?.filePath; + if (fp !== undefined) { + await handler(fp); + } + }, + registerScriptCommands = (context: vscode.ExtensionContext, explorer: ExplorerAdapter): void => { + context.subscriptions.push( + vscode.commands.registerCommand( + CMD_ADD_TO_PLAYLIST, + withFilePath(async (fp) => { + await addToPlaylist(fp, explorer); + }), + ), + vscode.commands.registerCommand(CMD_PERF_TEST, performanceTest), + vscode.commands.registerCommand( + CMD_DELETE_FILE, + withFilePath(async (fp) => { + await deleteFile(fp, explorer); + }), + ), + ); + }, + registerPlaylistAddCommands = ( + context: vscode.ExtensionContext, + explorer: ExplorerAdapter, + ): void => { + context.subscriptions.push( + vscode.commands.registerCommand( + CMD_ADD_NAP_TO_PLAYLIST, + withFilePath(async (fp) => { + await addNapToPlaylist(fp, explorer); + }), + ), + vscode.commands.registerCommand( + CMD_ADD_SCRIPT_TO_PLAYLIST, + withFilePath(async (fp) => { + await addScriptToPlaylist(fp, explorer); + }), + ), + ); + }, + registerPlaylistEditCommands = ( + context: vscode.ExtensionContext, + explorer: ExplorerAdapter, + ): void => { + context.subscriptions.push( + vscode.commands.registerCommand( + CMD_DUPLICATE_PLAYLIST, + withFilePath(async (fp) => { + await duplicatePlaylist(fp, explorer); + }), + ), + vscode.commands.registerCommand( + CMD_COPY_PATH, + withFilePath(async (fp) => { + await copyPath(fp); + }), + ), + ); + }; + +export const registerContextMenuCommands = ( + context: vscode.ExtensionContext, + explorer: ExplorerAdapter, +): void => { + registerScriptCommands(context, explorer); + registerPlaylistAddCommands(context, 
explorer);
+    registerPlaylistEditCommands(context, explorer);
+};
diff --git a/src/Napper.VsCode/src/curlCopy.ts b/src/Napper.VsCode/src/curlCopy.ts
new file mode 100644
index 0000000..874a56e
--- /dev/null
+++ b/src/Napper.VsCode/src/curlCopy.ts
@@ -0,0 +1,92 @@
+// Specs: vscode-commands
+// Curl copy command — copyAsCurl and parsing helpers
+// Extracted from extension.ts to keep files under 450 LOC
+
+import * as vscode from 'vscode';
+import {
+    CURL_CMD_PREFIX,
+    DEFAULT_METHOD,
+    HTTP_METHODS,
+    MSG_COPIED,
+    NAP_KEY_METHOD,
+    NAP_KEY_URL,
+} from './constants';
+
+const EQUALS_CHAR = '=',
+    SPACE_CHAR = ' ',
+    SINGLE_QUOTE = "'",
+    // POSIX shell idiom for a literal single quote inside a
+    // single-quoted string: close the quote, emit \', reopen it.
+    SHELL_QUOTE_ESCAPE = "'\\''",
+    // Everything after the first '=' on the line, trimmed; '' when absent.
+    valueAfterFirstEquals = (line: string): string => {
+        const eqIndex = line.indexOf(EQUALS_CHAR);
+        return eqIndex === -1 ? '' : line.slice(eqIndex + 1).trim();
+    },
+    // True only for a `<key> = <value>` line whose key is exactly `key`.
+    // A bare startsWith check would also match longer keys such as
+    // `method_override` or `url_template`, so compare the text before
+    // the first '=' instead.
+    isKeyAssignment = (trimmed: string, key: string): boolean => {
+        const eqIndex = trimmed.indexOf(EQUALS_CHAR);
+        return eqIndex !== -1 && trimmed.slice(0, eqIndex).trim() === key;
+    },
+    matchesHttpMethodLine = (trimmed: string, method: string): boolean =>
+        trimmed.startsWith(`${method}${SPACE_CHAR}`),
+    // Parses a `METHOD url` shorthand line; undefined when no method matches.
+    extractMethodFromLine = (
+        trimmed: string,
+    ): { readonly method: string; readonly url: string } | undefined => {
+        for (const m of HTTP_METHODS) {
+            if (matchesHttpMethodLine(trimmed, m)) {
+                return { method: m, url: trimmed.slice(m.length + 1).trim() };
+            }
+        }
+        return undefined;
+    },
+    // Folds one trimmed line into the accumulator, recognising the
+    // `METHOD url` shorthand plus `method = ...` / `url = ...` keys.
+    parseLine = (trimmed: string, current: { method: string; url: string }): void => {
+        const httpMatch = extractMethodFromLine(trimmed);
+        if (httpMatch !== undefined) {
+            current.method = httpMatch.method;
+            current.url = httpMatch.url;
+        }
+        if (isKeyAssignment(trimmed, NAP_KEY_METHOD)) {
+            current.method = valueAfterFirstEquals(trimmed);
+        }
+        if (isKeyAssignment(trimmed, NAP_KEY_URL)) {
+            current.url = valueAfterFirstEquals(trimmed);
+        }
+    };
+
+// Extracts the HTTP method and URL from .nap file text.
+// Later lines win; method defaults to GET and url to '' when absent.
+export const parseMethodAndUrl = (
+    text: string,
+): { readonly method: string; readonly url: string } => {
+    const result = { method: DEFAULT_METHOD, url: '' },
+        lines = text.split('\n');
+    for (const line of lines) {
+        parseLine(line.trim(), result);
+    }
+    return result;
+};
+
+// Copies a shell-safe `curl -X <method> '<url>'` command for the given
+// (or active-editor) .nap file to the clipboard. No-ops without a file.
+export const copyAsCurl = async (uri?: vscode.Uri): Promise<void> => {
+    const fileUri = uri ?? vscode.window.activeTextEditor?.document.uri;
+    if (fileUri === undefined) {
+        return;
+    }
+
+    const doc = await vscode.workspace.openTextDocument(fileUri),
+        { method, url } = parseMethodAndUrl(doc.getText()),
+        // Escape embedded single quotes so the command survives pasting
+        // into a POSIX shell unchanged.
+        safeUrl = url.split(SINGLE_QUOTE).join(SHELL_QUOTE_ESCAPE),
+        curl = `${CURL_CMD_PREFIX}${method} '${safeUrl}'`;
+    await vscode.env.clipboard.writeText(curl);
+    void vscode.window.showInformationMessage(MSG_COPIED);
+};
diff --git a/src/Napper.VsCode/src/editAndImportCommands.ts b/src/Napper.VsCode/src/editAndImportCommands.ts
new file mode 100644
index 0000000..8fcae66
--- /dev/null
+++ b/src/Napper.VsCode/src/editAndImportCommands.ts
@@ -0,0 +1,96 @@
+// Specs: vscode-commands
+// Edit, HTTP convert, and OpenAPI import command registrations
+
+import * as vscode from 'vscode';
+import * as path from 'path';
+import type { ExplorerAdapter } from './explorerAdapter';
+import type { EnvironmentStatusBar } from './environmentAdapter';
+import type { Logger } from './logger';
+import { newPlaylist, newRequest } from './fileCreation';
+import { copyAsCurl } from './curlCopy';
+import { importOpenApiFromFile, importOpenApiFromUrl, runAiEnrichment } from './openApiImport';
+import { type ConvertContext, convertHttpFile, convertHttpDirectory } from './httpConvert';
+import {
+    CMD_CONVERT_HTTP_DIR,
+    CMD_CONVERT_HTTP_FILE,
+    CMD_COPY_CURL,
+    CMD_ENRICH_AI,
+    CMD_IMPORT_OPENAPI_FILE,
+    CMD_IMPORT_OPENAPI_URL,
+    CMD_NEW_PLAYLIST,
+    CMD_NEW_REQUEST,
+    CMD_SWITCH_ENV,
+} from './constants';
+
+interface CommandDeps {
+    readonly explorer: ExplorerAdapter;
+    readonly envStatusBar: EnvironmentStatusBar;
+    readonly logger: Logger;
+    readonly getCliPath: () => string;
+}
+
+const handleEnrichAi = async (
+    arg: { readonly filePath?: string } | undefined,
+    explorer: ExplorerAdapter,
+    logger: Logger,
+): Promise<void> => {
+    const fp = arg?.filePath;
+    if (fp === undefined) {
+        return;
+    }
+    await runAiEnrichment(path.dirname(fp), logger);
+
explorer.refresh(); +}; + +export const registerEditCommands = (context: vscode.ExtensionContext, deps: CommandDeps): void => { + context.subscriptions.push( + vscode.commands.registerCommand(CMD_NEW_REQUEST, async () => { + await newRequest(deps.explorer); + }), + vscode.commands.registerCommand(CMD_NEW_PLAYLIST, async () => { + await newPlaylist(deps.explorer); + }), + vscode.commands.registerCommand(CMD_SWITCH_ENV, async () => { + await deps.envStatusBar.showPicker(); + }), + vscode.commands.registerCommand(CMD_COPY_CURL, copyAsCurl), + ); +}; + +const toConvertContext = (deps: CommandDeps): ConvertContext => ({ + explorer: deps.explorer, + logger: deps.logger, + getCliPath: deps.getCliPath, +}); + +export const registerHttpConvertCommands = ( + context: vscode.ExtensionContext, + deps: CommandDeps, +): void => { + const ctx = toConvertContext(deps); + context.subscriptions.push( + vscode.commands.registerCommand(CMD_CONVERT_HTTP_FILE, async (uri?: vscode.Uri) => { + await convertHttpFile(ctx, uri); + }), + vscode.commands.registerCommand(CMD_CONVERT_HTTP_DIR, async () => { + await convertHttpDirectory(ctx); + }), + ); +}; + +export const registerOpenApiCommands = ( + context: vscode.ExtensionContext, + deps: CommandDeps, +): void => { + context.subscriptions.push( + vscode.commands.registerCommand(CMD_IMPORT_OPENAPI_URL, async () => { + await importOpenApiFromUrl(deps.explorer, deps.logger, deps.getCliPath); + }), + vscode.commands.registerCommand(CMD_IMPORT_OPENAPI_FILE, async () => { + await importOpenApiFromFile(deps.explorer, deps.logger, deps.getCliPath); + }), + vscode.commands.registerCommand(CMD_ENRICH_AI, async (arg?: { readonly filePath?: string }) => { + await handleEnrichAi(arg, deps.explorer, deps.logger); + }), + ); +}; diff --git a/src/Nap.VsCode/src/environmentAdapter.ts b/src/Napper.VsCode/src/environmentAdapter.ts similarity index 71% rename from src/Nap.VsCode/src/environmentAdapter.ts rename to src/Napper.VsCode/src/environmentAdapter.ts 
index 82896d1..aeeb02d 100644 --- a/src/Nap.VsCode/src/environmentAdapter.ts +++ b/src/Napper.VsCode/src/environmentAdapter.ts @@ -1,8 +1,9 @@ +// Specs: vscode-env-switcher, vscode-impl // VSCode adapter for the environment switcher // Status bar item and quick pick integration -import * as vscode from "vscode"; -import { detectEnvironments } from "./environmentSwitcher"; +import * as vscode from 'vscode'; +import { detectEnvironments } from './environmentSwitcher'; import { CMD_SWITCH_ENV, CONFIG_DEFAULT_ENV, @@ -12,7 +13,7 @@ import { STATUS_BAR_NO_ENV, STATUS_BAR_PREFIX, STATUS_BAR_PRIORITY, -} from "./constants"; +} from './constants'; export class EnvironmentStatusBar implements vscode.Disposable { private readonly _statusBarItem: vscode.StatusBarItem; @@ -22,12 +23,12 @@ export class EnvironmentStatusBar implements vscode.Disposable { constructor() { this._statusBarItem = vscode.window.createStatusBarItem( vscode.StatusBarAlignment.Left, - STATUS_BAR_PRIORITY + STATUS_BAR_PRIORITY, ); this._statusBarItem.command = CMD_SWITCH_ENV; const config = vscode.workspace.getConfiguration(CONFIG_SECTION); - this._currentEnv = config.get<string>(CONFIG_DEFAULT_ENV, ""); + this._currentEnv = config.get<string>(CONFIG_DEFAULT_ENV, ''); this._updateLabel(); this._statusBarItem.show(); @@ -36,10 +37,10 @@ export class EnvironmentStatusBar implements vscode.Disposable { vscode.workspace.onDidChangeConfiguration((e) => { if (e.affectsConfiguration(`${CONFIG_SECTION}.${CONFIG_DEFAULT_ENV}`)) { const cfg = vscode.workspace.getConfiguration(CONFIG_SECTION); - this._currentEnv = cfg.get<string>(CONFIG_DEFAULT_ENV, ""); + this._currentEnv = cfg.get<string>(CONFIG_DEFAULT_ENV, ''); this._updateLabel(); } - }) + }), ); } @@ -48,20 +49,15 @@ export class EnvironmentStatusBar implements vscode.Disposable { } async showPicker(): Promise<void> { - const files = await vscode.workspace.findFiles( - NAPENV_GLOB, - "**/node_modules/**" - ), - - envNames = detectEnvironments(files.map((f) => 
f.fsPath)), - items = envNames.map((name) => ({ - label: name, - picked: name === this._currentEnv, - })), - - selected = await vscode.window.showQuickPick(items, { - placeHolder: PROMPT_SELECT_ENV, - }); + const files = await vscode.workspace.findFiles(NAPENV_GLOB, '**/node_modules/**'), + envNames = detectEnvironments(files.map((f) => f.fsPath)), + items = envNames.map((name) => ({ + label: name, + picked: name === this._currentEnv, + })), + selected = await vscode.window.showQuickPick(items, { + placeHolder: PROMPT_SELECT_ENV, + }); if (selected) { await this._applySelection(selected.label); @@ -73,11 +69,7 @@ export class EnvironmentStatusBar implements vscode.Disposable { this._updateLabel(); const config = vscode.workspace.getConfiguration(CONFIG_SECTION); - await config.update( - CONFIG_DEFAULT_ENV, - this._currentEnv, - vscode.ConfigurationTarget.Workspace - ); + await config.update(CONFIG_DEFAULT_ENV, this._currentEnv, vscode.ConfigurationTarget.Workspace); } private _updateLabel(): void { diff --git a/src/Nap.VsCode/src/environmentSwitcher.ts b/src/Napper.VsCode/src/environmentSwitcher.ts similarity index 77% rename from src/Nap.VsCode/src/environmentSwitcher.ts rename to src/Napper.VsCode/src/environmentSwitcher.ts index 2b1fd1c..16488cc 100644 --- a/src/Nap.VsCode/src/environmentSwitcher.ts +++ b/src/Napper.VsCode/src/environmentSwitcher.ts @@ -1,8 +1,9 @@ +// Specs: vscode-env-switcher // Environment switcher — status bar item + quick pick // Decoupled: detection logic is pure, only the adapter touches vscode -import * as path from "path"; -import { NAPENV_EXTENSION, NAPENV_LOCAL_SUFFIX } from "./constants"; +import * as path from 'path'; +import { NAPENV_EXTENSION, NAPENV_LOCAL_SUFFIX } from './constants'; export const extractEnvName = (fileName: string): string | undefined => { const base = path.basename(fileName); @@ -22,9 +23,7 @@ export const extractEnvName = (fileName: string): string | undefined => { return undefined; }; -export const 
detectEnvironments = ( - filePaths: readonly string[] -): readonly string[] => { +export const detectEnvironments = (filePaths: readonly string[]): readonly string[] => { const envs: string[] = []; for (const fp of filePaths) { diff --git a/src/Nap.VsCode/src/explorerAdapter.ts b/src/Napper.VsCode/src/explorerAdapter.ts similarity index 60% rename from src/Nap.VsCode/src/explorerAdapter.ts rename to src/Napper.VsCode/src/explorerAdapter.ts index 1258365..abca64d 100644 --- a/src/Nap.VsCode/src/explorerAdapter.ts +++ b/src/Napper.VsCode/src/explorerAdapter.ts @@ -1,9 +1,10 @@ +// Specs: vscode-explorer, vscode-impl // VSCode adapter for the Explorer tree view // This is the only file that touches the vscode SDK for the explorer -import * as vscode from "vscode"; -import * as path from "path"; -import * as fs from "fs"; +import * as vscode from 'vscode'; +import * as path from 'path'; +import * as fs from 'fs'; import { type TreeNode, createFileNode, @@ -11,8 +12,8 @@ import { createPlaylistNode, createPlaylistSectionNode, parsePlaylistStepPaths, -} from "./explorerProvider"; -import { type RunResult, RunState } from "./types"; +} from './explorerProvider'; +import { type RunResult, RunState } from './types'; import { BADGE_ERROR, BADGE_FAILED, @@ -34,72 +35,57 @@ import { THEME_COLOR_ERROR, THEME_COLOR_FAILED, THEME_COLOR_PASSED, -} from "./constants"; - -const OPEN_COMMAND_TITLE = "Open", - EMPTY_STRING = "", - - RUN_STATE_ICONS: Record<RunState, string> = { - [RunState.Idle]: ICON_IDLE, - [RunState.Running]: ICON_RUNNING, - [RunState.Passed]: ICON_PASSED, - [RunState.Failed]: ICON_FAILED, - [RunState.Error]: ICON_ERROR, -}, - - RUN_STATE_COLORS: Record<RunState, string | undefined> = { - [RunState.Idle]: undefined, - [RunState.Running]: undefined, - [RunState.Passed]: THEME_COLOR_PASSED, - [RunState.Failed]: THEME_COLOR_FAILED, - [RunState.Error]: THEME_COLOR_ERROR, -}, - - hasChildren = (node: TreeNode): boolean => - node.isDirectory || (node.children !== 
undefined && node.children.length > 0), - - applyPlaylistSectionStyle = ( - item: vscode.TreeItem, -): void => { - item.iconPath = new vscode.ThemeIcon(ICON_PLAYLIST_SECTION); -}, - - applyDirectoryStyle = ( - item: vscode.TreeItem, - node: TreeNode, -): void => { - item.resourceUri = vscode.Uri.file(node.filePath); - item.iconPath = vscode.ThemeIcon.Folder; -}, - - runStateIcon = (state: RunState): vscode.ThemeIcon => { - const color = RUN_STATE_COLORS[state]; - return new vscode.ThemeIcon( - RUN_STATE_ICONS[state], - color !== undefined ? new vscode.ThemeColor(color) : undefined, - ); -}, - - applyFileStyle = ( - item: vscode.TreeItem, - node: TreeNode, -): void => { - item.resourceUri = vscode.Uri.file(node.filePath); - item.command = { - command: CMD_VSCODE_OPEN, - title: OPEN_COMMAND_TITLE, - arguments: [vscode.Uri.file(node.filePath)], +} from './constants'; + +const OPEN_COMMAND_TITLE = 'Open', + EMPTY_STRING = '', + RUN_STATE_ICONS: Record<RunState, string> = { + [RunState.Idle]: ICON_IDLE, + [RunState.Running]: ICON_RUNNING, + [RunState.Passed]: ICON_PASSED, + [RunState.Failed]: ICON_FAILED, + [RunState.Error]: ICON_ERROR, + }, + RUN_STATE_COLORS: Record<RunState, string | undefined> = { + [RunState.Idle]: undefined, + [RunState.Running]: undefined, + [RunState.Passed]: THEME_COLOR_PASSED, + [RunState.Failed]: THEME_COLOR_FAILED, + [RunState.Error]: THEME_COLOR_ERROR, + }, + hasChildren = (node: TreeNode): boolean => + node.isDirectory || (node.children !== undefined && node.children.length > 0), + applyPlaylistSectionStyle = (item: vscode.TreeItem): void => { + item.iconPath = new vscode.ThemeIcon(ICON_PLAYLIST_SECTION); + }, + applyDirectoryStyle = (item: vscode.TreeItem, node: TreeNode): void => { + item.resourceUri = vscode.Uri.file(node.filePath); + item.iconPath = vscode.ThemeIcon.Folder; + }, + runStateIcon = (state: RunState): vscode.ThemeIcon => { + const color = RUN_STATE_COLORS[state]; + return new vscode.ThemeIcon( + RUN_STATE_ICONS[state], + 
color !== undefined ? new vscode.ThemeColor(color) : undefined, + ); + }, + applyFileStyle = (item: vscode.TreeItem, node: TreeNode): void => { + item.resourceUri = vscode.Uri.file(node.filePath); + item.command = { + command: CMD_VSCODE_OPEN, + title: OPEN_COMMAND_TITLE, + arguments: [vscode.Uri.file(node.filePath)], + }; + if (node.contextValue === CONTEXT_PLAYLIST) { + item.iconPath = new vscode.ThemeIcon(ICON_PLAYLIST_FILE); + return; + } + if (node.contextValue === CONTEXT_SCRIPT_FILE) { + return; + } + item.description = node.httpMethod ?? EMPTY_STRING; + item.iconPath = runStateIcon(node.runState); }; - if (node.contextValue === CONTEXT_PLAYLIST) { - item.iconPath = new vscode.ThemeIcon(ICON_PLAYLIST_FILE); - return; - } - if (node.contextValue === CONTEXT_SCRIPT_FILE) { - return; - } - item.description = node.httpMethod ?? EMPTY_STRING; - item.iconPath = runStateIcon(node.runState); -}; class ExplorerTreeItem extends vscode.TreeItem { constructor(node: TreeNode) { @@ -140,9 +126,9 @@ function buildPlaylistStepNodes( results: ReadonlyMap<string, RunResult>, ): TreeNode[] { const content = fs.readFileSync(naplistPath, ENCODING_UTF8), - stepRelPaths = parsePlaylistStepPaths(content), - basePath = path.dirname(naplistPath), - stepNodes: TreeNode[] = []; + stepRelPaths = parsePlaylistStepPaths(content), + basePath = path.dirname(naplistPath), + stepNodes: TreeNode[] = []; for (const rel of stepRelPaths) { const node = buildStepNode(path.resolve(basePath, rel), results); if (node !== undefined) { @@ -152,35 +138,26 @@ function buildPlaylistStepNodes( return stepNodes; } -const makeDecoration = ( - badge: string, - color: string, - tooltip?: string, -): vscode.FileDecoration => - new vscode.FileDecoration(badge, tooltip, new vscode.ThemeColor(color)), - - runStateBadge = ( - result: RunResult, -): vscode.FileDecoration | undefined => { - if (result.error !== undefined) { - return makeDecoration(BADGE_ERROR, THEME_COLOR_ERROR, result.error); - } - return 
result.passed - ? makeDecoration(BADGE_PASSED, THEME_COLOR_PASSED) - : makeDecoration(BADGE_FAILED, THEME_COLOR_FAILED); -}; +const makeDecoration = (badge: string, color: string, tooltip?: string): vscode.FileDecoration => + new vscode.FileDecoration(badge, tooltip, new vscode.ThemeColor(color)), + runStateBadge = (result: RunResult): vscode.FileDecoration | undefined => { + if (result.error !== undefined) { + return makeDecoration(BADGE_ERROR, THEME_COLOR_ERROR, result.error); + } + return result.passed + ? makeDecoration(BADGE_PASSED, THEME_COLOR_PASSED) + : makeDecoration(BADGE_FAILED, THEME_COLOR_FAILED); + }; export class ExplorerAdapter - implements - vscode.TreeDataProvider<TreeNode>, - vscode.FileDecorationProvider + implements vscode.TreeDataProvider<TreeNode>, vscode.FileDecorationProvider { - private readonly _onDidChangeTreeData = - new vscode.EventEmitter<TreeNode | undefined>(); + private readonly _onDidChangeTreeData = new vscode.EventEmitter<TreeNode | undefined>(); readonly onDidChangeTreeData = this._onDidChangeTreeData.event; - private readonly _onDidChangeFileDecorations = - new vscode.EventEmitter<vscode.Uri | vscode.Uri[] | undefined>(); + private readonly _onDidChangeFileDecorations = new vscode.EventEmitter< + vscode.Uri | vscode.Uri[] | undefined + >(); readonly onDidChangeFileDecorations = this._onDidChangeFileDecorations.event; private readonly _results = new Map<string, RunResult>(); @@ -215,15 +192,15 @@ export class ExplorerAdapter getChildren(element?: TreeNode): TreeNode[] { const folders = vscode.workspace.workspaceFolders, - firstFolder = folders?.[0]; + firstFolder = folders?.[0]; if (firstFolder === undefined) { return []; } if (element === undefined) { const root = firstFolder.uri.fsPath, - fileTree = this._buildTree(root), - playlistSection = this._buildPlaylistSection(root); + fileTree = this._buildTree(root), + playlistSection = this._buildPlaylistSection(root); return [...fileTree, playlistSection]; } @@ -236,10 +213,10 @@ 
export class ExplorerAdapter private _buildPlaylistSection(rootPath: string): TreeNode { const naplistPaths = this._collectNaplistFiles(rootPath), - playlistNodes = naplistPaths.map((fp) => { - const stepNodes = buildPlaylistStepNodes(fp, this._results); - return createPlaylistNode(fp, this._results, stepNodes); - }); + playlistNodes = naplistPaths.map((fp) => { + const stepNodes = buildPlaylistStepNodes(fp, this._results); + return createPlaylistNode(fp, this._results, stepNodes); + }); return createPlaylistSectionNode(playlistNodes); } @@ -248,9 +225,9 @@ export class ExplorerAdapter return []; } const entries = fs.readdirSync(dirPath, { withFileTypes: true }), - results: string[] = []; + results: string[] = []; for (const entry of entries) { - if (entry.name.startsWith(".")) { + if (entry.name.startsWith('.')) { continue; } const fullPath = path.join(dirPath, entry.name); @@ -276,7 +253,7 @@ export class ExplorerAdapter private _sortedVisibleEntries(dirPath: string): fs.Dirent[] { const entries = fs.readdirSync(dirPath, { withFileTypes: true }); return entries - .filter((e) => !e.name.startsWith(".")) + .filter((e) => !e.name.startsWith('.')) .sort((a, b) => { if (a.isDirectory() && !b.isDirectory()) { return -1; @@ -288,15 +265,10 @@ export class ExplorerAdapter }); } - private _buildEntryNode( - entry: fs.Dirent, - fullPath: string, - ): TreeNode | undefined { + private _buildEntryNode(entry: fs.Dirent, fullPath: string): TreeNode | undefined { if (entry.isDirectory()) { const children = this._buildTree(fullPath); - return children.length > 0 - ? createFolderNode(fullPath, children) - : undefined; + return children.length > 0 ? 
createFolderNode(fullPath, children) : undefined; } if (entry.name.endsWith(NAPLIST_EXTENSION)) { return this._buildNaplistNode(fullPath); @@ -312,12 +284,9 @@ export class ExplorerAdapter return []; } const sorted = this._sortedVisibleEntries(dirPath), - nodes: TreeNode[] = []; + nodes: TreeNode[] = []; for (const entry of sorted) { - const node = this._buildEntryNode( - entry, - path.join(dirPath, entry.name), - ); + const node = this._buildEntryNode(entry, path.join(dirPath, entry.name)); if (node !== undefined) { nodes.push(node); } diff --git a/src/Napper.VsCode/src/explorerProvider.ts b/src/Napper.VsCode/src/explorerProvider.ts new file mode 100644 index 0000000..461c675 --- /dev/null +++ b/src/Napper.VsCode/src/explorerProvider.ts @@ -0,0 +1,187 @@ +// Specs: vscode-explorer, vscode-playlists +// Tree data provider for the Explorer view +// Shows .nap and .naplist files in workspace folder structure + +import * as path from 'path'; +import { + CONTEXT_FOLDER, + CONTEXT_PLAYLIST, + CONTEXT_PLAYLIST_SECTION, + CONTEXT_REQUEST_FILE, + CONTEXT_SCRIPT_FILE, + CSX_EXTENSION, + FSX_EXTENSION, + HTTP_METHODS, + NAPLIST_EXTENSION, + NAP_EXTENSION, + NAP_KEY_METHOD, + NAP_NAME_KEY_PREFIX, + NAP_NAME_KEY_SUFFIX, + PLAYLIST_SECTION_LABEL, + SECTION_STEPS, +} from './constants'; +import { type RunResult, RunState } from './types'; + +// Decoupled node type — no vscode dependency +export interface TreeNode { + readonly label: string; + readonly filePath: string; + readonly isDirectory: boolean; + readonly contextValue: string; + readonly httpMethod?: string; + readonly runState: RunState; + readonly children?: readonly TreeNode[]; +} + +const isScriptFile = (filePath: string): boolean => + filePath.endsWith(FSX_EXTENSION) || filePath.endsWith(CSX_EXTENSION), + getContextValue = (filePath: string): string => { + if (filePath.endsWith(NAPLIST_EXTENSION)) { + return CONTEXT_PLAYLIST; + } + if (isScriptFile(filePath)) { + return CONTEXT_SCRIPT_FILE; + } + return 
CONTEXT_REQUEST_FILE; + }, + isMethodLine = (trimmed: string, method: string): boolean => + trimmed.startsWith(`${method} `) || + trimmed === `${NAP_KEY_METHOD} = ${method}` || + trimmed === `${NAP_KEY_METHOD} = ${method}`, + extractHttpMethod = (fileContent: string): string | undefined => { + const lines = fileContent.split('\n'); + for (const line of lines) { + const trimmed = line.trim(); + if (trimmed.length === 0 || trimmed.startsWith('#')) { + continue; + } + for (const method of HTTP_METHODS) { + if (isMethodLine(trimmed, method)) { + return method; + } + } + } + return undefined; + }, + getRunState = (filePath: string, results: ReadonlyMap<string, RunResult>): RunState => { + const result = results.get(filePath); + if (result === undefined) { + return RunState.Idle; + } + if (result.error !== undefined) { + return RunState.Error; + } + return result.passed ? RunState.Passed : RunState.Failed; + }; + +export const createFileNode = ( + filePath: string, + fileContent: string, + results: ReadonlyMap<string, RunResult>, +): TreeNode => { + const method = filePath.endsWith(NAP_EXTENSION) ? 
extractHttpMethod(fileContent) : undefined, + base = { + label: path.basename(filePath, path.extname(filePath)), + filePath, + isDirectory: false as const, + contextValue: getContextValue(filePath), + runState: getRunState(filePath, results), + }; + if (method !== undefined) { + return { ...base, httpMethod: method }; + } + return base; +}; + +export const createFolderNode = (folderPath: string, children: readonly TreeNode[]): TreeNode => ({ + label: path.basename(folderPath), + filePath: folderPath, + isDirectory: true, + contextValue: CONTEXT_FOLDER, + runState: RunState.Idle, + children, +}); + +const isSectionHeader = (trimmed: string): boolean => + trimmed.startsWith('[') && trimmed.endsWith(']'); + +export const parsePlaylistStepPaths = (content: string): readonly string[] => { + const lines = content.split('\n'); + let inSteps = false; + const steps: string[] = []; + for (const line of lines) { + const trimmed = line.trim(); + if (isSectionHeader(trimmed)) { + inSteps = trimmed === SECTION_STEPS; + continue; + } + if (!inSteps || trimmed.length === 0 || trimmed.startsWith('#')) { + continue; + } + steps.push(trimmed); + } + return steps; +}; + +export const createPlaylistNode = ( + filePath: string, + results: ReadonlyMap<string, RunResult>, + stepChildren: readonly TreeNode[], +): TreeNode => ({ + label: path.basename(filePath, path.extname(filePath)), + filePath, + isDirectory: false, + contextValue: CONTEXT_PLAYLIST, + runState: getRunState(filePath, results), + children: stepChildren, +}); + +export const createPlaylistSectionNode = (children: readonly TreeNode[]): TreeNode => ({ + label: PLAYLIST_SECTION_LABEL, + filePath: '', + isDirectory: false, + contextValue: CONTEXT_PLAYLIST_SECTION, + runState: RunState.Idle, + children, +}); + +const findStepsInsertIndex = ( + lines: readonly string[], +): { readonly inSteps: boolean; readonly index: number } => { + let inSteps = false; + for (let i = 0; i < lines.length; i++) { + const line = lines[i]; + if 
(line === undefined) { + continue; + } + const trimmed = line.trim(); + if (trimmed === SECTION_STEPS) { + inSteps = true; + continue; + } + if (inSteps && isSectionHeader(trimmed)) { + return { inSteps: true, index: i }; + } + } + return { inSteps, index: lines.length }; +}; + +export const appendStepToPlaylist = (content: string, stepPath: string): string => { + const lines = content.split('\n'), + result = findStepsInsertIndex(lines); + if (!result.inSteps) { + return `${content}\n${SECTION_STEPS}\n${stepPath}\n`; + } + lines.splice(result.index, 0, stepPath); + return lines.join('\n'); +}; + +export const updatePlaylistName = (content: string, newName: string): string => { + const lines = content.split('\n'), + updated = lines.map((line) => + line.trim().startsWith(NAP_NAME_KEY_PREFIX) + ? `${NAP_NAME_KEY_PREFIX}${newName}${NAP_NAME_KEY_SUFFIX}` + : line, + ); + return updated.join('\n'); +}; diff --git a/src/Napper.VsCode/src/extension.ts b/src/Napper.VsCode/src/extension.ts new file mode 100644 index 0000000..ad9eea7 --- /dev/null +++ b/src/Napper.VsCode/src/extension.ts @@ -0,0 +1,412 @@ +// Specs: vscode-impl, vscode-commands +// Napper VSCode Extension — main entry point +// Registers all providers, commands, and file watchers + +import * as vscode from 'vscode'; +import * as path from 'path'; +import * as fs from 'fs'; +import { ExplorerAdapter } from './explorerAdapter'; +import { CodeLensProvider } from './codeLensProvider'; +import { EnvironmentStatusBar } from './environmentAdapter'; +import { ResponsePanel } from './responsePanel'; +import { PlaylistPanel } from './playlistPanel'; +import { runCli, streamCli } from './cliRunner'; +import type { RunResult } from './types'; +import { parsePlaylistStepPaths } from './explorerProvider'; +import { generatePlaylistReport } from './reportGenerator'; +import { type Logger, createLogger } from './logger'; +import { + type DownloadBinaryParams, + downloadBinary, + getCliVersion, + installDotnetTool, + 
installedBinaryPath, +} from './cliInstaller'; +import { + registerEditCommands, + registerHttpConvertCommands, + registerOpenApiCommands, +} from './editAndImportCommands'; +import { registerContextMenuCommands } from './contextMenuCommands'; +import { registerAutoRun, registerWatchers } from './watchers'; +import { + CLI_BIN_DIR, + CLI_BINARY_NAME, + CLI_ERROR_PREFIX, + CLI_INSTALL_COMPLETE_MSG, + CLI_INSTALL_FAILED_MSG, + CLI_INSTALL_MSG, + CLI_VERSION_MISMATCH_MSG, + CMD_OPEN_RESPONSE, + CMD_RUN_ALL, + CMD_RUN_FILE, + CMD_SAVE_REPORT, + CONFIG_CLI_PATH, + CONFIG_SECTION, + CONFIG_SPLIT_LAYOUT, + DEFAULT_CLI_PATH, + ENCODING_UTF8, + HTTP_FILE_EXTENSION, + LANG_NAP, + LANG_NAPLIST, + REST_FILE_EXTENSION, + LAYOUT_BELOW, + LAYOUT_BESIDE, + LOG_CHANNEL_NAME, + LOG_MSG_ACTIVATED, + LOG_MSG_CLI_RESULT_COUNT, + LOG_MSG_CLI_SPAWN_ERROR, + LOG_MSG_DEACTIVATED, + LOG_MSG_RUN_FILE, + LOG_MSG_RUN_PLAYLIST, + LOG_MSG_STREAM_DONE, + LOG_MSG_STREAM_RESULT, + MSG_NO_FILE_SELECTED, + MSG_NO_RESPONSE, + NAPLIST_EXTENSION, + PROP_FILE_PATH, + REPORT_FILE_EXTENSION, + REPORT_FILE_SUFFIX, + REPORT_SAVED_MSG, + STATUS_RUNNING_ICON, + STATUS_RUNNING_SUFFIX, + VIEW_EXPLORER, +} from './constants'; + +let envStatusBar: EnvironmentStatusBar, + extensionDir: string, + extensionVersion: string, + explorerProvider: ExplorerAdapter, + installedCliOverride: string | undefined, + lastPlaylistReport: (() => void) | undefined, + lastResult: RunResult | undefined, + logger: Logger, + playlistPanel: PlaylistPanel, + responsePanel: ResponsePanel, + storageDir: string; + +const bundledCliPath = (): string => path.join(extensionDir, CLI_BIN_DIR, CLI_BINARY_NAME), + getCliPath = (): string => { + const configured = vscode.workspace + .getConfiguration(CONFIG_SECTION) + .get<string>(CONFIG_CLI_PATH, DEFAULT_CLI_PATH); + if (configured !== DEFAULT_CLI_PATH) { + return configured; + } + if (installedCliOverride !== undefined) { + return installedCliOverride; + } + const bundled = bundledCliPath(); + 
return fs.existsSync(bundled) ? bundled : CLI_BINARY_NAME; + }, + checkVersionAt = async (cliPath: string): Promise<boolean> => { + logger.debug(`Version check: ${cliPath}`); + const result = await getCliVersion(cliPath); + if (!result.ok) { + logger.debug(`Version check failed at ${cliPath}: ${result.error}`); + return false; + } + logger.debug(`${cliPath}: v${result.value} (need ${extensionVersion})`); + if (result.value !== extensionVersion) { + return false; + } + installedCliOverride = cliPath; + logger.info(`${CLI_INSTALL_COMPLETE_MSG} (${cliPath})`); + return true; + }, + checkVersionMatch = async (): Promise<boolean> => { + if (await checkVersionAt(installedBinaryPath(storageDir))) { + return true; + } + if (await checkVersionAt(bundledCliPath())) { + return true; + } + if (await checkVersionAt(CLI_BINARY_NAME)) { + return true; + } + logger.info(CLI_VERSION_MISMATCH_MSG); + return false; + }, + installParams = (): DownloadBinaryParams => ({ + version: extensionVersion, + storageDir, + log: (msg) => { + logger.info(msg); + }, + }), + tryBinaryInstall = async (params: DownloadBinaryParams): Promise<boolean> => { + const dlResult = await downloadBinary(params); + if (!dlResult.ok) { + logger.error(dlResult.error); + return false; + } + if (await checkVersionAt(dlResult.value)) { + return true; + } + logger.error(`Binary downloaded but version check failed at ${dlResult.value}`); + return false; + }, + tryDotnetFallback = async (params: DownloadBinaryParams): Promise<void> => { + const dotnetResult = await installDotnetTool(params); + if (!dotnetResult.ok) { + logger.error(`${CLI_INSTALL_FAILED_MSG}${dotnetResult.error}`); + void vscode.window.showErrorMessage(`${CLI_INSTALL_FAILED_MSG}${dotnetResult.error}`); + return; + } + installedCliOverride = CLI_BINARY_NAME; + logger.info(`${CLI_INSTALL_COMPLETE_MSG} (dotnet tool)`); + }, + performInstall = async (): Promise<void> => { + const params = installParams(); + if (await tryBinaryInstall(params)) { + return; + 
} + await tryDotnetFallback(params); + }, + ensureCliInstalled = async (): Promise<void> => { + logger.info('Checking CLI installation...'); + if (await checkVersionMatch()) { + return; + } + logger.info('No matching CLI found, starting install...'); + await vscode.window.withProgress( + { + location: vscode.ProgressLocation.Notification, + title: CLI_INSTALL_MSG, + cancellable: false, + }, + performInstall, + ); + }, + getWorkspacePath = (): string | undefined => vscode.workspace.workspaceFolders?.[0]?.uri.fsPath, + getResponseColumn = (): vscode.ViewColumn => { + const config = vscode.workspace.getConfiguration(CONFIG_SECTION), + layout = config.get<string>(CONFIG_SPLIT_LAYOUT, LAYOUT_BESIDE); + return layout === LAYOUT_BELOW ? vscode.ViewColumn.Active : vscode.ViewColumn.Beside; + }, + resolveFileUri = (arg?: vscode.Uri | { readonly filePath: string }): vscode.Uri | undefined => { + if (arg === undefined) { + return vscode.window.activeTextEditor?.document.uri; + } + if (arg instanceof vscode.Uri) { + return arg; + } + return PROP_FILE_PATH in arg ? 
vscode.Uri.file(arg.filePath) : undefined; + }, + makeRunningStatus = (fsPath: string): vscode.Disposable => + vscode.window.setStatusBarMessage( + `${STATUS_RUNNING_ICON}${path.basename(fsPath)}${STATUS_RUNNING_SUFFIX}`, + ), + handleStreamResult = (result: RunResult, index: number): void => { + logger.debug(`${LOG_MSG_STREAM_RESULT} ${result.file}`); + explorerProvider.updateResult(result.file, result); + lastResult = result; + playlistPanel.addResult(index, result); + }, + savePlaylistReport = (playlistFile: string, results: readonly RunResult[]): void => { + const baseName = path.basename(playlistFile, path.extname(playlistFile)), + reportPath = path.join( + path.dirname(playlistFile), + `${baseName}${REPORT_FILE_SUFFIX}${REPORT_FILE_EXTENSION}`, + ); + fs.writeFileSync(reportPath, generatePlaylistReport(baseName, results), ENCODING_UTF8); + void vscode.env.openExternal(vscode.Uri.file(reportPath)); + void vscode.window.showInformationMessage(`${REPORT_SAVED_MSG}${path.basename(reportPath)}`); + }, + currentEnvOrUndefined = (): string | undefined => { + const env = envStatusBar.currentEnv; + return env !== '' ? 
env : undefined; + }, + preparePlaylistRun = (fileUri: vscode.Uri): void => { + logger.info(`${LOG_MSG_RUN_PLAYLIST} ${fileUri.fsPath}`); + explorerProvider.clearResults(); + const content = fs.readFileSync(fileUri.fsPath, ENCODING_UTF8), + stepPaths = parsePlaylistStepPaths(content), + stepFileNames = stepPaths.map((s) => path.basename(s)); + playlistPanel.showRunning(fileUri.fsPath, stepFileNames, getResponseColumn()); + }; + +interface StreamState { + readonly collectedResults: RunResult[]; + resultIndex: number; + streamError: string | undefined; +} + +const collectResult = (state: StreamState, result: RunResult): void => { + handleStreamResult(result, state.resultIndex); + state.collectedResults.push(result); + state.resultIndex++; + }, + awaitStream = async (fileUri: vscode.Uri, cwd: string, state: StreamState): Promise<void> => { + await new Promise<void>((resolve) => { + streamCli({ + cliPath: getCliPath(), + filePath: fileUri.fsPath, + env: currentEnvOrUndefined(), + cwd, + onResult: (result: RunResult) => { + collectResult(state, result); + }, + onDone: (error?: string) => { + state.streamError = error; + resolve(); + }, + }); + }); + }, + handleStreamError = (state: StreamState): void => { + logger.error(`${LOG_MSG_CLI_SPAWN_ERROR} ${state.streamError}`); + playlistPanel.showError(state.streamError ?? 
''); + void vscode.window.showErrorMessage(`${CLI_ERROR_PREFIX}${state.streamError}`); + }, + handleStreamSuccess = (state: StreamState, fileUri: vscode.Uri): void => { + logger.info(LOG_MSG_STREAM_DONE); + playlistPanel.showComplete(state.collectedResults); + const doSave = (): void => { + savePlaylistReport(fileUri.fsPath, state.collectedResults); + }; + playlistPanel.onSaveReport = doSave; + lastPlaylistReport = doSave; + }, + runPlaylistStreaming = async (fileUri: vscode.Uri, cwd: string): Promise<void> => { + preparePlaylistRun(fileUri); + const statusMsg = makeRunningStatus(fileUri.fsPath), + state: StreamState = { collectedResults: [], resultIndex: 0, streamError: undefined }; + await awaitStream(fileUri, cwd, state); + statusMsg.dispose(); + if (state.streamError !== undefined && state.collectedResults.length === 0) { + handleStreamError(state); + } else { + handleStreamSuccess(state, fileUri); + } + }, + handleCliResults = (results: readonly RunResult[]): void => { + logger.info(`${LOG_MSG_CLI_RESULT_COUNT} ${results.length}`); + for (const r of results) { + explorerProvider.updateResult(r.file, r); + lastResult = r; + } + const [first] = results; + if (first !== undefined) { + responsePanel.show(first, getResponseColumn()); + } + }, + runSingleFile = async (fileUri: vscode.Uri, cwd: string): Promise<void> => { + const resolvedCliPath = getCliPath(); + logger.info(`${LOG_MSG_RUN_FILE} ${fileUri.fsPath}`); + logger.info(`CLI path: ${resolvedCliPath}, cwd: ${cwd}`); + const statusMsg = makeRunningStatus(fileUri.fsPath), + result = await runCli({ + cliPath: resolvedCliPath, + filePath: fileUri.fsPath, + env: currentEnvOrUndefined(), + cwd, + }); + statusMsg.dispose(); + logger.info(`CLI completed: ok=${String(result.ok)}`); + if (!result.ok) { + logger.error(`${LOG_MSG_CLI_SPAWN_ERROR} ${result.error}`); + void vscode.window.showErrorMessage(`${CLI_ERROR_PREFIX}${result.error}`); + return; + } + handleCliResults(result.value); + }, + runFile = async (arg?: 
vscode.Uri | { readonly filePath: string }): Promise<void> => { + const fileUri = resolveFileUri(arg); + if (fileUri === undefined) { + void vscode.window.showWarningMessage(MSG_NO_FILE_SELECTED); + return; + } + const cwd = getWorkspacePath(); + if (cwd === undefined) { + return; + } + if (fileUri.fsPath.endsWith(NAPLIST_EXTENSION)) { + await runPlaylistStreaming(fileUri, cwd); + } else { + await runSingleFile(fileUri, cwd); + } + }, + runAll = async (): Promise<void> => { + const cwd = getWorkspacePath(); + if (cwd !== undefined) { + await runFile(vscode.Uri.file(cwd)); + } + }, + openResponse = (): void => { + if (lastResult !== undefined) { + responsePanel.show(lastResult, getResponseColumn()); + } else { + void vscode.window.showInformationMessage(MSG_NO_RESPONSE); + } + }, + registerRunCommands = (context: vscode.ExtensionContext): void => { + context.subscriptions.push( + vscode.commands.registerCommand(CMD_RUN_FILE, runFile), + vscode.commands.registerCommand(CMD_RUN_ALL, runAll), + vscode.commands.registerCommand(CMD_OPEN_RESPONSE, openResponse), + vscode.commands.registerCommand(CMD_SAVE_REPORT, () => { + if (lastPlaylistReport !== undefined) { + lastPlaylistReport(); + } + }), + ); + }, + initProviders = (): void => { + explorerProvider = new ExplorerAdapter(); + envStatusBar = new EnvironmentStatusBar(); + responsePanel = new ResponsePanel(); + playlistPanel = new PlaylistPanel(); + }, + codeLensSelectors = [ + { language: LANG_NAP }, + { language: LANG_NAPLIST }, + { pattern: `**/*${HTTP_FILE_EXTENSION}` }, + { pattern: `**/*${REST_FILE_EXTENSION}` }, + ], + registerCodeLens = (context: vscode.ExtensionContext): void => { + context.subscriptions.push( + vscode.languages.registerCodeLensProvider(codeLensSelectors, new CodeLensProvider()), + ); + }, + initLogger = (context: vscode.ExtensionContext): void => { + const outputChannel = vscode.window.createOutputChannel(LOG_CHANNEL_NAME); + context.subscriptions.push(outputChannel); + logger = 
createLogger((msg) => { + outputChannel.appendLine(msg); + }); + logger.info(LOG_MSG_ACTIVATED); + extensionVersion = (context.extension.packageJSON as { version: string }).version; + extensionDir = context.extensionUri.fsPath; + storageDir = context.globalStorageUri.fsPath; + logger.info(`Extension version: ${extensionVersion}`); + ensureCliInstalled().catch(() => undefined); + }; + +export interface ExtensionApi { + readonly explorerProvider: ExplorerAdapter; +} + +export function activate(context: vscode.ExtensionContext): ExtensionApi { + initLogger(context); + initProviders(); + context.subscriptions.push( + vscode.window.registerTreeDataProvider(VIEW_EXPLORER, explorerProvider), + vscode.window.registerFileDecorationProvider(explorerProvider), + ); + registerCodeLens(context); + const commandDeps = { explorer: explorerProvider, envStatusBar, logger, getCliPath }; + registerRunCommands(context); + registerEditCommands(context, commandDeps); + registerOpenApiCommands(context, commandDeps); + registerHttpConvertCommands(context, commandDeps); + registerContextMenuCommands(context, explorerProvider); + registerWatchers(context, explorerProvider, logger); + registerAutoRun(context, async (uri) => runFile(uri)); + context.subscriptions.push(envStatusBar, responsePanel, playlistPanel); + return { explorerProvider }; +} + +export function deactivate(): void { + logger.info(LOG_MSG_DEACTIVATED); +} diff --git a/src/Napper.VsCode/src/fileCreation.ts b/src/Napper.VsCode/src/fileCreation.ts new file mode 100644 index 0000000..a931ba7 --- /dev/null +++ b/src/Napper.VsCode/src/fileCreation.ts @@ -0,0 +1,103 @@ +// Specs: vscode-commands, vscode-new-request +// File creation commands — newRequest, newPlaylist +// Extracted from extension.ts to keep files under 450 LOC + +import * as vscode from 'vscode'; +import * as path from 'path'; +import type { ExplorerAdapter } from './explorerAdapter'; +import { + DEFAULT_PLAYLIST_NAME, + ENCODING_UTF8, + HTTP_METHODS, + 
NAPLIST_EXTENSION, + NAP_EXTENSION, + NAP_NAME_KEY_PREFIX, + NAP_NAME_KEY_SUFFIX, + PLACEHOLDER_URL, + PROMPT_ENTER_URL, + PROMPT_PLAYLIST_NAME, + PROMPT_REQUEST_NAME, + PROMPT_SELECT_METHOD, + REQUEST_NAME_SUFFIX, + SECTION_META, + SECTION_STEPS, +} from './constants'; + +const promptMethod = (): Thenable<string | undefined> => + vscode.window.showQuickPick( + HTTP_METHODS.map((m) => m), + { placeHolder: PROMPT_SELECT_METHOD }, + ), + promptUrl = (): Thenable<string | undefined> => + vscode.window.showInputBox({ + prompt: PROMPT_ENTER_URL, + placeHolder: PLACEHOLDER_URL, + }), + promptFileName = (defaultValue: string): Thenable<string | undefined> => + vscode.window.showInputBox({ + prompt: PROMPT_REQUEST_NAME, + value: defaultValue, + }), + writeAndOpen = async ( + filePath: string, + content: string, + explorer: ExplorerAdapter, + ): Promise<void> => { + await vscode.workspace.fs.writeFile( + vscode.Uri.file(filePath), + Buffer.from(content, ENCODING_UTF8), + ); + const doc = await vscode.workspace.openTextDocument(filePath); + await vscode.window.showTextDocument(doc); + explorer.refresh(); + }, + getWorkspacePath = (): string | undefined => vscode.workspace.workspaceFolders?.[0]?.uri.fsPath; + +const promptRequestDetails = async (): Promise< + | { readonly method: string; readonly url: string; readonly name: string; readonly cwd: string } + | undefined +> => { + const method = await promptMethod(); + if (method === undefined) { + return undefined; + } + const url = await promptUrl(); + if (url === undefined) { + return undefined; + } + const cwd = getWorkspacePath(); + if (cwd === undefined) { + return undefined; + } + const defaultName = `${method.toLowerCase()}${REQUEST_NAME_SUFFIX}`, + name = await promptFileName(defaultName); + return name !== undefined ? 
{ method, url, name, cwd } : undefined; +}; + +export const newRequest = async (explorer: ExplorerAdapter): Promise<void> => { + const details = await promptRequestDetails(); + if (details === undefined) { + return; + } + const filePath = path.join(details.cwd, `${details.name}${NAP_EXTENSION}`); + await writeAndOpen(filePath, `${details.method} ${details.url}\n`, explorer); +}; + +export const newPlaylist = async (explorer: ExplorerAdapter): Promise<void> => { + const cwd = getWorkspacePath(); + if (cwd === undefined) { + return; + } + + const name = await vscode.window.showInputBox({ + prompt: PROMPT_PLAYLIST_NAME, + value: DEFAULT_PLAYLIST_NAME, + }); + if (name === undefined) { + return; + } + + const filePath = path.join(cwd, `${name}${NAPLIST_EXTENSION}`), + content = `${SECTION_META}\n${NAP_NAME_KEY_PREFIX}${name}${NAP_NAME_KEY_SUFFIX}\n\n${SECTION_STEPS}\n`; + await writeAndOpen(filePath, content, explorer); +}; diff --git a/src/Nap.VsCode/src/htmlUtils.ts b/src/Napper.VsCode/src/htmlUtils.ts similarity index 57% rename from src/Nap.VsCode/src/htmlUtils.ts rename to src/Napper.VsCode/src/htmlUtils.ts index 3663c87..17b7917 100644 --- a/src/Nap.VsCode/src/htmlUtils.ts +++ b/src/Napper.VsCode/src/htmlUtils.ts @@ -1,7 +1,8 @@ +// Specs: vscode-layout // Shared HTML utility functions for webview panels // Used by both responsePanel and playlistPanel -import type { AssertionResult, RunResult } from "./types"; +import type { AssertionResult, RunResult } from './types'; import { JSON_INDENT_SIZE, NO_REQUEST_HEADERS, @@ -14,67 +15,62 @@ import { SECTION_LABEL_REQUEST_HEADERS, SECTION_LABEL_RESPONSE, SECTION_LABEL_RESPONSE_HEADERS, -} from "./constants"; +} from './constants'; export const escapeHtml = (text: string): string => text - .split("&").join("&") - .split("<").join("<") - .split(">").join(">") - .split('"').join("""); + .split('&') + .join('&') + .split('<') + .join('<') + .split('>') + .join('>') + .split('"') + .join('"'); const jsonSpan = (cls: string, 
content: string): string => - `<span class="json-${cls}">${escapeHtml(content)}</span>`, - - highlightJsonPrimitive = (value: unknown): string | undefined => { - if (value === null) { - return jsonSpan("null", "null"); - } - if (typeof value === "boolean") { - return jsonSpan("bool", String(value)); - } - if (typeof value === "number") { - return jsonSpan("number", String(value)); - } - if (typeof value === "string") { - return jsonSpan("string", `"${escapeHtml(value)}"`); - } - return undefined; -}, - - highlightJsonArray = ( - items: readonly unknown[], - indent: number -): string => { - if (items.length === 0) { - return "[]"; - } - const pad = " ".repeat(indent), - innerPad = " ".repeat(indent + JSON_INDENT_SIZE), - rendered = items - .map((item) => `${innerPad}${highlightJson(item, indent + JSON_INDENT_SIZE)}`) - .join(",\n"); - return `[\n${rendered}\n${pad}]`; -}, - - highlightJsonObject = ( - value: Record<string, unknown>, - indent: number -): string => { - const entries = Object.entries(value); - if (entries.length === 0) { - return "{}"; - } - const pad = " ".repeat(indent), - innerPad = " ".repeat(indent + JSON_INDENT_SIZE), - props = entries - .map( - ([k, v]) => - `${innerPad}${jsonSpan("key", `"${escapeHtml(k)}"`)}: ${highlightJson(v, indent + JSON_INDENT_SIZE)}` - ) - .join(",\n"); - return `{\n${props}\n${pad}}`; -}; + `<span class="json-${cls}">${escapeHtml(content)}</span>`, + highlightJsonPrimitive = (value: unknown): string | undefined => { + if (value === null) { + return jsonSpan('null', 'null'); + } + if (typeof value === 'boolean') { + return jsonSpan('bool', String(value)); + } + if (typeof value === 'number') { + return jsonSpan('number', String(value)); + } + if (typeof value === 'string') { + return jsonSpan('string', `"${escapeHtml(value)}"`); + } + return undefined; + }, + highlightJsonArray = (items: readonly unknown[], indent: number): string => { + if (items.length === 0) { + return '[]'; + } + const pad = ' '.repeat(indent), + 
innerPad = ' '.repeat(indent + JSON_INDENT_SIZE), + rendered = items + .map((item) => `${innerPad}${highlightJson(item, indent + JSON_INDENT_SIZE)}`) + .join(',\n'); + return `[\n${rendered}\n${pad}]`; + }, + highlightJsonObject = (value: Record<string, unknown>, indent: number): string => { + const entries = Object.entries(value); + if (entries.length === 0) { + return '{}'; + } + const pad = ' '.repeat(indent), + innerPad = ' '.repeat(indent + JSON_INDENT_SIZE), + props = entries + .map( + ([k, v]) => + `${innerPad}${jsonSpan('key', `"${escapeHtml(k)}"`)}: ${highlightJson(v, indent + JSON_INDENT_SIZE)}`, + ) + .join(',\n'); + return `{\n${props}\n${pad}}`; + }; export function highlightJson(value: unknown, indent: number): string { const primitive = highlightJsonPrimitive(value); @@ -84,10 +80,10 @@ export function highlightJson(value: unknown, indent: number): string { if (Array.isArray(value)) { return highlightJsonArray(value, indent); } - if (typeof value === "object" && value !== null) { + if (typeof value === 'object' && value !== null) { return highlightJsonObject(value as Record<string, unknown>, indent); } - return escapeHtml(typeof value === "undefined" ? "undefined" : JSON.stringify(value)); + return escapeHtml(typeof value === 'undefined' ? 'undefined' : JSON.stringify(value)); } export const formatBodyHtml = (body: string): string => { @@ -112,58 +108,59 @@ export const buildCollapsibleSection = ({ readonly content: string; readonly open: boolean; }): string => - `<details class="section"${open ? " open" : ""}> + `<details class="section"${open ? 
' open' : ''}> <summary><h3>${title}</h3><span class="chevron">▶</span></summary> <div class="section-content">${content}</div> </details>`; export const buildHeadersTableRows = ( - headers: Readonly<Record<string, string>> | undefined + headers: Readonly<Record<string, string>> | undefined, ): string => { - if (!headers) {return "";} + if (!headers) { + return ''; + } return Object.entries(headers) .map( - ([k, v]) => - `<tr><td class="header-key">${escapeHtml(k)}</td><td>${escapeHtml(v)}</td></tr>` + ([k, v]) => `<tr><td class="header-key">${escapeHtml(k)}</td><td>${escapeHtml(v)}</td></tr>`, ) - .join("\n"); + .join('\n'); }; -const buildAssertionRowsHtml = ( - assertions: readonly AssertionResult[] -): string => { - if (assertions.length === 0) {return "";} +const buildAssertionRowsHtml = (assertions: readonly AssertionResult[]): string => { + if (assertions.length === 0) { + return ''; + } return assertions .map((a) => { - const icon = a.passed ? "✓" : "✗", - cls = a.passed ? "pass" : "fail", - detail = a.passed - ? "" - : `<div class="assert-detail">expected: ${escapeHtml(a.expected)} | actual: ${escapeHtml(a.actual)}</div>`; + const icon = a.passed ? '✓' : '✗', + cls = a.passed ? 'pass' : 'fail', + detail = a.passed + ? '' + : `<div class="assert-detail">expected: ${escapeHtml(a.expected)} | actual: ${escapeHtml(a.actual)}</div>`; return `<div class="assert-row ${cls}">${icon} ${escapeHtml(a.target)}${detail}</div>`; }) - .join("\n"); + .join('\n'); }; const buildRequestUrlHtml = (result: RunResult): string => - result.requestUrl !== undefined && result.requestUrl !== "" - ? `<div class="request-url"><span class="request-method">${escapeHtml(result.requestMethod ?? "")}</span> ${escapeHtml(result.requestUrl)}</div>` - : ""; + result.requestUrl !== undefined && result.requestUrl !== '' + ? `<div class="request-url"><span class="request-method">${escapeHtml(result.requestMethod ?? 
'')}</span> ${escapeHtml(result.requestUrl)}</div>` + : ''; export const buildErrorHtml = (error: string | undefined): string => - error !== undefined && error !== "" + error !== undefined && error !== '' ? buildCollapsibleSection({ title: SECTION_LABEL_ERROR, content: `<pre class="error-text">${escapeHtml(error)}</pre>`, open: true, }) - : ""; + : ''; -export const buildLogHtml = ( - log: readonly string[] | undefined -): string => { - if (!log || log.length === 0) {return "";} - const lines = log.map((line) => escapeHtml(line)).join("\n"); +export const buildLogHtml = (log: readonly string[] | undefined): string => { + if (!log || log.length === 0) { + return ''; + } + const lines = log.map((line) => escapeHtml(line)).join('\n'); return buildCollapsibleSection({ title: SECTION_LABEL_OUTPUT, content: `<pre class="log-output">${lines}</pre>`, @@ -172,21 +169,25 @@ export const buildLogHtml = ( }; const buildRequestBodyHtml = (result: RunResult): string => { - if (result.requestBody === undefined || result.requestBody === "") {return "";} + if (result.requestBody === undefined || result.requestBody === '') { + return ''; + } const formatted = formatBodyHtml(result.requestBody), - contentTypeHint = result.requestBodyContentType !== undefined && result.requestBodyContentType !== "" - ? `<div class="content-type-hint">${escapeHtml(result.requestBodyContentType)}</div>` - : ""; + contentTypeHint = + result.requestBodyContentType !== undefined && result.requestBodyContentType !== '' + ? `<div class="content-type-hint">${escapeHtml(result.requestBodyContentType)}</div>` + : ''; return `<div class="subsection"><h4 class="subsection-title">${SECTION_LABEL_REQUEST_BODY}</h4>${contentTypeHint}<pre class="body">${formatted}</pre></div>`; }; export const buildRequestGroupHtml = (result: RunResult): string => { const urlHtml = buildRequestUrlHtml(result), - headersRows = buildHeadersTableRows(result.requestHeaders), - headersHtml = headersRows !== "" - ? 
`<div class="subsection"><h4 class="subsection-title">${SECTION_LABEL_REQUEST_HEADERS}</h4><table>${headersRows}</table></div>` - : `<span class="empty-hint">${NO_REQUEST_HEADERS}</span>`, - bodyHtml = buildRequestBodyHtml(result); + headersRows = buildHeadersTableRows(result.requestHeaders), + headersHtml = + headersRows !== '' + ? `<div class="subsection"><h4 class="subsection-title">${SECTION_LABEL_REQUEST_HEADERS}</h4><table>${headersRows}</table></div>` + : `<span class="empty-hint">${NO_REQUEST_HEADERS}</span>`, + bodyHtml = buildRequestBodyHtml(result); return buildCollapsibleSection({ title: SECTION_LABEL_REQUEST, @@ -195,38 +196,41 @@ export const buildRequestGroupHtml = (result: RunResult): string => { }); }; -const buildResponseSubsection = ( - title: string, - content: string -): string => +const buildResponseSubsection = (title: string, content: string): string => `<div class="subsection"><h4 class="subsection-title">${title}</h4>${content}</div>`; -const buildResponseParts = (result: RunResult): readonly string[] => { - const parts: string[] = [], - assertionsHtml = buildAssertionRowsHtml(result.assertions); - - if (assertionsHtml !== "") { - parts.push(buildResponseSubsection(SECTION_LABEL_ASSERTIONS, assertionsHtml)); - } - - const headersRows = buildHeadersTableRows(result.headers); - if (headersRows !== "") { - parts.push(buildResponseSubsection(SECTION_LABEL_RESPONSE_HEADERS, `<table>${headersRows}</table>`)); - } - - if (result.body !== undefined && result.body !== "") { - parts.push(buildResponseSubsection(SECTION_LABEL_BODY, `<pre class="body">${formatBodyHtml(result.body)}</pre>`)); - } - - return parts; -}; +const buildAssertionsPart = (result: RunResult): string | undefined => { + const assertionsHtml = buildAssertionRowsHtml(result.assertions); + return assertionsHtml !== '' + ? 
buildResponseSubsection(SECTION_LABEL_ASSERTIONS, assertionsHtml) + : undefined; + }, + buildHeadersPart = (result: RunResult): string | undefined => { + const headersRows = buildHeadersTableRows(result.headers); + return headersRows !== '' + ? buildResponseSubsection(SECTION_LABEL_RESPONSE_HEADERS, `<table>${headersRows}</table>`) + : undefined; + }, + buildBodyPart = (result: RunResult): string | undefined => + result.body !== undefined && result.body !== '' + ? buildResponseSubsection( + SECTION_LABEL_BODY, + `<pre class="body">${formatBodyHtml(result.body)}</pre>`, + ) + : undefined, + buildResponseParts = (result: RunResult): readonly string[] => + [buildAssertionsPart(result), buildHeadersPart(result), buildBodyPart(result)].filter( + (p): p is string => p !== undefined, + ); export const buildResponseGroupHtml = (result: RunResult): string => { const parts = buildResponseParts(result); - if (parts.length === 0) {return "";} + if (parts.length === 0) { + return ''; + } return buildCollapsibleSection({ title: SECTION_LABEL_RESPONSE, - content: parts.join("\n"), + content: parts.join('\n'), open: true, }); }; diff --git a/src/Napper.VsCode/src/httpConvert.ts b/src/Napper.VsCode/src/httpConvert.ts new file mode 100644 index 0000000..6bfa0ea --- /dev/null +++ b/src/Napper.VsCode/src/httpConvert.ts @@ -0,0 +1,192 @@ +// Specs: vscode-http-convert, vscode-commands +// .http → .nap conversion — calls CLI `nap convert http` subprocess +// Decoupled from vscode SDK where possible; thin vscode layer for dialogs + +import * as vscode from 'vscode'; +import * as path from 'path'; +import * as fs from 'fs'; +import { execFile } from 'child_process'; +import type { ExplorerAdapter } from './explorerAdapter'; +import type { Logger } from './logger'; +import { type Result, err, ok } from './types'; +import { + CLI_CMD_CONVERT, + CLI_FLAG_OUTPUT, + CLI_FLAG_OUTPUT_DIR, + CLI_OUTPUT_JSON, + CLI_SPAWN_FAILED_PREFIX, + CLI_SUBCMD_HTTP, + CONVERT_HTTP_ERROR_PREFIX, + 
CONVERT_HTTP_FILE_EXTENSIONS, + CONVERT_HTTP_FILTER_LABEL, + CONVERT_HTTP_NO_FILES, + CONVERT_HTTP_PICK_DIR, + CONVERT_HTTP_PICK_FILE, + CONVERT_HTTP_SUCCESS_PREFIX, + CONVERT_HTTP_SUCCESS_SUFFIX, + HTTP_FILE_EXTENSION, + LOG_MSG_CONVERT_HTTP, + LOG_MSG_CONVERT_HTTP_RESULT, + REST_FILE_EXTENSION, +} from './constants'; + +const MAX_PREVIEW_LENGTH = 200; + +interface ConvertResult { + readonly files: number; + readonly warnings: number; +} + +export interface ConvertContext { + readonly explorer: ExplorerAdapter; + readonly logger: Logger; + readonly getCliPath: () => string; +} + +const buildConvertArgs = (inputPath: string, outDir: string): readonly string[] => [ + CLI_CMD_CONVERT, + CLI_SUBCMD_HTTP, + inputPath, + CLI_FLAG_OUTPUT_DIR, + outDir, + CLI_FLAG_OUTPUT, + CLI_OUTPUT_JSON, +]; + +const parseConvertOutput = (stdout: string): Result<ConvertResult, string> => { + try { + return ok(JSON.parse(stdout) as ConvertResult); + } catch { + return err(`${CONVERT_HTTP_ERROR_PREFIX}${stdout.slice(0, MAX_PREVIEW_LENGTH)}`); + } +}; + +const isHttpFile = (filePath: string): boolean => + filePath.endsWith(HTTP_FILE_EXTENSION) || filePath.endsWith(REST_FILE_EXTENSION); + +interface ExecContext { + readonly cliPath: string; + readonly logger: Logger; + readonly resolve: (r: Result<ConvertResult, string>) => void; +} + +const resolveExecError = (ctx: ExecContext, stderr: string): void => { + const msg = stderr.length > 0 ? ` — ${stderr}` : ''; + ctx.logger.error(`${CLI_SPAWN_FAILED_PREFIX}${ctx.cliPath}${msg}`); + ctx.resolve(err(`${CLI_SPAWN_FAILED_PREFIX}${ctx.cliPath}${msg}`)); +}; + +const resolveExecSuccess = (ctx: ExecContext, stdout: string): void => { + const result = parseConvertOutput(stdout); + const logFn = result.ok ? ctx.logger.info : ctx.logger.error; + logFn( + `${LOG_MSG_CONVERT_HTTP_RESULT} ${result.ok ? 
`${result.value.files} files` : result.error}`, + ); + ctx.resolve(result); +}; + +const spawnConvert = (inputPath: string, outDir: string, ctx: ExecContext): void => { + execFile( + ctx.cliPath, + [...buildConvertArgs(inputPath, outDir)], + { timeout: 30_000, env: { ...process.env } }, + (error, stdout, stderr) => { + if (error !== null && stdout.length === 0) { + resolveExecError(ctx, stderr); + } else { + resolveExecSuccess(ctx, stdout); + } + }, + ); +}; + +interface ConvertParams { + readonly inputPath: string; + readonly outDir: string; + readonly logger: Logger; + readonly getCliPath: () => string; +} + +export const callCliConvert = async ( + params: ConvertParams, +): Promise<Result<ConvertResult, string>> => + new Promise((resolve) => { + const cliPath = params.getCliPath(); + params.logger.info(`${LOG_MSG_CONVERT_HTTP} ${cliPath} ${params.inputPath} → ${params.outDir}`); + spawnConvert(params.inputPath, params.outDir, { cliPath, logger: params.logger, resolve }); + }); + +const handleConvertSuccess = (generated: ConvertResult, ctx: ConvertContext): void => { + ctx.logger.info(`${LOG_MSG_CONVERT_HTTP} ${generated.files} files generated`); + ctx.explorer.refresh(); + void vscode.window.showInformationMessage( + `${CONVERT_HTTP_SUCCESS_PREFIX}${generated.files}${CONVERT_HTTP_SUCCESS_SUFFIX}`, + ); +}; + +const runConvert = async ( + inputPath: string, + outDir: string, + ctx: ConvertContext, +): Promise<void> => { + const result = await callCliConvert({ + inputPath, + outDir, + logger: ctx.logger, + getCliPath: ctx.getCliPath, + }); + if (!result.ok) { + await vscode.window.showErrorMessage(`${CONVERT_HTTP_ERROR_PREFIX}${result.error}`); + return; + } + handleConvertSuccess(result.value, ctx); +}; + +const pickHttpFile = async (): Promise<vscode.Uri | undefined> => { + const picked = await vscode.window.showOpenDialog({ + canSelectFiles: true, + canSelectFolders: false, + canSelectMany: false, + filters: { + [CONVERT_HTTP_FILTER_LABEL]: 
[...CONVERT_HTTP_FILE_EXTENSIONS], + }, + title: CONVERT_HTTP_PICK_FILE, + }); + return picked?.[0]; +}; + +export const convertHttpFile = async (ctx: ConvertContext, fileUri?: vscode.Uri): Promise<void> => { + const uri = fileUri ?? (await pickHttpFile()); + if (uri === undefined) { + return; + } + if (!isHttpFile(uri.fsPath)) { + await vscode.window.showWarningMessage(CONVERT_HTTP_NO_FILES); + return; + } + const outDir = path.dirname(uri.fsPath); + await runConvert(uri.fsPath, outDir, ctx); +}; + +const pickHttpDirectory = async (): Promise<vscode.Uri | undefined> => { + const picked = await vscode.window.showOpenDialog({ + canSelectFiles: false, + canSelectFolders: true, + canSelectMany: false, + title: CONVERT_HTTP_PICK_DIR, + }); + return picked?.[0]; +}; + +export const convertHttpDirectory = async (ctx: ConvertContext): Promise<void> => { + const uri = await pickHttpDirectory(); + if (uri === undefined) { + return; + } + const hasHttpFiles = fs.readdirSync(uri.fsPath).some((f) => isHttpFile(f)); + if (!hasHttpFiles) { + await vscode.window.showWarningMessage(CONVERT_HTTP_NO_FILES); + return; + } + await runConvert(uri.fsPath, uri.fsPath, ctx); +}; diff --git a/src/Nap.VsCode/src/logger.ts b/src/Napper.VsCode/src/logger.ts similarity index 89% rename from src/Nap.VsCode/src/logger.ts rename to src/Napper.VsCode/src/logger.ts index bb48b18..ff0967c 100644 --- a/src/Nap.VsCode/src/logger.ts +++ b/src/Napper.VsCode/src/logger.ts @@ -1,12 +1,7 @@ // Logger — decoupled from vscode SDK // Accepts a write function; the adapter provides the OutputChannel binding -import { - LOG_PREFIX_DEBUG, - LOG_PREFIX_ERROR, - LOG_PREFIX_INFO, - LOG_PREFIX_WARN, -} from "./constants"; +import { LOG_PREFIX_DEBUG, LOG_PREFIX_ERROR, LOG_PREFIX_INFO, LOG_PREFIX_WARN } from './constants'; export type LogWriter = (message: string) => void; diff --git a/src/Napper.VsCode/src/openApiAiEnhancer.ts b/src/Napper.VsCode/src/openApiAiEnhancer.ts new file mode 100644 index 0000000..5966b59 --- 
/dev/null +++ b/src/Napper.VsCode/src/openApiAiEnhancer.ts @@ -0,0 +1,317 @@ +// Specs: vscode-openapi-ai +// AI enrichment for OpenAPI-generated .nap files +// Pure functions — NO VS Code SDK dependency — fully testable + +import * as fs from 'fs'; +import * as path from 'path'; +import { type Result, err, ok } from './types'; +import { + NAP_EXTENSION, + NAP_TRIPLE_QUOTE, + SECTION_ASSERT, + SECTION_REQUEST_BODY, + SECTION_STEPS, +} from './constants'; + +// ─── Types ────────────────────────────────────────────────── + +export interface OperationSummary { + readonly operationId: string; + readonly method: string; + readonly path: string; + readonly summary: string; + readonly responseFields: readonly string[]; + readonly hasRequestBody: boolean; +} + +export interface AssertionEnrichment { + readonly operationId: string; + readonly assertions: readonly string[]; +} + +export interface TestDataEnrichment { + readonly operationId: string; + readonly requestBody: string; +} + +export interface GeneratedFile { + readonly fileName: string; + readonly content: string; +} + +export interface EnrichmentResult { + readonly files: readonly GeneratedFile[]; + readonly playlistContent: string; +} + +// ─── Prompt builders ──────────────────────────────────────── + +const ASSERTION_SYSTEM = [ + 'You are an API test engineer.', + 'Given API operations with their response fields,', + "suggest semantic assertions that go beyond 'exists' checks.", + 'Return ONLY a JSON array.', + 'Each element: { operationId: string, assertions: string[] }.', + 'Assertions use napper syntax: body.field > 0, body.email contains @,', + 'body.name != "", headers.Content-Type contains json.', + 'Do NOT repeat status assertions. 
Only add value/format checks.', + ].join(' '), + TEST_DATA_SYSTEM = [ + 'You are an API test data generator.', + 'Given API operations that accept request bodies,', + 'generate realistic JSON request body examples.', + 'Return ONLY a JSON array.', + 'Each element: { operationId: string, requestBody: string }.', + 'requestBody must be a valid JSON string with realistic values.', + 'Use real-looking names, emails, dates, IDs — not placeholders.', + ].join(' '), + PLAYLIST_SYSTEM = [ + 'You are an API test orchestrator.', + 'Given a list of test file paths, reorder them for logical flow:', + 'auth/login first, then creates, then reads, then updates, then deletes.', + 'Return ONLY a JSON array of the file paths in the recommended order.', + ].join(' '); + +export const buildAssertionPrompt = (operations: readonly OperationSummary[]): string => { + const lines = operations.map( + (op) => + `- ${op.method.toUpperCase()} ${op.path} (${op.operationId}): ` + + `response fields: [${op.responseFields.join(', ')}]`, + ); + return lines.join('\n'); +}; + +export const buildTestDataPrompt = (operations: readonly OperationSummary[]): string => { + const withBody = operations.filter((op) => op.hasRequestBody), + lines = withBody.map( + (op) => `- ${op.method.toUpperCase()} ${op.path} (${op.operationId}): ${op.summary}`, + ); + return lines.join('\n'); +}; + +export const buildPlaylistOrderPrompt = (filePaths: readonly string[]): string => + filePaths.join('\n'); + +export const getAssertionSystemPrompt = (): string => ASSERTION_SYSTEM; +export const getTestDataSystemPrompt = (): string => TEST_DATA_SYSTEM; +export const getPlaylistSystemPrompt = (): string => PLAYLIST_SYSTEM; + +// ─── Response parsers ─────────────────────────────────────── + +export const parseAssertionResponse = ( + json: string, +): Result<readonly AssertionEnrichment[], string> => { + try { + const parsed: unknown = JSON.parse(json); + if (!Array.isArray(parsed)) { + return err('Expected JSON array for 
assertion enrichments'); + } + return ok(parsed as readonly AssertionEnrichment[]); + } catch { + return err('Failed to parse assertion enrichment response'); + } +}; + +export const parseTestDataResponse = ( + json: string, +): Result<readonly TestDataEnrichment[], string> => { + try { + const parsed: unknown = JSON.parse(json); + if (!Array.isArray(parsed)) { + return err('Expected JSON array for test data enrichments'); + } + return ok(parsed as readonly TestDataEnrichment[]); + } catch { + return err('Failed to parse test data enrichment response'); + } +}; + +export const parsePlaylistOrderResponse = (json: string): Result<readonly string[], string> => { + try { + const parsed: unknown = JSON.parse(json); + if (!Array.isArray(parsed)) { + return err('Expected JSON array for playlist order'); + } + return ok(parsed as readonly string[]); + } catch { + return err('Failed to parse playlist order response'); + } +}; + +// ─── Content enrichment (line-based, no regex) ────────────── + +const isSectionHeader = (line: string): boolean => line.startsWith('[') && line.endsWith(']'), + skipToNextSection = (lines: readonly string[], startIdx: number): number => { + let idx = startIdx; + while (idx < lines.length && !isSectionHeader(lines[idx] ?? '')) { + idx++; + } + return idx; + }, + trimTrailingBlanks = (lines: readonly string[], endIdx: number, minIdx: number): number => { + let idx = endIdx; + while (idx > minIdx && (lines[idx - 1] ?? 
'').trim().length === 0) { + idx--; + } + return idx; + }, + findSectionEnd = (lines: readonly string[], sectionHeader: string): number => { + const sectionIdx = lines.indexOf(sectionHeader); + if (sectionIdx < 0) { + return -1; + } + const rawEnd = skipToNextSection(lines, sectionIdx + 1); + return trimTrailingBlanks(lines, rawEnd, sectionIdx + 1); + }; + +export const enrichAssertions = (napContent: string, newAssertions: readonly string[]): string => { + if (newAssertions.length === 0) { + return napContent; + } + const lines = napContent.split('\n'), + insertAt = findSectionEnd(lines, SECTION_ASSERT); + if (insertAt < 0) { + return napContent; + } + const before = lines.slice(0, insertAt), + after = lines.slice(insertAt); + return [...before, ...newAssertions, ...after].join('\n'); +}; + +const findTripleQuoteBounds = ( + lines: readonly string[], + startIdx: number, +): { readonly start: number; readonly end: number } | undefined => { + let startQuote = -1; + for (let i = startIdx; i < lines.length; i++) { + if ((lines[i] ?? 
'').trim() === NAP_TRIPLE_QUOTE) { + if (startQuote < 0) { + startQuote = i; + } else { + return { start: startQuote, end: i }; + } + } + } + return undefined; +}; + +export const enrichRequestBody = (napContent: string, newBody: string): string => { + const lines = napContent.split('\n'), + bodyIdx = lines.indexOf(SECTION_REQUEST_BODY); + if (bodyIdx < 0) { + return napContent; + } + const bounds = findTripleQuoteBounds(lines, bodyIdx + 1); + if (bounds === undefined) { + return napContent; + } + const before = lines.slice(0, bounds.start + 1), + after = lines.slice(bounds.end); + return [...before, newBody, ...after].join('\n'); +}; + +export const reorderPlaylistSteps = ( + playlistContent: string, + orderedFiles: readonly string[], +): string => { + if (orderedFiles.length === 0) { + return playlistContent; + } + const lines = playlistContent.split('\n'), + stepsIdx = lines.indexOf(SECTION_STEPS); + if (stepsIdx < 0) { + return playlistContent; + } + const before = lines.slice(0, stepsIdx + 1), + newSteps = orderedFiles.map((f) => (f.startsWith('./') ? 
f : `./${f}`)); + return [...before, ...newSteps, ''].join('\n'); +}; + +// ─── File-level enrichment ────────────────────────────────── + +const fileMatchesOperation = (file: GeneratedFile, operationId: string): boolean => + file.content.includes(operationId); + +export const applyAssertionEnrichments = ( + files: readonly GeneratedFile[], + enrichments: readonly AssertionEnrichment[], +): readonly GeneratedFile[] => + files.map((file) => { + const match = enrichments.find((e) => fileMatchesOperation(file, e.operationId)); + if (match === undefined) { + return file; + } + return { + fileName: file.fileName, + content: enrichAssertions(file.content, match.assertions), + }; + }); + +export const applyTestDataEnrichments = ( + files: readonly GeneratedFile[], + enrichments: readonly TestDataEnrichment[], +): readonly GeneratedFile[] => + files.map((file) => { + const match = enrichments.find((e) => fileMatchesOperation(file, e.operationId)); + if (match === undefined) { + return file; + } + return { + fileName: file.fileName, + content: enrichRequestBody(file.content, match.requestBody), + }; + }); + +// ─── File scanning & summary extraction ───────────────────── + +const NAME_PREFIX = 'name = ', + BODY_PREFIX = 'body.', + EXISTS_SUFFIX = ' exists', + HTTP_METHOD_PREFIXES = [ + 'GET ', + 'POST ', + 'PUT ', + 'PATCH ', + 'DELETE ', + 'HEAD ', + 'OPTIONS ', + ] as const, + isRequestLine = (line: string): boolean => + HTTP_METHOD_PREFIXES.some((prefix) => line.startsWith(prefix)); + +export const extractSummary = (file: GeneratedFile): OperationSummary => { + const lines = file.content.split('\n'), + nameLine = lines.find((l) => l.startsWith(NAME_PREFIX)), + requestLine = lines.find(isRequestLine), + name = nameLine?.slice(NAME_PREFIX.length) ?? file.fileName; + return { + operationId: name, + method: requestLine?.split(' ')[0] ?? 'GET', + path: requestLine?.split(' ')[1] ?? 
'', + summary: name, + responseFields: lines + .filter((l) => l.startsWith(BODY_PREFIX) && l.includes(EXISTS_SUFFIX)) + .map((l) => l.slice(BODY_PREFIX.length, l.indexOf(EXISTS_SUFFIX))), + hasRequestBody: file.content.includes(SECTION_REQUEST_BODY), + }; +}; + +const collectNapFiles = (dir: string, baseDir: string, out: GeneratedFile[]): void => { + for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { + const full = path.join(dir, entry.name); + if (entry.isDirectory()) { + collectNapFiles(full, baseDir, out); + } else if (entry.name.endsWith(NAP_EXTENSION)) { + out.push({ + fileName: path.relative(baseDir, full), + content: fs.readFileSync(full, 'utf-8'), + }); + } + } +}; + +export const readGeneratedFiles = (outDir: string): readonly GeneratedFile[] => { + const files: GeneratedFile[] = []; + collectNapFiles(outDir, outDir, files); + return files; +}; diff --git a/src/Napper.VsCode/src/openApiDownloader.ts b/src/Napper.VsCode/src/openApiDownloader.ts new file mode 100644 index 0000000..6674e90 --- /dev/null +++ b/src/Napper.VsCode/src/openApiDownloader.ts @@ -0,0 +1,68 @@ +// Specs: vscode-openapi +// OpenAPI spec download — fetches specs via HTTPS with redirect support +// Pure function — no VS Code SDK dependency + +import * as fs from 'fs'; +import * as path from 'path'; +import * as https from 'https'; +import type { IncomingMessage } from 'http'; +import { type Result, err, ok } from './types'; +import { + HTTP_STATUS_CLIENT_ERROR_MIN, + HTTP_STATUS_REDIRECT_MIN, + OPENAPI_DOWNLOAD_FAILED_PREFIX, +} from './constants'; + +const isRedirect = (code: number): boolean => + code >= HTTP_STATUS_REDIRECT_MIN && code < HTTP_STATUS_CLIENT_ERROR_MIN, + isClientError = (code: number): boolean => code >= HTTP_STATUS_CLIENT_ERROR_MIN, + collectBody = (res: IncomingMessage, resolve: (r: Result<string, string>) => void): void => { + const chunks: Buffer[] = []; + res.on('data', (chunk: Buffer) => { + chunks.push(chunk); + }); + res.on('end', () => { + 
resolve(ok(Buffer.concat(chunks).toString('utf-8'))); + }); + res.on('error', (e) => { + resolve(err(`${OPENAPI_DOWNLOAD_FAILED_PREFIX}${e.message}`)); + }); + }; + +function handleHttpResponse( + res: IncomingMessage, + resolve: (r: Result<string, string>) => void, +): void { + const status = res.statusCode ?? 0; + if (isRedirect(status) && res.headers.location !== undefined) { + downloadSpec(res.headers.location) + .then(resolve) + .catch(() => { + resolve(err(`${OPENAPI_DOWNLOAD_FAILED_PREFIX}redirect`)); + }); + return; + } + if (isClientError(status)) { + resolve(err(`${OPENAPI_DOWNLOAD_FAILED_PREFIX}HTTP ${status}`)); + return; + } + collectBody(res, resolve); +} + +export async function downloadSpec(url: string): Promise<Result<string, string>> { + return new Promise((resolve) => { + https + .get(url, (res) => { + handleHttpResponse(res, resolve); + }) + .on('error', (e) => { + resolve(err(`${OPENAPI_DOWNLOAD_FAILED_PREFIX}${e.message}`)); + }); + }); +} + +export const saveTempSpec = (content: string, outDir: string): string => { + const specPath = path.join(outDir, '.openapi-spec.json'); + fs.writeFileSync(specPath, content, 'utf-8'); + return specPath; +}; diff --git a/src/Napper.VsCode/src/openApiImport.ts b/src/Napper.VsCode/src/openApiImport.ts new file mode 100644 index 0000000..35b8829 --- /dev/null +++ b/src/Napper.VsCode/src/openApiImport.ts @@ -0,0 +1,434 @@ +// Specs: vscode-openapi, vscode-openapi-import, vscode-openapi-ai, vscode-commands +// OpenAPI import command — calls CLI to generate .nap files from spec +// Deterministic generation lives in F# CLI; AI enrichment is optional via Copilot + +import * as vscode from 'vscode'; +import * as path from 'path'; +import * as fs from 'fs'; +import { execFile } from 'child_process'; +import type { ExplorerAdapter } from './explorerAdapter'; +import type { Logger } from './logger'; +import { type Result, err, ok } from './types'; +import { + CLI_CMD_GENERATE, + CLI_FLAG_OUTPUT, + CLI_FLAG_OUTPUT_DIR, 
+ CLI_OUTPUT_JSON, + CLI_PARSE_FAILED_PREFIX, + CLI_SPAWN_FAILED_PREFIX, + CLI_SUBCMD_OPENAPI, + LOG_MSG_OPENAPI_AI_CHOICE, + LOG_MSG_OPENAPI_AI_MODEL_SELECTED, + LOG_MSG_OPENAPI_AI_NO_MODEL, + LOG_MSG_OPENAPI_GENERATE_CLI, + LOG_MSG_OPENAPI_IMPORT, + LOG_MSG_OPENAPI_SPEC_SAVED, + LOG_MSG_OPENAPI_URL_DOWNLOAD_FAIL, + LOG_MSG_OPENAPI_URL_DOWNLOAD_OK, + LOG_MSG_OPENAPI_URL_FETCH, + NAPLIST_EXTENSION, + OPENAPI_AI_CHOICE_BASIC, + OPENAPI_AI_CHOICE_ENHANCED, + OPENAPI_AI_CHOICE_TITLE, + OPENAPI_AI_COPILOT_FAMILY, + OPENAPI_AI_ENRICHING_ASSERTIONS, + OPENAPI_AI_ENRICHING_TEST_DATA, + OPENAPI_AI_NO_COPILOT, + OPENAPI_AI_PROGRESS_TITLE, + OPENAPI_AI_REORDERING_PLAYLIST, + OPENAPI_DOWNLOADING, + OPENAPI_ERROR_PREFIX, + OPENAPI_FILE_EXTENSIONS, + OPENAPI_FILTER_LABEL, + OPENAPI_PICK_FILE, + OPENAPI_PICK_FOLDER, + OPENAPI_SUCCESS_PREFIX, + OPENAPI_SUCCESS_SUFFIX, + OPENAPI_URL_PLACEHOLDER, + OPENAPI_URL_PROMPT, +} from './constants'; +import { downloadSpec, saveTempSpec } from './openApiDownloader'; +import { + type GeneratedFile, + type OperationSummary, + applyAssertionEnrichments, + applyTestDataEnrichments, + buildAssertionPrompt, + buildPlaylistOrderPrompt, + buildTestDataPrompt, + extractSummary, + getAssertionSystemPrompt, + getPlaylistSystemPrompt, + getTestDataSystemPrompt, + parseAssertionResponse, + parsePlaylistOrderResponse, + parseTestDataResponse, + readGeneratedFiles, + reorderPlaylistSteps, +} from './openApiAiEnhancer'; + +interface GenerateResult { + readonly files: number; + readonly playlist: string; +} +interface PickedPaths { + readonly specFile: vscode.Uri; + readonly outFolder: vscode.Uri; +} +interface ImportContext { + readonly explorer: ExplorerAdapter; + readonly logger: Logger; + readonly getCliPath: () => string; +} + +interface LmRequestParams { + readonly model: vscode.LanguageModelChat; + readonly systemPrompt: string; + readonly userPrompt: string; + readonly token: vscode.CancellationToken; +} + +interface EnrichStepParams { + readonly lm: 
LmRequestParams; + readonly operations: readonly OperationSummary[]; + readonly files: readonly GeneratedFile[]; +} + +interface EnrichmentContext { + readonly progress: vscode.Progress<{ message?: string }>; + readonly baseParams: LmRequestParams; + readonly outDir: string; + readonly logger: Logger; +} + +const MAX_PREVIEW_LENGTH = 200, + pickSpecFile = (): Thenable<readonly vscode.Uri[] | undefined> => + vscode.window.showOpenDialog({ + canSelectFiles: true, + canSelectFolders: false, + canSelectMany: false, + filters: { [OPENAPI_FILTER_LABEL]: [...OPENAPI_FILE_EXTENSIONS] }, + title: OPENAPI_PICK_FILE, + }), + pickOutputFolder = (): Thenable<readonly vscode.Uri[] | undefined> => { + const uri = vscode.workspace.workspaceFolders?.[0]?.uri, + base = uri !== undefined ? { defaultUri: uri } : {}; + return vscode.window.showOpenDialog({ + canSelectFiles: false, + canSelectFolders: true, + canSelectMany: false, + title: OPENAPI_PICK_FOLDER, + ...base, + }); + }, + pickPaths = async (): Promise<PickedPaths | undefined> => { + const specFile = (await pickSpecFile())?.[0]; + if (specFile === undefined) { + return undefined; + } + const outFolder = (await pickOutputFolder())?.[0]; + return outFolder !== undefined ? { specFile, outFolder } : undefined; + }, + buildGenerateArgs = (specPath: string, outDir: string): readonly string[] => [ + CLI_CMD_GENERATE, + CLI_SUBCMD_OPENAPI, + specPath, + CLI_FLAG_OUTPUT_DIR, + outDir, + CLI_FLAG_OUTPUT, + CLI_OUTPUT_JSON, + ], + parseGenerateOutput = (stdout: string): Result<GenerateResult, string> => { + try { + return ok(JSON.parse(stdout) as GenerateResult); + } catch { + return err(`${CLI_PARSE_FAILED_PREFIX}${stdout.slice(0, MAX_PREVIEW_LENGTH)}`); + } + }, + buildCliErrorMsg = (cliPath: string, stderr: string): string => { + const suffix = stderr.length > 0 ? 
` — ${stderr}` : ''; + return `${CLI_SPAWN_FAILED_PREFIX}${cliPath}${suffix}`; + }, + spawnGenerate = ( + cliPath: string, + args: readonly string[], + resolve: (r: Result<GenerateResult, string>) => void, + ): void => { + execFile( + cliPath, + [...args], + { timeout: 30_000, env: { ...process.env } }, + (error, stdout, stderr) => { + if (error !== null && stdout.length === 0) { + resolve(err(buildCliErrorMsg(cliPath, stderr))); + return; + } + resolve(parseGenerateOutput(stdout)); + }, + ); + }, + callCliGenerate = async ( + specPath: string, + outDir: string, + ctx: ImportContext, + ): Promise<Result<GenerateResult, string>> => { + const cliPath = ctx.getCliPath(), + args = buildGenerateArgs(specPath, outDir); + ctx.logger.info(`${LOG_MSG_OPENAPI_GENERATE_CLI} ${cliPath} ${specPath} → ${outDir}`); + return new Promise((resolve) => { + spawnGenerate(cliPath, args, resolve); + }); + }, + handleSuccess = async ( + outDir: string, + generated: GenerateResult, + ctx: ImportContext, + ): Promise<void> => { + ctx.logger.info(`${LOG_MSG_OPENAPI_IMPORT} ${generated.files}`); + ctx.explorer.refresh(); + await vscode.window.showTextDocument( + await vscode.workspace.openTextDocument(path.join(outDir, generated.playlist)), + ); + void vscode.window.showInformationMessage( + `${OPENAPI_SUCCESS_PREFIX}${generated.files}${OPENAPI_SUCCESS_SUFFIX}`, + ); + }, + askAiChoice = async (): Promise<string | undefined> => { + const picked = await vscode.window.showQuickPick( + [{ label: OPENAPI_AI_CHOICE_BASIC }, { label: OPENAPI_AI_CHOICE_ENHANCED }], + { title: OPENAPI_AI_CHOICE_TITLE, placeHolder: OPENAPI_AI_CHOICE_TITLE }, + ); + return picked?.label; + }, + selectCopilotModel = async (): Promise<vscode.LanguageModelChat | undefined> => { + const models = await vscode.lm.selectChatModels({ family: OPENAPI_AI_COPILOT_FAMILY }); + return models[0]; + }, + sendLmRequest = async (params: LmRequestParams): Promise<string> => { + const messages = [ + 
vscode.LanguageModelChatMessage.User(`${params.systemPrompt}\n\n${params.userPrompt}`), + ], + response = await params.model.sendRequest(messages, {}, params.token), + parts: string[] = []; + for await (const chunk of response.text) { + parts.push(chunk); + } + return parts.join(''); + }, + enrichAssertionStep = async ( + step: EnrichStepParams, + logger: Logger, + ): Promise<readonly GeneratedFile[]> => { + const response = await sendLmRequest({ + ...step.lm, + systemPrompt: getAssertionSystemPrompt(), + userPrompt: buildAssertionPrompt(step.operations), + }), + result = parseAssertionResponse(response); + if (!result.ok) { + logger.info(result.error); + return step.files; + } + return applyAssertionEnrichments(step.files, result.value); + }, + enrichTestDataStep = async ( + step: EnrichStepParams, + logger: Logger, + ): Promise<readonly GeneratedFile[]> => { + const prompt = buildTestDataPrompt(step.operations); + if (prompt.length === 0) { + return step.files; + } + const response = await sendLmRequest({ + ...step.lm, + systemPrompt: getTestDataSystemPrompt(), + userPrompt: prompt, + }), + result = parseTestDataResponse(response); + if (!result.ok) { + logger.info(result.error); + return step.files; + } + return applyTestDataEnrichments(step.files, result.value); + }, + findFirstNaplist = (outDir: string): string | undefined => { + const naplists = fs.readdirSync(outDir).filter((f) => f.endsWith(NAPLIST_EXTENSION)); + return naplists[0]; + }, + fetchPlaylistOrder = async ( + params: LmRequestParams, + fileNames: readonly string[], + ): Promise<Result<readonly string[], string>> => { + const response = await sendLmRequest({ + ...params, + systemPrompt: getPlaylistSystemPrompt(), + userPrompt: buildPlaylistOrderPrompt(fileNames), + }); + return parsePlaylistOrderResponse(response); + }, + reorderPlaylistStep = async ( + params: LmRequestParams, + outDir: string, + fileNames: readonly string[], + ): Promise<void> => { + const first = findFirstNaplist(outDir); + if 
(first === undefined) { + return; + } + const playlistPath = path.join(outDir, first), + result = await fetchPlaylistOrder(params, fileNames); + if (!result.ok) { + return; + } + const content = reorderPlaylistSteps(fs.readFileSync(playlistPath, 'utf-8'), result.value); + fs.writeFileSync(playlistPath, content, 'utf-8'); + }, + writeEnrichedFiles = (outDir: string, files: readonly GeneratedFile[]): void => { + for (const file of files) { + fs.writeFileSync(path.join(outDir, file.fileName), file.content, 'utf-8'); + } + }, + executeEnrichmentSteps = async (ctx: EnrichmentContext): Promise<void> => { + const files = readGeneratedFiles(ctx.outDir), + operations = files.map(extractSummary); + + ctx.progress.report({ message: OPENAPI_AI_ENRICHING_ASSERTIONS }); + let enriched = await enrichAssertionStep({ lm: ctx.baseParams, operations, files }, ctx.logger); + + ctx.progress.report({ message: OPENAPI_AI_ENRICHING_TEST_DATA }); + enriched = await enrichTestDataStep( + { lm: ctx.baseParams, operations, files: enriched }, + ctx.logger, + ); + + ctx.progress.report({ message: OPENAPI_AI_REORDERING_PLAYLIST }); + await reorderPlaylistStep( + ctx.baseParams, + ctx.outDir, + enriched.map((f) => f.fileName), + ); + + writeEnrichedFiles(ctx.outDir, enriched); + }; + +export const runAiEnrichment = async (outDir: string, logger: Logger): Promise<void> => { + const model = await selectCopilotModel(); + if (model === undefined) { + logger.warn(LOG_MSG_OPENAPI_AI_NO_MODEL); + await vscode.window.showWarningMessage(OPENAPI_AI_NO_COPILOT); + return; + } + logger.info(`${LOG_MSG_OPENAPI_AI_MODEL_SELECTED} ${model.name}`); + await vscode.window.withProgress( + { + location: vscode.ProgressLocation.Notification, + title: OPENAPI_AI_PROGRESS_TITLE, + cancellable: true, + }, + async (progress, token) => { + const baseParams: LmRequestParams = { model, systemPrompt: '', userPrompt: '', token }; + await executeEnrichmentSteps({ progress, baseParams, outDir, logger }); + }, + ); +}; + +export 
{ downloadSpec, saveTempSpec } from './openApiDownloader'; + +const askForUrl = async (): Promise<string | undefined> => + vscode.window.showInputBox({ + prompt: OPENAPI_URL_PROMPT, + placeHolder: OPENAPI_URL_PLACEHOLDER, + ignoreFocusOut: true, + }); + +const generateAndEnrich = async ( + specPath: string, + outDir: string, + ctx: ImportContext, +): Promise<void> => { + const choice = await askAiChoice(); + if (choice === undefined) { + return; + } + ctx.logger.info(`${LOG_MSG_OPENAPI_AI_CHOICE} ${choice}`); + const result = await callCliGenerate(specPath, outDir, ctx); + if (!result.ok) { + await vscode.window.showErrorMessage(`${OPENAPI_ERROR_PREFIX}${result.error}`); + return; + } + if (choice === OPENAPI_AI_CHOICE_ENHANCED) { + await runAiEnrichment(outDir, ctx.logger); + } + await handleSuccess(outDir, result.value, ctx); +}; + +const downloadWithProgress = async (url: string): Promise<Result<string, string>> => + vscode.window.withProgress( + { + location: vscode.ProgressLocation.Notification, + title: OPENAPI_DOWNLOADING, + cancellable: false, + }, + async () => downloadSpec(url), + ), + handleDownloadResult = async ( + specResult: Result<string, string>, + outDir: string, + logger: Logger, + ): Promise<string | undefined> => { + if (!specResult.ok) { + logger.error(`${LOG_MSG_OPENAPI_URL_DOWNLOAD_FAIL} ${specResult.error}`); + await vscode.window.showErrorMessage(`${OPENAPI_ERROR_PREFIX}${specResult.error}`); + return undefined; + } + logger.info(`${LOG_MSG_OPENAPI_URL_DOWNLOAD_OK} ${specResult.value.length}`); + const specPath = saveTempSpec(specResult.value, outDir); + logger.info(`${LOG_MSG_OPENAPI_SPEC_SAVED} ${specPath}`); + return specPath; + }, + fetchAndSaveSpec = async ( + url: string, + outDir: string, + logger: Logger, + ): Promise<string | undefined> => { + logger.info(`${LOG_MSG_OPENAPI_URL_FETCH} ${url}`); + const specResult = await downloadWithProgress(url); + return handleDownloadResult(specResult, outDir, logger); + }; + +export const 
importOpenApiFromUrl = async ( + explorer: ExplorerAdapter, + logger: Logger, + getCliPath: () => string, +): Promise<void> => { + const url = await askForUrl(); + if (url === undefined || url.length === 0) { + return; + } + const outFolder = await pickOutputFolder(), + outDir = outFolder?.[0]?.fsPath; + if (outDir === undefined) { + return; + } + const specPath = await fetchAndSaveSpec(url, outDir, logger); + if (specPath === undefined) { + return; + } + await generateAndEnrich(specPath, outDir, { explorer, logger, getCliPath }); +}; + +export const importOpenApiFromFile = async ( + explorer: ExplorerAdapter, + logger: Logger, + getCliPath: () => string, +): Promise<void> => { + const paths = await pickPaths(); + if (paths === undefined) { + return; + } + await generateAndEnrich(paths.specFile.fsPath, paths.outFolder.fsPath, { + explorer, + logger, + getCliPath, + }); +}; diff --git a/src/Nap.VsCode/src/playlistPanel.ts b/src/Napper.VsCode/src/playlistPanel.ts similarity index 78% rename from src/Nap.VsCode/src/playlistPanel.ts rename to src/Napper.VsCode/src/playlistPanel.ts index d8940ee..17a03e1 100644 --- a/src/Nap.VsCode/src/playlistPanel.ts +++ b/src/Napper.VsCode/src/playlistPanel.ts @@ -1,9 +1,10 @@ +// Specs: vscode-playlists, vscode-layout // Playlist results webview panel — shows all step results from a .naplist run // Opens IMMEDIATELY with pending rows, updates progressively via postMessage -import * as vscode from "vscode"; -import * as path from "path"; -import type { RunResult } from "./types"; +import * as vscode from 'vscode'; +import * as path from 'path'; +import type { RunResult } from './types'; import { MSG_ADD_RESULT, MSG_RUN_COMPLETE, @@ -11,33 +12,35 @@ import { MSG_SAVE_REPORT, PLAYLIST_PANEL_TITLE, PLAYLIST_PANEL_VIEW_TYPE, -} from "./constants"; -import { escapeHtml, buildResultDetailHtml, SHARED_SECTION_STYLES } from "./htmlUtils"; +} from './constants'; +import { escapeHtml, buildResultDetailHtml, SHARED_SECTION_STYLES } from 
'./htmlUtils'; -const buildStepMetadata = (result: RunResult): { +interface StepMetadata { readonly icon: string; readonly statusCls: string; readonly fileName: string; readonly statusCode: number | string; readonly duration: string; readonly assertionSummary: string; -} => { - const assertionCount = result.assertions.length, - passedCount = result.assertions.filter((a) => a.passed).length; - return { - icon: result.passed ? "✓" : "✗", - statusCls: result.passed ? "pass" : "fail", - fileName: path.basename(result.file), - statusCode: result.statusCode ?? "", - duration: result.duration !== undefined ? `${result.duration.toFixed(0)}ms` : "", - assertionSummary: assertionCount > 0 ? `${passedCount}/${assertionCount}` : "", - }; -}, +} - buildCompletedStepRow = (result: RunResult, index: number): string => { - const meta = buildStepMetadata(result); +const formatAssertionSummary = (result: RunResult): string => { + const assertionCount = result.assertions.length, + passedCount = result.assertions.filter((a) => a.passed).length; + return assertionCount > 0 ? `${passedCount}/${assertionCount}` : ''; + }, + buildStepMetadata = (result: RunResult): StepMetadata => ({ + icon: result.passed ? '✓' : '✗', + statusCls: result.passed ? 'pass' : 'fail', + fileName: path.basename(result.file), + statusCode: result.statusCode ?? '', + duration: result.duration !== undefined ? 
`${result.duration.toFixed(0)}ms` : '', + assertionSummary: formatAssertionSummary(result), + }), + buildCompletedStepRow = (result: RunResult, index: number): string => { + const meta = buildStepMetadata(result); - return ` + return ` <div class="step" data-index="${index}"> <div class="step-summary ${meta.statusCls}" onclick="toggleStep(${index})"> <span class="step-icon">${meta.icon}</span> @@ -51,9 +54,8 @@ const buildStepMetadata = (result: RunResult): { ${buildResultDetailHtml(result)} </div> </div>`; -}, - - buildPendingStepRow = (stepFileName: string, index: number): string => ` + }, + buildPendingStepRow = (stepFileName: string, index: number): string => ` <div class="step" data-index="${index}" id="step-${index}"> <div class="step-summary pending"> <span class="step-icon spinner">○</span> @@ -65,8 +67,7 @@ const buildStepMetadata = (result: RunResult): { </div> <div class="step-detail" id="detail-${index}" style="display:none;"></div> </div>`, - - PLAYLIST_PANEL_STYLES = ` + PLAYLIST_PANEL_STYLES = ` body { font-family: var(--vscode-font-family); color: var(--vscode-foreground); background: var(--vscode-editor-background); padding: 16px; margin: 0; } h2 { margin: 0 0 12px 0; font-size: 16px; } h3 { margin: 0; font-size: 13px; color: var(--vscode-descriptionForeground); display: inline; } @@ -99,8 +100,7 @@ const buildStepMetadata = (result: RunResult): { .report-btn { display: inline-flex; align-items: center; gap: 6px; padding: 5px 12px; margin-left: auto; font-size: 12px; font-weight: 500; color: var(--vscode-button-foreground); background: var(--vscode-button-background); border: none; border-radius: 4px; cursor: pointer; white-space: nowrap; } .report-btn:hover { background: var(--vscode-button-hoverBackground); } .report-btn svg { width: 14px; height: 14px; fill: currentColor; }`, - - TOGGLE_STEP_FN = ` + TOGGLE_STEP_FN = ` function toggleStep(index) { const detail = document.getElementById('detail-' + index); const chevron = 
document.getElementById('chevron-' + index); @@ -110,8 +110,7 @@ const buildStepMetadata = (result: RunResult): { if (isHidden) { chevron.classList.add('open'); } else { chevron.classList.remove('open'); } }`, - - buildMessageHandler = (): string => ` + buildMessageHandler = (): string => ` window.addEventListener('message', function(event) { const msg = event.data; if (msg.type === '${MSG_ADD_RESULT}') { @@ -122,8 +121,7 @@ const buildStepMetadata = (result: RunResult): { updateSummary(msg.summaryHtml); } });`, - - HELPER_FNS = ` + HELPER_FNS = ` function updateStepRow(index, html) { const stepEl = document.getElementById('step-' + index); if (stepEl) { stepEl.outerHTML = html; } @@ -135,20 +133,14 @@ const buildStepMetadata = (result: RunResult): { function saveReport() { vscodeApi.postMessage({ type: '${MSG_SAVE_REPORT}' }); }`, - - buildStreamingScript = (): string => ` + buildStreamingScript = (): string => ` <script> const vscodeApi = acquireVsCodeApi(); ${TOGGLE_STEP_FN} ${buildMessageHandler()} ${HELPER_FNS} </script>`, - - buildStreamingBody = ( - playlistName: string, - stepsHtml: string, - stepCount: number -): string => ` + buildStreamingBody = (playlistName: string, stepsHtml: string, stepCount: number): string => ` <h2>${escapeHtml(playlistName)}</h2> <div class="playlist-summary" id="summary"> <span class="summary-badge running">RUNNING</span> @@ -158,9 +150,8 @@ const buildStepMetadata = (result: RunResult): { <div class="steps-list" id="steps-list"> ${stepsHtml} </div>`, - - wrapInHtmlShell = (bodyContent: string, scriptContent: string): string => - `<!DOCTYPE html> + wrapInHtmlShell = (bodyContent: string, scriptContent: string): string => + `<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"/> @@ -172,39 +163,30 @@ const buildStepMetadata = (result: RunResult): { ${scriptContent} </body> </html>`, + buildStreamingHtml = (playlistFile: string, stepFileNames: readonly string[]): string => { + const playlistName = 
path.basename(playlistFile, path.extname(playlistFile)), + stepsHtml = stepFileNames.map((name, i) => buildPendingStepRow(name, i)).join('\n'), + body = buildStreamingBody(playlistName, stepsHtml, stepFileNames.length); + return wrapInHtmlShell(body, buildStreamingScript()); + }, + buildSummaryHtml = (results: readonly RunResult[]): string => { + const totalCount = results.length, + passedCount = results.filter((r) => r.passed).length, + failedCount = totalCount - passedCount, + totalDuration = results.reduce((acc, r) => acc + (r.duration ?? 0), 0), + allPassed = totalCount > 0 && failedCount === 0; - buildStreamingHtml = ( - playlistFile: string, - stepFileNames: readonly string[] -): string => { - const playlistName = path.basename(playlistFile, path.extname(playlistFile)), - stepsHtml = stepFileNames.map((name, i) => buildPendingStepRow(name, i)).join("\n"), - body = buildStreamingBody(playlistName, stepsHtml, stepFileNames.length); - return wrapInHtmlShell(body, buildStreamingScript()); -}, - - buildSummaryHtml = (results: readonly RunResult[]): string => { - const totalCount = results.length, - passedCount = results.filter((r) => r.passed).length, - failedCount = totalCount - passedCount, - totalDuration = results.reduce( - (acc, r) => acc + (r.duration ?? 0), - 0 - ), - allPassed = totalCount > 0 && failedCount === 0; - - return `<div class="playlist-summary" id="summary"> - <span class="summary-badge ${allPassed ? "all-passed" : "has-failures"}">${allPassed ? "PASSED" : "FAILED"}</span> + return `<div class="playlist-summary" id="summary"> + <span class="summary-badge ${allPassed ? 'all-passed' : 'has-failures'}">${allPassed ? 'PASSED' : 'FAILED'}</span> <span class="summary-passed">${passedCount} passed</span> - ${failedCount > 0 ? `<span class="summary-failed">${failedCount} failed</span>` : ""} + ${failedCount > 0 ? 
`<span class="summary-failed">${failedCount} failed</span>` : ''} <span class="summary-total">${totalCount} steps</span> <span class="summary-duration">${totalDuration.toFixed(0)}ms</span> <button class="report-btn" onclick="saveReport()"><svg viewBox="0 0 16 16"><path d="M4 1h8a1 1 0 0 1 1 1v12a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1zm1 2v2h6V3H5zm0 4v1h6V7H5zm0 3v1h4v-1H5z"/></svg>Save Report</button> </div>`; -}, - - buildErrorSummaryHtml = (error: string): string => - `<div class="playlist-summary" id="summary"> + }, + buildErrorSummaryHtml = (error: string): string => + `<div class="playlist-summary" id="summary"> <span class="summary-badge has-failures">ERROR</span> <span class="summary-failed">${escapeHtml(error)}</span> </div>`; @@ -221,7 +203,7 @@ const createNewPanel = (opts: CreatePanelOptions): vscode.WebviewPanel => { PLAYLIST_PANEL_VIEW_TYPE, PLAYLIST_PANEL_TITLE, opts.viewColumn, - { enableScripts: true, retainContextWhenHidden: true } + { enableScripts: true, retainContextWhenHidden: true }, ); panel.webview.html = opts.html; panel.webview.onDidReceiveMessage(opts.onMessage); @@ -246,7 +228,7 @@ export class PlaylistPanel implements vscode.Disposable { showRunning( playlistFile: string, stepFileNames: readonly string[], - viewColumn: vscode.ViewColumn + viewColumn: vscode.ViewColumn, ): void { const html = buildStreamingHtml(playlistFile, stepFileNames); @@ -260,12 +242,16 @@ export class PlaylistPanel implements vscode.Disposable { viewColumn, html, onMessage: this._handleWebviewMessage, - onDispose: () => { this._panel = undefined; }, + onDispose: () => { + this._panel = undefined; + }, }); } addResult(index: number, result: RunResult): void { - if (!this._panel) {return;} + if (!this._panel) { + return; + } const html = buildCompletedStepRow(result, index); this._panel.webview.postMessage({ type: MSG_ADD_RESULT, @@ -275,7 +261,9 @@ export class PlaylistPanel implements vscode.Disposable { } showComplete(results: readonly RunResult[]): void 
{ - if (!this._panel) {return;} + if (!this._panel) { + return; + } const summaryHtml = buildSummaryHtml(results); this._panel.webview.postMessage({ type: MSG_RUN_COMPLETE, @@ -284,7 +272,9 @@ export class PlaylistPanel implements vscode.Disposable { } showError(error: string): void { - if (!this._panel) {return;} + if (!this._panel) { + return; + } const summaryHtml = buildErrorSummaryHtml(error); this._panel.webview.postMessage({ type: MSG_RUN_ERROR, diff --git a/src/Napper.VsCode/src/reportGenerator.ts b/src/Napper.VsCode/src/reportGenerator.ts new file mode 100644 index 0000000..f16797b --- /dev/null +++ b/src/Napper.VsCode/src/reportGenerator.ts @@ -0,0 +1,373 @@ +// Specs: vscode-playlists +// Standalone HTML report generator for playlist results +// Pure function — no VS Code SDK dependency +// Generates a beautiful, self-contained HTML file + +import * as path from 'path'; +import type { RunResult } from './types'; +import { escapeHtml, formatBodyHtml } from './htmlUtils'; +import { REPORT_STYLES } from './reportStyles'; +import { + NAPPER_URL, + NIMBLESITE_URL, + PERCENTAGE_MULTIPLIER, + REPORT_FOOTER_GENERATED_BY, + REPORT_FOOTER_MADE_BY, + SECTION_LABEL_REQUEST, + SECTION_LABEL_REQUEST_BODY, + SECTION_LABEL_REQUEST_HEADERS, + SECTION_LABEL_RESPONSE, + SECTION_LABEL_RESPONSE_HEADERS, +} from './constants'; + +const buildReportAssertionRow = (a: { + readonly passed: boolean; + readonly target: string; + readonly expected: string; + readonly actual: string; + }): string => { + const cls = a.passed ? 'pass' : 'fail', + icon = a.passed ? '\u2713' : '\u2717', + detail = a.passed + ? 
'' + : `<span class="assertion-detail">expected: ${escapeHtml(a.expected)} | actual: ${escapeHtml(a.actual)}</span>`; + return `<div class="assertion-row ${cls}"> + <span class="assertion-icon">${icon}</span> + <span class="assertion-target">${escapeHtml(a.target)}</span> + ${detail} + </div>`; + }, + buildReportAssertions = (result: RunResult): string => { + if (result.assertions.length === 0) { + return ''; + } + + const rows = result.assertions.map((a) => buildReportAssertionRow(a)).join('\n'); + + return `<div class="detail-section"> + <div class="detail-section-title">Assertions</div> + <div class="assertions-list">${rows}</div> + </div>`; + }, + buildReportHeadersTable = (headers: Readonly<Record<string, string>> | undefined): string => { + if (!headers) { + return ''; + } + + return Object.entries(headers) + .map( + ([k, v]) => + `<tr><td class="h-key">${escapeHtml(k)}</td><td class="h-val">${escapeHtml(v)}</td></tr>`, + ) + .join('\n'); + }, + buildReportHeadersSection = ( + title: string, + headers: Readonly<Record<string, string>> | undefined, + ): string => { + const rows = buildReportHeadersTable(headers); + if (!rows) { + return ''; + } + + return `<div class="detail-section"> + <div class="detail-section-title">${title}</div> + <table class="headers-table"> + <thead><tr><th>Header</th><th>Value</th></tr></thead> + <tbody>${rows}</tbody> + </table> + </div>`; + }, + buildReportLog = (log: readonly string[] | undefined): string => { + if (!log || log.length === 0) { + return ''; + } + + const lines = log.map((line) => escapeHtml(line)).join('\n'); + + return `<div class="detail-section"> + <div class="detail-section-title">Output</div> + <pre class="log-output">${lines}</pre> + </div>`; + }, + buildReportBody = (body: string | undefined): string => { + if (body === undefined || body === '') { + return ''; + } + + return `<div class="detail-section"> + <div class="detail-section-title">Response Body</div> + <pre 
class="code-block">${formatBodyHtml(body)}</pre> + </div>`; + }, + buildReportRequestUrl = (result: RunResult): string => + result.requestUrl !== undefined && result.requestUrl !== '' + ? `<div class="request-url-line"><span class="request-method-tag">${escapeHtml(result.requestMethod ?? '')}</span> ${escapeHtml(result.requestUrl)}</div>` + : '', + buildReportRequestBody = (result: RunResult): string => { + if (result.requestBody === undefined || result.requestBody === '') { + return ''; + } + const formatted = formatBodyHtml(result.requestBody), + contentTypeHint = + result.requestBodyContentType !== undefined && result.requestBodyContentType !== '' + ? `<div class="content-type-hint">${escapeHtml(result.requestBodyContentType)}</div>` + : ''; + return `<div class="detail-section"> + <div class="detail-section-title">${SECTION_LABEL_REQUEST_BODY}</div> + ${contentTypeHint} + <pre class="code-block">${formatted}</pre> + </div>`; + }, + buildReportCollapsibleGroup = ({ + title, + content, + open, + }: { + readonly title: string; + readonly content: string; + readonly open: boolean; + }): string => + `<details class="report-group"${open ? ' open' : ''}> + <summary class="report-group-summary"><span class="report-group-title">${title}</span><span class="report-group-chevron">▶</span></summary> + <div class="report-group-content">${content}</div> + </details>`, + buildReportRequestGroup = (result: RunResult): string => { + const urlHtml = buildReportRequestUrl(result), + headersHtml = buildReportHeadersSection(SECTION_LABEL_REQUEST_HEADERS, result.requestHeaders), + bodyHtml = buildReportRequestBody(result), + content = `${urlHtml}${headersHtml}${bodyHtml}`; + + return buildReportCollapsibleGroup({ + title: SECTION_LABEL_REQUEST, + content: content !== '' ? content : '<span class="empty-hint">No request details</span>', + open: false, + }); + }, + collectResponseParts = (result: RunResult): readonly string[] => { + const assertionsPart = result.assertions.length > 0 ? 
buildReportAssertions(result) : undefined, + headersPart = buildReportHeadersSection(SECTION_LABEL_RESPONSE_HEADERS, result.headers), + bodyPart = buildReportBody(result.body); + return [assertionsPart, headersPart, bodyPart].filter( + (p): p is string => p !== undefined && p !== '', + ); + }, + buildReportResponseGroup = (result: RunResult): string => { + const parts = collectResponseParts(result); + if (parts.length === 0) { + return ''; + } + return buildReportCollapsibleGroup({ + title: SECTION_LABEL_RESPONSE, + content: parts.join('\n'), + open: true, + }); + }, + buildStepCardBadges = (result: RunResult, cls: string, duration: string): string => { + const httpBadge = + result.statusCode !== undefined + ? `<span class="badge http">${result.statusCode}</span>` + : '', + durationBadge = duration !== '' ? `<span class="badge duration">${duration}</span>` : '', + statusBadge = `<span class="badge status-${cls}">${result.passed ? 'PASSED' : 'FAILED'}</span>`; + + return `${httpBadge} + ${durationBadge} + ${statusBadge}`; + }, + buildStepCardErrorHtml = (error: string | undefined): string => + error !== undefined && error !== '' + ? `<div class="detail-section"><div class="detail-section-title">Error</div><pre class="error-box">${escapeHtml(error)}</pre></div>` + : '', + buildStepCardMetaHtml = (assertionText: string): string => + assertionText !== '' ? 
`<span class="step-meta-item">${assertionText}</span>` : '', + buildStepCardHeader = (opts: { + readonly result: RunResult; + readonly index: number; + readonly cls: string; + readonly icon: string; + readonly fileName: string; + readonly assertionText: string; + readonly duration: string; + }): string => ` + <div class="step-header" onclick="toggleStep(${opts.index})"> + <div class="step-indicator ${opts.cls}">${opts.icon}</div> + <div class="step-info"> + <div class="step-name">${escapeHtml(opts.fileName)}</div> + <div class="step-meta">${buildStepCardMetaHtml(opts.assertionText)}</div> + </div> + <div class="step-badges"> + ${buildStepCardBadges(opts.result, opts.cls, opts.duration)} + </div> + <span class="step-chevron">▶</span> + </div>`, + buildStepCardProps = ( + result: RunResult, + ): { + readonly cls: string; + readonly icon: string; + readonly fileName: string; + readonly duration: string; + readonly assertionText: string; + } => { + const passedAssertions = result.assertions.filter((a) => a.passed).length, + totalAssertions = result.assertions.length; + return { + cls: result.passed ? 'pass' : 'fail', + icon: result.passed ? '\u2713' : '\u2717', + fileName: path.basename(result.file), + duration: result.duration !== undefined ? `${result.duration.toFixed(0)}ms` : '', + assertionText: totalAssertions > 0 ? 
`${passedAssertions}/${totalAssertions} assertions` : '', + }; + }, + buildStepCardDetail = (result: RunResult): string => + `${buildStepCardErrorHtml(result.error)} + ${buildReportLog(result.log)} + ${buildReportRequestGroup(result)} + ${buildReportResponseGroup(result)}`, + buildStepCard = (result: RunResult, index: number): string => { + const props = buildStepCardProps(result), + header = buildStepCardHeader({ result, index, ...props }); + return `<div class="step-card" data-index="${index}"> + ${header} + <div class="step-detail">${buildStepCardDetail(result)}</div> + </div>`; + }, + computeReportStats = ( + results: readonly RunResult[], + ): { + readonly totalCount: number; + readonly passedCount: number; + readonly failedCount: number; + readonly totalDuration: number; + readonly allPassed: boolean; + readonly passRate: string; + } => { + const totalCount = results.length, + passedCount = results.filter((r) => r.passed).length, + failedCount = totalCount - passedCount, + totalDuration = results.reduce((acc, r) => acc + (r.duration ?? 0), 0), + allPassed = totalCount > 0 && failedCount === 0, + passRate = + totalCount > 0 ? 
((passedCount / totalCount) * PERCENTAGE_MULTIPLIER).toFixed(0) : '0'; + return { totalCount, passedCount, failedCount, totalDuration, allPassed, passRate }; + }, + buildReportStatusSection = (stats: { + readonly allPassed: boolean; + readonly statusCls: string; + readonly statusText: string; + readonly statusIcon: string; + }): string => ` + <div class="status-banner ${stats.statusCls}"> + <div class="status-icon">${stats.statusIcon}</div> + <span>${stats.statusText}</span> + </div>`, + buildStatCard = (opts: { + readonly label: string; + readonly valueCls: string; + readonly value: string; + readonly sub: string; + }): string => + `<div class="stat-card"><div class="stat-label">${opts.label}</div><div class="stat-value ${opts.valueCls}">${opts.value}</div><div class="stat-sub">${opts.sub}</div></div>`, + buildPassRateCard = (stats: ReturnType<typeof computeReportStats>): string => + buildStatCard({ + label: 'Pass Rate', + valueCls: stats.allPassed ? 'pass' : 'fail', + value: `${stats.passRate}%`, + sub: `${stats.passedCount} of ${stats.totalCount} steps`, + }), + buildPassedCard = (count: number): string => + buildStatCard({ label: 'Passed', valueCls: 'pass', value: `${count}`, sub: 'steps succeeded' }), + buildFailedCard = (count: number): string => + buildStatCard({ + label: 'Failed', + valueCls: count > 0 ? 
'fail' : 'neutral', + value: `${count}`, + sub: 'steps failed', + }), + buildDurationCard = (duration: number): string => + buildStatCard({ + label: 'Duration', + valueCls: 'neutral', + value: `${duration.toFixed(0)}<span style="font-size: 16px; font-weight: 400;">ms</span>`, + sub: 'total execution time', + }), + buildReportStatsGrid = (stats: ReturnType<typeof computeReportStats>): string => { + const cards = [ + buildPassRateCard(stats), + buildPassedCard(stats.passedCount), + buildFailedCard(stats.failedCount), + buildDurationCard(stats.totalDuration), + ].join(''); + return `<div class="stats-grid">${cards}</div>`; + }, + buildReportProgressBar = (passRate: string, allPassed: boolean): string => ` + <div class="progress-container"> + <div class="progress-bar-bg"> + <div class="progress-bar-fill ${allPassed ? 'pass' : 'mixed'}" style="width: ${passRate}%; --pass-pct: ${passRate}%;"></div> + </div> + </div>`, + buildReportDashboard = ( + stats: ReturnType<typeof computeReportStats>, + stepsHtml: string, + ): string => { + const statusCls = stats.allPassed ? 'passed' : 'failed', + statusText = stats.allPassed ? 'All Steps Passed' : 'Some Steps Failed', + statusIcon = stats.allPassed ? 
'\u2713' : '\u2717'; + + return `<div class="dashboard"> + ${buildReportStatusSection({ allPassed: stats.allPassed, statusCls, statusText, statusIcon })} + ${buildReportStatsGrid(stats)} + ${buildReportProgressBar(stats.passRate, stats.allPassed)} + <div class="section-title">Steps (${stats.totalCount})</div> + <div class="steps-list"> + ${stepsHtml} + </div> + </div>`; + }, + buildReportFooter = (): string => ` + <div class="footer"> + ${REPORT_FOOTER_GENERATED_BY} <a href="${NAPPER_URL}">Napper</a> · ${REPORT_FOOTER_MADE_BY} <a href="${NIMBLESITE_URL}">Nimblesite</a> + </div>`, + buildReportHeroHtml = (playlistName: string, timestamp: string): string => ` + <div class="hero"> + <div class="hero-content"> + <div class="hero-label">Playlist Report</div> + <h1>${escapeHtml(playlistName)}</h1> + <div class="hero-timestamp">${escapeHtml(timestamp)}</div> + </div> + </div>`, + buildReportToggleScript = (): string => ` + <script> + function toggleStep(index) { + var card = document.querySelector('.step-card[data-index="' + index + '"]'); + if (!card) return; + card.classList.toggle('open'); + } + </script>`, + buildReportHead = (playlistName: string): string => `<head> +<meta charset="UTF-8"/> +<meta name="viewport" content="width=device-width, initial-scale=1.0"/> +<title>Napper Report — ${escapeHtml(playlistName)} + +`; + +export const generatePlaylistReport = ( + playlistName: string, + results: readonly RunResult[], +): string => { + const stats = computeReportStats(results), + stepsHtml = results.map((result, index) => buildStepCard(result, index)).join('\n'), + hero = buildReportHeroHtml(playlistName, new Date().toLocaleString()), + dashboard = buildReportDashboard(stats, stepsHtml); + return ` + +${buildReportHead(playlistName)} + + ${hero} + ${dashboard} + ${buildReportFooter()} + ${buildReportToggleScript()} + +`; +}; diff --git a/src/Nap.VsCode/src/reportStyles.ts b/src/Napper.VsCode/src/reportStyles.ts similarity index 95% rename from 
src/Nap.VsCode/src/reportStyles.ts rename to src/Napper.VsCode/src/reportStyles.ts index 2273aa3..0efa61b 100644 --- a/src/Nap.VsCode/src/reportStyles.ts +++ b/src/Napper.VsCode/src/reportStyles.ts @@ -1,22 +1,22 @@ // CSS styles for standalone HTML reports // Extracted to keep reportGenerator.ts under 450 LOC -const REPORT_ACCENT = "#6366f1", - REPORT_BODY_BG = "#f8fafc", - REPORT_BORDER = "#e2e8f0", - REPORT_CARD_BG = "#ffffff", - REPORT_CODE_BG = "#1e293b", - REPORT_CODE_TEXT = "#e2e8f0", - REPORT_FAIL_BG = "#fef2f2", - REPORT_FAIL_BORDER = "#fecaca", - REPORT_FAIL_COLOR = "#ef4444", - REPORT_GRADIENT_END = "#1e293b", - REPORT_GRADIENT_START = "#0f172a", - REPORT_PASS_BG = "#ecfdf5", - REPORT_PASS_BORDER = "#a7f3d0", - REPORT_PASS_COLOR = "#10b981", - REPORT_TEXT_PRIMARY = "#0f172a", - REPORT_TEXT_SECONDARY = "#64748b"; +const REPORT_ACCENT = '#6366f1', + REPORT_BODY_BG = '#f8fafc', + REPORT_BORDER = '#e2e8f0', + REPORT_CARD_BG = '#ffffff', + REPORT_CODE_BG = '#1e293b', + REPORT_CODE_TEXT = '#e2e8f0', + REPORT_FAIL_BG = '#fef2f2', + REPORT_FAIL_BORDER = '#fecaca', + REPORT_FAIL_COLOR = '#ef4444', + REPORT_GRADIENT_END = '#1e293b', + REPORT_GRADIENT_START = '#0f172a', + REPORT_PASS_BG = '#ecfdf5', + REPORT_PASS_BORDER = '#a7f3d0', + REPORT_PASS_COLOR = '#10b981', + REPORT_TEXT_PRIMARY = '#0f172a', + REPORT_TEXT_SECONDARY = '#64748b'; export const REPORT_STYLES = ` @import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&family=JetBrains+Mono:wght@400;500&display=swap'); diff --git a/src/Nap.VsCode/src/responsePanel.ts b/src/Napper.VsCode/src/responsePanel.ts similarity index 72% rename from src/Nap.VsCode/src/responsePanel.ts rename to src/Napper.VsCode/src/responsePanel.ts index 4983c3f..c9be3ce 100644 --- a/src/Nap.VsCode/src/responsePanel.ts +++ b/src/Napper.VsCode/src/responsePanel.ts @@ -1,14 +1,15 @@ +// Specs: vscode-editor, vscode-layout // Response webview panel — shows HTTP response after running a .nap file // Uses minimal 
vanilla HTML/CSS — no framework dependency -import * as vscode from "vscode"; -import type { RunResult } from "./types"; +import * as vscode from 'vscode'; +import type { RunResult } from './types'; import { HTTP_STATUS_CLIENT_ERROR_MIN, RESPONSE_PANEL_TITLE, RESPONSE_PANEL_VIEW_TYPE, -} from "./constants"; -import { escapeHtml, buildResultDetailHtml, SHARED_SECTION_STYLES } from "./htmlUtils"; +} from './constants'; +import { escapeHtml, buildResultDetailHtml, SHARED_SECTION_STYLES } from './htmlUtils'; const RESPONSE_PANEL_STYLES = ` body { font-family: var(--vscode-font-family); color: var(--vscode-foreground); background: var(--vscode-editor-background); padding: 16px; margin: 0; } @@ -21,31 +22,27 @@ const RESPONSE_PANEL_STYLES = ` .duration { color: var(--vscode-descriptionForeground); } .passed-badge { color: var(--vscode-testing-iconPassed); font-weight: bold; } .failed-badge { color: var(--vscode-testing-iconFailed); font-weight: bold; }`, + buildStatusLine = (result: RunResult): string => { + if (result.statusCode === undefined) { + return ''; + } + const statusClass = + result.statusCode < HTTP_STATUS_CLIENT_ERROR_MIN ? 'status-ok' : 'status-error'; + return `${result.statusCode}`; + }, + buildResponseBody = (result: RunResult): string => { + const durationLine = result.duration !== undefined ? `${result.duration.toFixed(0)}ms` : ''; - buildStatusLine = (result: RunResult): string => { - if (result.statusCode === undefined) {return "";} - const statusClass = - result.statusCode < HTTP_STATUS_CLIENT_ERROR_MIN - ? "status-ok" - : "status-error"; - return `${result.statusCode}`; -}, - - buildResponseBody = (result: RunResult): string => { - const durationLine = - result.duration !== undefined ? `${result.duration.toFixed(0)}ms` : ""; - - return ` + return `

${escapeHtml(result.file)}

${buildStatusLine(result)} ${durationLine} - ${result.passed ? "PASSED" : "FAILED"} + ${result.passed ? 'PASSED' : 'FAILED'}
${buildResultDetailHtml(result)}`; -}, - - buildHtml = (result: RunResult): string => ` + }, + buildHtml = (result: RunResult): string => ` @@ -69,7 +66,7 @@ export class ResponsePanel implements vscode.Disposable { RESPONSE_PANEL_VIEW_TYPE, RESPONSE_PANEL_TITLE, viewColumn, - { enableScripts: false, retainContextWhenHidden: true } + { enableScripts: false, retainContextWhenHidden: true }, ); this._panel.webview.html = buildHtml(result); diff --git a/src/Napper.VsCode/src/test/e2e/activation.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/activation.e2e.test.ts new file mode 100644 index 0000000..ea3d98e --- /dev/null +++ b/src/Napper.VsCode/src/test/e2e/activation.e2e.test.ts @@ -0,0 +1,143 @@ +// Specs: vscode-impl, vscode-commands, vscode-syntax, vscode-settings +import * as assert from 'assert'; +import * as fs from 'fs'; +import { + activateExtension, + getExtensionPath, + getRegisteredCommands, + sleep, +} from '../helpers/helpers'; +import { + CMD_COPY_CURL, + CMD_NEW_PLAYLIST, + CMD_NEW_REQUEST, + CMD_OPEN_RESPONSE, + CMD_RUN_ALL, + CMD_RUN_FILE, + CMD_SWITCH_ENV, + NAPENV_EXTENSION, + NAPLIST_EXTENSION, + NAP_EXTENSION, + VIEW_EXPLORER, +} from '../../constants'; + +suite('Extension Activation', () => { + suiteSetup(async function () { + this.timeout(30000); + await activateExtension(); + await sleep(3000); + }); + + test('extension activates successfully', async () => { + const ctx = await activateExtension(); + assert.strictEqual(ctx.extension.isActive, true, 'Extension should be active'); + }); + + test('all commands are registered', async () => { + const commands = await getRegisteredCommands(), + expectedCommands = [ + CMD_RUN_FILE, + CMD_RUN_ALL, + CMD_NEW_REQUEST, + CMD_NEW_PLAYLIST, + CMD_SWITCH_ENV, + CMD_COPY_CURL, + CMD_OPEN_RESPONSE, + ]; + + for (const cmd of expectedCommands) { + assert.ok(commands.includes(cmd), `Command ${cmd} should be registered`); + } + }); + + test('package.json declares all views in napper-panel container', () => { + 
const packageJsonPath = getExtensionPath('package.json'), + raw = fs.readFileSync(packageJsonPath, 'utf-8'), + packageJson = JSON.parse(raw) as { + contributes: { + views: Record; + }; + }, + napperPanelViews = packageJson.contributes.views['napper-panel']; + assert.ok(Array.isArray(napperPanelViews), 'napper-panel view container should exist'); + + const viewIds = napperPanelViews.map((v) => v.id); + assert.ok(viewIds.includes(VIEW_EXPLORER), 'napperExplorer view should be declared'); + }); + + test('package.json registers all three languages', () => { + const packageJsonPath = getExtensionPath('package.json'), + raw = fs.readFileSync(packageJsonPath, 'utf-8'), + packageJson = JSON.parse(raw) as { + contributes: { + languages: { id: string; extensions: string[] }[]; + }; + }, + { languages } = packageJson.contributes, + langIds = languages.map((l) => l.id); + + assert.ok(langIds.includes('nap'), 'nap language should be registered'); + assert.ok(langIds.includes('naplist'), 'naplist language should be registered'); + assert.ok(langIds.includes('napenv'), 'napenv language should be registered'); + + const napLang = languages.find((l) => l.id === 'nap'); + assert.ok(napLang !== undefined, 'nap language must be registered'); + assert.ok(napLang.extensions.includes(NAP_EXTENSION), '.nap extension should be associated'); + + const naplistLang = languages.find((l) => l.id === 'naplist'); + assert.ok(naplistLang !== undefined, 'naplist language must be registered'); + assert.ok( + naplistLang.extensions.includes(NAPLIST_EXTENSION), + '.naplist extension should be associated', + ); + + const napenvLang = languages.find((l) => l.id === 'napenv'); + assert.ok(napenvLang !== undefined, 'napenv language must be registered'); + assert.ok( + napenvLang.extensions.includes(NAPENV_EXTENSION), + '.napenv extension should be associated', + ); + }); + + test('package.json declares all configuration properties', () => { + const packageJsonPath = getExtensionPath('package.json'), + raw 
= fs.readFileSync(packageJsonPath, 'utf-8'), + packageJson = JSON.parse(raw) as { + contributes: { + configuration: { + properties: Record; + }; + }; + }, + props = packageJson.contributes.configuration.properties, + expectedKeys = [ + 'napper.defaultEnvironment', + 'napper.autoRunOnSave', + 'napper.splitEditorLayout', + 'napper.maskSecretsInPreview', + 'napper.cliPath', + ]; + + for (const key of expectedKeys) { + assert.ok(key in props, `Configuration property ${key} should be declared`); + } + }); + + test('package.json declares context menu for napperExplorer', () => { + const packageJsonPath = getExtensionPath('package.json'), + raw = fs.readFileSync(packageJsonPath, 'utf-8'), + packageJson = JSON.parse(raw) as { + contributes: { + menus: { + 'view/item/context': { + command: string; + when: string; + }[]; + }; + }; + }, + contextMenus = packageJson.contributes.menus['view/item/context'], + runFileMenu = contextMenus.find((m) => m.command === CMD_RUN_FILE); + assert.ok(runFileMenu, 'runFile context menu should exist for explorer items'); + }); +}); diff --git a/src/Napper.VsCode/src/test/e2e/codelens.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/codelens.e2e.test.ts new file mode 100644 index 0000000..72983f3 --- /dev/null +++ b/src/Napper.VsCode/src/test/e2e/codelens.e2e.test.ts @@ -0,0 +1,99 @@ +// Specs: vscode-codelens, vscode-commands +import * as assert from 'assert'; +import * as vscode from 'vscode'; +import { activateExtension, closeAllEditors, openDocument, sleep } from '../helpers/helpers'; +import { CMD_COPY_CURL, CMD_RUN_FILE } from '../../constants'; + +suite('CodeLens', () => { + suiteSetup(async function () { + this.timeout(30000); + await activateExtension(); + await sleep(3000); + }); + + suiteTeardown(async () => { + await closeAllEditors(); + }); + + test('CodeLens appears for shorthand .nap file', async function () { + this.timeout(15000); + const doc = await openDocument('get-httpbin.nap'); + await sleep(3000); + + const lenses = await 
vscode.commands.executeCommand( + 'vscode.executeCodeLensProvider', + doc.uri, + ); + + assert.ok(lenses.length > 0, 'Should have at least one CodeLens for shorthand .nap file'); + + const runLens = lenses.find((l) => l.command?.command === CMD_RUN_FILE); + assert.ok(runLens, 'Should have a Run CodeLens'); + + const curlLens = lenses.find((l) => l.command?.command === CMD_COPY_CURL); + assert.ok(curlLens, 'Should have a Copy as curl CodeLens'); + }); + + test('CodeLens appears for .nap file with [request] section', async function () { + this.timeout(15000); + const doc = await openDocument('petstore/list-pets.nap'); + await sleep(3000); + + const lenses = await vscode.commands.executeCommand( + 'vscode.executeCodeLensProvider', + doc.uri, + ); + + assert.ok(lenses.length > 0, 'Should have CodeLens for [request] section'); + + const runLens = lenses.find((l) => l.command?.command === CMD_RUN_FILE); + assert.ok(runLens, 'Run lens should exist on [request] section'); + }); + + test('CodeLens appears for POST .nap file', async function () { + this.timeout(15000); + const doc = await openDocument('post-jsonplaceholder.nap'); + await sleep(3000); + + const lenses = await vscode.commands.executeCommand( + 'vscode.executeCodeLensProvider', + doc.uri, + ); + + assert.ok(lenses.length > 0, 'Should have CodeLens for POST .nap file'); + }); + + test('CodeLens appears for .naplist file', async function () { + this.timeout(15000); + const doc = await openDocument('petstore/smoke.naplist'); + await sleep(3000); + + const lenses = await vscode.commands.executeCommand( + 'vscode.executeCodeLensProvider', + doc.uri, + ); + + assert.ok(lenses.length > 0, 'Should have CodeLens for .naplist file with [meta] section'); + + const runPlaylistLens = lenses.find((l) => l.command?.command === CMD_RUN_FILE); + assert.ok(runPlaylistLens, 'Should have Run Playlist CodeLens'); + }); + + test('CodeLens Run lens passes document URI as argument', async function () { + this.timeout(15000); + const 
doc = await openDocument('get-httpbin.nap'); + await sleep(3000); + + const lenses = await vscode.commands.executeCommand( + 'vscode.executeCodeLensProvider', + doc.uri, + ), + runLens = lenses.find((l) => l.command?.command === CMD_RUN_FILE); + assert.ok(runLens, 'Run lens should exist'); + assert.ok(runLens.command?.arguments, 'Run lens should have arguments'); + assert.ok( + runLens.command.arguments.length > 0, + 'Run lens should pass at least one argument (the URI)', + ); + }); +}); diff --git a/src/Napper.VsCode/src/test/e2e/configuration.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/configuration.e2e.test.ts new file mode 100644 index 0000000..834e648 --- /dev/null +++ b/src/Napper.VsCode/src/test/e2e/configuration.e2e.test.ts @@ -0,0 +1,63 @@ +// Specs: vscode-settings +import * as assert from 'assert'; +import * as vscode from 'vscode'; +import { activateExtension, sleep } from '../helpers/helpers'; +import { + CONFIG_AUTO_RUN, + CONFIG_CLI_PATH, + CONFIG_DEFAULT_ENV, + CONFIG_MASK_SECRETS, + CONFIG_SECTION, + CONFIG_SPLIT_LAYOUT, + DEFAULT_CLI_PATH, +} from '../../constants'; + +suite('Configuration', () => { + suiteSetup(async function () { + this.timeout(30000); + await activateExtension(); + await sleep(3000); + }); + + test('napper configuration section exists', () => { + const config = vscode.workspace.getConfiguration(CONFIG_SECTION); + assert.notStrictEqual(config, undefined, 'napper configuration section should exist'); + }); + + test('autoRunOnSave defaults to false', () => { + const config = vscode.workspace.getConfiguration(CONFIG_SECTION), + autoRun = config.get(CONFIG_AUTO_RUN); + assert.strictEqual(autoRun, false, 'autoRunOnSave should default to false'); + }); + + test('splitEditorLayout defaults to beside', () => { + const config = vscode.workspace.getConfiguration(CONFIG_SECTION), + layout = config.get(CONFIG_SPLIT_LAYOUT); + assert.strictEqual(layout, 'beside', "splitEditorLayout should default to 'beside'"); + }); + + 
test('maskSecretsInPreview defaults to true', () => { + const config = vscode.workspace.getConfiguration(CONFIG_SECTION), + mask = config.get(CONFIG_MASK_SECRETS); + assert.strictEqual(mask, true, 'maskSecretsInPreview should default to true'); + }); + + test('cliPath has a default value', () => { + const config = vscode.workspace.getConfiguration(CONFIG_SECTION), + cliPath = config.get(CONFIG_CLI_PATH); + assert.strictEqual(cliPath, DEFAULT_CLI_PATH, `cliPath should default to ${DEFAULT_CLI_PATH}`); + }); + + test('defaultEnvironment defaults to empty string', () => { + const config = vscode.workspace.getConfiguration(CONFIG_SECTION), + env = config.get(CONFIG_DEFAULT_ENV); + assert.strictEqual(env, '', 'defaultEnvironment should default to empty string'); + }); + + test('splitEditorLayout only accepts valid values', () => { + const config = vscode.workspace.getConfiguration(CONFIG_SECTION), + inspected = config.inspect(CONFIG_SPLIT_LAYOUT); + assert.ok(inspected, 'splitEditorLayout should be inspectable'); + assert.strictEqual(inspected.defaultValue, 'beside', "Default should be 'beside'"); + }); +}); diff --git a/src/Napper.VsCode/src/test/e2e/copycurl.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/copycurl.e2e.test.ts new file mode 100644 index 0000000..a21efa1 --- /dev/null +++ b/src/Napper.VsCode/src/test/e2e/copycurl.e2e.test.ts @@ -0,0 +1,67 @@ +// Specs: vscode-commands +import * as assert from 'assert'; +import * as vscode from 'vscode'; +import { + activateExtension, + closeAllEditors, + executeCommand, + openDocument, + sleep, +} from '../helpers/helpers'; +import { CMD_COPY_CURL } from '../../constants'; + +suite('Copy as Curl', () => { + suiteSetup(async function () { + this.timeout(30000); + await activateExtension(); + await sleep(3000); + }); + + suiteTeardown(async () => { + await closeAllEditors(); + }); + + test('copy curl for shorthand GET request', async function () { + this.timeout(15000); + const doc = await openDocument('get-httpbin.nap'); + 
await sleep(1000); + + await executeCommand(CMD_COPY_CURL, doc.uri); + await sleep(1000); + + const clipboard = await vscode.env.clipboard.readText(); + assert.ok(clipboard.includes('curl'), 'Clipboard should contain curl command'); + assert.ok(clipboard.includes('httpbin.org/get'), 'Clipboard should contain the request URL'); + assert.ok(clipboard.includes('GET'), 'Clipboard should contain GET method'); + }); + + test('copy curl for POST request with [request] section', async function () { + this.timeout(15000); + const doc = await openDocument('post-jsonplaceholder.nap'); + await sleep(1000); + + await executeCommand(CMD_COPY_CURL, doc.uri); + await sleep(1000); + + const clipboard = await vscode.env.clipboard.readText(); + assert.ok(clipboard.includes('curl'), 'Clipboard should contain curl'); + assert.ok(clipboard.includes('POST'), 'Clipboard should contain POST method'); + assert.ok( + clipboard.includes('jsonplaceholder.typicode.com'), + 'Clipboard should contain the URL', + ); + }); + + test('copy curl for GET with [request] section', async function () { + this.timeout(15000); + const doc = await openDocument('petstore/list-pets.nap'); + await sleep(1000); + + await executeCommand(CMD_COPY_CURL, doc.uri); + await sleep(1000); + + const clipboard = await vscode.env.clipboard.readText(); + assert.ok(clipboard.includes('curl'), 'Clipboard should contain curl'); + assert.ok(clipboard.includes('petstore.swagger.io'), 'Clipboard should contain petstore URL'); + }); +}); diff --git a/src/Napper.VsCode/src/test/e2e/csx-scripts.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/csx-scripts.e2e.test.ts new file mode 100644 index 0000000..857da65 --- /dev/null +++ b/src/Napper.VsCode/src/test/e2e/csx-scripts.e2e.test.ts @@ -0,0 +1,473 @@ +// Specs: vscode-commands, vscode-playlists, script-csx +import * as assert from 'assert'; +import * as vscode from 'vscode'; +import * as fs from 'fs'; +import * as path from 'path'; +import { + activateExtension, + closeAllEditors, + 
executeCommand, + extractStepLines, + getFixturePath, + openDocument, + sleep, + waitForCondition, +} from '../helpers/helpers'; +import { + CMD_RUN_FILE, + CMD_SAVE_REPORT, + PLAYLIST_PANEL_TITLE, + REPORT_FILE_EXTENSION, + REPORT_FILE_SUFFIX, + RESPONSE_PANEL_TITLE, +} from '../../constants'; + +const findTabByLabel = (label: string): vscode.Tab | undefined => + vscode.window.tabGroups.all.flatMap((g) => g.tabs).find((tab) => tab.label.includes(label)), + countTabsByLabel = (label: string): number => + vscode.window.tabGroups.all.flatMap((g) => g.tabs).filter((t) => t.label.includes(label)) + .length; + +suite('CSX Script Edge Cases — Real Execution', () => { + suiteSetup(async function () { + this.timeout(30000); + await activateExtension(); + await sleep(3000); + }); + + suiteTeardown(async () => { + await closeAllEditors(); + }); + + // ── CSX-only playlist (no .nap requests at all) ────────────────────── + + test('csx-only playlist opens panel and completes successfully', async function () { + this.timeout(60000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('petstore/csx-only.naplist'); + assert.strictEqual( + doc.languageId, + 'naplist', + 'csx-only.naplist must have naplist language mode', + ); + + const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); + + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + playlistTab, + `Tab '${PLAYLIST_PANEL_TITLE}' must open for a playlist containing only .csx scripts`, + ); + + const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTab, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear — .csx-only playlist uses playlist panel`, + ); + + await runPromise; + + const panelAfter = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + panelAfter, + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after csx-only playlist completes`, 
+ ); + }); + + test('csx-only playlist contains no .nap steps and all scripts exist', () => { + const playlistPath = getFixturePath('petstore/csx-only.naplist'), + content = fs.readFileSync(playlistPath, 'utf-8'); + + assert.ok(content.includes('[meta]'), 'Must have [meta] section'); + assert.ok(content.includes('[steps]'), 'Must have [steps] section'); + assert.ok(content.includes('echo.csx'), 'Must reference echo.csx'); + assert.ok(content.includes('multi-output.csx'), 'Must reference multi-output.csx'); + + const scriptsDir = getFixturePath('scripts'); + assert.ok(fs.existsSync(path.join(scriptsDir, 'echo.csx')), 'echo.csx must exist'); + assert.ok( + fs.existsSync(path.join(scriptsDir, 'multi-output.csx')), + 'multi-output.csx must exist', + ); + }); + + // ── Failing script — extension must not crash ──────────────────────── + + test('playlist with failing csx script opens panel and completes without crashing', async function () { + this.timeout(60000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('petstore/csx-fail.naplist'); + assert.strictEqual( + doc.languageId, + 'naplist', + 'csx-fail.naplist must have naplist language mode', + ); + + const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); + + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + assert.ok( + findTabByLabel(PLAYLIST_PANEL_TITLE), + `Tab '${PLAYLIST_PANEL_TITLE}' must open even when playlist contains a failing .csx script`, + ); + + // The run must resolve — a failing script must not hang the extension + await runPromise; + + assert.ok( + findTabByLabel(PLAYLIST_PANEL_TITLE), + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after playlist with failing script completes`, + ); + + assert.strictEqual( + findTabByLabel(RESPONSE_PANEL_TITLE), + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear — even failed playlists use playlist panel`, + ); + }); + + test('csx-fail.naplist fixture has failing script and valid 
steps', () => { + const playlistPath = getFixturePath('petstore/csx-fail.naplist'), + content = fs.readFileSync(playlistPath, 'utf-8'); + + assert.ok(content.includes('fail.csx'), 'Must reference fail.csx'); + assert.ok(content.includes('echo.csx'), 'Must reference echo.csx'); + assert.ok(content.includes('list-pets.nap'), 'Must reference list-pets.nap'); + + const scriptsDir = getFixturePath('scripts'); + assert.ok(fs.existsSync(path.join(scriptsDir, 'fail.csx')), 'fail.csx fixture must exist'); + + const failContent = fs.readFileSync(path.join(scriptsDir, 'fail.csx'), 'utf-8'); + assert.ok(failContent.includes('Environment.Exit(1)'), 'fail.csx must exit with non-zero code'); + }); + + // ── Compilation error — extension must handle gracefully ───────────── + + test('playlist with compilation-error csx opens panel and completes without crashing', async function () { + this.timeout(60000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('petstore/csx-compile-error.naplist'); + assert.strictEqual( + doc.languageId, + 'naplist', + 'csx-compile-error.naplist must have naplist language mode', + ); + + const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); + + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + assert.ok( + findTabByLabel(PLAYLIST_PANEL_TITLE), + `Tab '${PLAYLIST_PANEL_TITLE}' must open even when playlist contains a .csx with compilation errors`, + ); + + // Must not hang — compilation errors should produce a failed result, not block forever + await runPromise; + + assert.ok( + findTabByLabel(PLAYLIST_PANEL_TITLE), + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after playlist with compilation-error script`, + ); + }); + + test('csx-compile-error.naplist fixture has script with type error', () => { + const playlistPath = getFixturePath('petstore/csx-compile-error.naplist'), + content = fs.readFileSync(playlistPath, 'utf-8'); + + assert.ok(content.includes('compile-error.csx'), 
'Must reference compile-error.csx'); + + const scriptsDir = getFixturePath('scripts'), + scriptContent = fs.readFileSync(path.join(scriptsDir, 'compile-error.csx'), 'utf-8'); + + // The script assigns a string to an int — guaranteed compilation failure + assert.ok(scriptContent.includes('int x'), 'compile-error.csx must declare an int variable'); + }); + + // ── Multiple CSX scripts interleaved with .nap requests ────────────── + + test('playlist with multiple csx scripts interleaved with requests completes', async function () { + this.timeout(90000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('petstore/csx-multi.naplist'); + assert.strictEqual( + doc.languageId, + 'naplist', + 'csx-multi.naplist must have naplist language mode', + ); + + const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); + + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + assert.ok( + findTabByLabel(PLAYLIST_PANEL_TITLE), + `Tab '${PLAYLIST_PANEL_TITLE}' must open for multi-script interleaved playlist`, + ); + + await runPromise; + + assert.ok( + findTabByLabel(PLAYLIST_PANEL_TITLE), + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after multi-script interleaved playlist`, + ); + + assert.strictEqual( + findTabByLabel(RESPONSE_PANEL_TITLE), + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear for interleaved playlist`, + ); + }); + + test('csx-multi.naplist has 5 steps mixing scripts and requests', () => { + const playlistPath = getFixturePath('petstore/csx-multi.naplist'), + content = fs.readFileSync(playlistPath, 'utf-8'), + lines = content.split('\n'); + + let inSteps = false; + const steps: string[] = []; + for (const line of lines) { + const trimmed = line.trim(); + if (trimmed === '[steps]') { + inSteps = true; + continue; + } + if (trimmed.startsWith('[') && trimmed.endsWith(']')) { + inSteps = false; + continue; + } + if (inSteps && trimmed.length > 0) { + steps.push(trimmed); + } + } + + 
assert.strictEqual(steps.length, 5, 'csx-multi must have exactly 5 steps'); + + const csxSteps = steps.filter((s) => s.endsWith('.csx')), + napSteps = steps.filter((s) => s.endsWith('.nap')); + assert.strictEqual(csxSteps.length, 3, 'Must have 3 .csx script steps'); + assert.strictEqual(napSteps.length, 2, 'Must have 2 .nap request steps'); + }); + + // ── Slow script — panel opens before script finishes ───────────────── + + test('slow csx script: panel opens immediately, run eventually completes', async function () { + this.timeout(90000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('petstore/csx-slow.naplist'), + runPromise = executeCommand(CMD_RUN_FILE, doc.uri); + + // Panel must appear within 2s — the slow script takes 3s+ + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 2000); + + assert.ok( + findTabByLabel(PLAYLIST_PANEL_TITLE), + `Tab '${PLAYLIST_PANEL_TITLE}' must open BEFORE slow .csx script finishes`, + ); + + // Now wait for the full run to complete + await runPromise; + + assert.ok( + findTabByLabel(PLAYLIST_PANEL_TITLE), + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after slow script completes`, + ); + }); + + // ── Re-run csx-only playlist reuses panel ──────────────────────────── + + test('re-running csx-only playlist reuses panel, no duplicates', async function () { + this.timeout(120000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('petstore/csx-only.naplist'); + + // First run + await executeCommand(CMD_RUN_FILE, doc.uri); + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + assert.ok( + findTabByLabel(PLAYLIST_PANEL_TITLE), + 'Playlist panel must exist after first csx-only run', + ); + + // Second run + const secondRunPromise = executeCommand(CMD_RUN_FILE, doc.uri); + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 2000); + + assert.strictEqual( + 
countTabsByLabel(PLAYLIST_PANEL_TITLE), + 1, + 'Only ONE playlist panel tab must exist during re-run — panel must be reused', + ); + + await secondRunPromise; + + assert.strictEqual( + countTabsByLabel(PLAYLIST_PANEL_TITLE), + 1, + 'Only ONE playlist panel tab must exist after re-run completes', + ); + }); + + // ── Save report after failed playlist ──────────────────────────────── + + test('save report works after playlist with failing csx script', async function () { + this.timeout(60000); + await closeAllEditors(); + await sleep(500); + + const playlistPath = getFixturePath('petstore/csx-fail.naplist'), + expectedReportPath = path.join( + path.dirname(playlistPath), + `csx-fail${REPORT_FILE_SUFFIX}${REPORT_FILE_EXTENSION}`, + ); + + if (fs.existsSync(expectedReportPath)) { + fs.unlinkSync(expectedReportPath); + } + + const doc = await openDocument('petstore/csx-fail.naplist'); + await executeCommand(CMD_RUN_FILE, doc.uri); + + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + // Wait for run to fully complete before saving report + await sleep(15000); + + await executeCommand(CMD_SAVE_REPORT); + + // Report file must be created even when playlist contains failures + await waitForCondition(() => fs.existsSync(expectedReportPath), 5000); + + assert.ok( + fs.existsSync(expectedReportPath), + `Report must be created at ${expectedReportPath} even when playlist has failing scripts`, + ); + + const reportContent = fs.readFileSync(expectedReportPath, 'utf-8'); + assert.ok(reportContent.includes(' findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + // Wait for run to complete + await sleep(15000); + + await executeCommand(CMD_SAVE_REPORT); + + await waitForCondition(() => fs.existsSync(expectedReportPath), 5000); + + assert.ok(fs.existsSync(expectedReportPath), `Report must be created for csx-only playlist`); + + const reportContent = fs.readFileSync(expectedReportPath, 'utf-8'); + assert.ok(reportContent.includes(' { 
+ const scriptsDir = getFixturePath('scripts'), + expectedScripts = [ + 'echo.csx', + 'fail.csx', + 'compile-error.csx', + 'multi-output.csx', + 'slow.csx', + ]; + + for (const script of expectedScripts) { + const scriptPath = path.join(scriptsDir, script); + assert.ok(fs.existsSync(scriptPath), `Fixture script ${script} must exist`); + + const content = fs.readFileSync(scriptPath, 'utf-8'); + assert.ok(content.trim().length > 0, `Fixture script ${script} must not be empty`); + } + }); + + test('all csx edge-case naplist fixtures exist and have valid structure', () => { + const petstoreDir = getFixturePath('petstore'), + expectedPlaylists = [ + 'csx-only.naplist', + 'csx-fail.naplist', + 'csx-compile-error.naplist', + 'csx-multi.naplist', + 'csx-slow.naplist', + ]; + + for (const playlist of expectedPlaylists) { + const playlistPath = path.join(petstoreDir, playlist); + assert.ok(fs.existsSync(playlistPath), `Fixture playlist ${playlist} must exist`); + + const content = fs.readFileSync(playlistPath, 'utf-8'); + assert.ok(content.includes('[meta]'), `${playlist} must have [meta] section`); + assert.ok(content.includes('[steps]'), `${playlist} must have [steps] section`); + } + }); + + test('all naplist step file references resolve to existing files', () => { + const petstoreDir = getFixturePath('petstore'), + playlists = [ + 'csx-only.naplist', + 'csx-fail.naplist', + 'csx-compile-error.naplist', + 'csx-multi.naplist', + 'csx-slow.naplist', + ]; + + for (const playlist of playlists) { + const playlistPath = path.join(petstoreDir, playlist), + content = fs.readFileSync(playlistPath, 'utf-8'), + stepLines = extractStepLines(content); + + for (const step of stepLines) { + const resolved = path.resolve(petstoreDir, step); + assert.ok( + fs.existsSync(resolved), + `Step '${step}' in ${playlist} must resolve to existing file: ${resolved}`, + ); + } + } + }); +}); diff --git a/src/Napper.VsCode/src/test/e2e/environment.e2e.test.ts 
b/src/Napper.VsCode/src/test/e2e/environment.e2e.test.ts new file mode 100644 index 0000000..491479f --- /dev/null +++ b/src/Napper.VsCode/src/test/e2e/environment.e2e.test.ts @@ -0,0 +1,65 @@ +// Specs: vscode-env-switcher, vscode-settings +import * as assert from 'assert'; +import * as vscode from 'vscode'; +import * as fs from 'fs'; +import { activateExtension, getExtensionPath, getFixturePath, sleep } from '../helpers/helpers'; +import { + CMD_SWITCH_ENV, + CONFIG_DEFAULT_ENV, + CONFIG_SECTION, + NAPENV_EXTENSION, +} from '../../constants'; + +suite('Environment Switching', () => { + suiteSetup(async function () { + this.timeout(30000); + await activateExtension(); + await sleep(3000); + }); + + test('.napenv file exists in test workspace', () => { + const envPath = getFixturePath(`petstore/${NAPENV_EXTENSION}`); + assert.ok(fs.existsSync(envPath), '.napenv file should exist in petstore fixture'); + }); + + test('.napenv.staging file exists for multi-env testing', () => { + const envPath = getFixturePath('petstore/.napenv.staging'); + assert.ok(fs.existsSync(envPath), '.napenv.staging file should exist'); + }); + + test('.napenv file contains environment variables', () => { + const envPath = getFixturePath(`petstore/${NAPENV_EXTENSION}`), + content = fs.readFileSync(envPath, 'utf-8'); + assert.ok(content.includes('baseUrl'), '.napenv should define baseUrl variable'); + assert.ok(content.includes('petId'), '.napenv should define petId variable'); + }); + + test('configuration property for defaultEnvironment is readable', () => { + const config = vscode.workspace.getConfiguration(CONFIG_SECTION), + envValue = config.get(CONFIG_DEFAULT_ENV); + assert.ok( + envValue !== undefined, + 'defaultEnvironment config should be readable (may be empty string)', + ); + }); + + test('switchEnvironment command is registered', async () => { + const commands = await vscode.commands.getCommands(true); + assert.ok(commands.includes(CMD_SWITCH_ENV), 'switchEnvironment command should 
be registered'); + }); + + test('package.json declares defaultEnvironment configuration', () => { + const packageJsonPath = getExtensionPath('package.json'), + raw = fs.readFileSync(packageJsonPath, 'utf-8'), + packageJson = JSON.parse(raw) as { + contributes: { + configuration: { + properties: Record; + }; + }; + }, + envProp = packageJson.contributes.configuration.properties['napper.defaultEnvironment']; + assert.ok(envProp, 'defaultEnvironment property should exist'); + assert.strictEqual(envProp.type, 'string', 'defaultEnvironment should be a string type'); + }); +}); diff --git a/src/Napper.VsCode/src/test/e2e/explorer.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/explorer.e2e.test.ts new file mode 100644 index 0000000..885dd0a --- /dev/null +++ b/src/Napper.VsCode/src/test/e2e/explorer.e2e.test.ts @@ -0,0 +1,185 @@ +// Specs: vscode-explorer, vscode-playlists +import * as assert from 'assert'; +import * as vscode from 'vscode'; +import * as fs from 'fs'; +import { + activateExtension, + closeAllEditors, + deleteFixtureFile, + getFixturePath, + openDocument, + sleep, + writeFixtureFile, +} from '../helpers/helpers'; +import type { ExtensionApi } from '../../extension'; +import type { TreeNode } from '../../explorerProvider'; +import { CONTEXT_PLAYLIST, CONTEXT_PLAYLIST_SECTION, CONTEXT_REQUEST_FILE } from '../../constants'; + +const EXTENSION_ID = 'nimblesite.napper', + getExplorerProvider = (): ExtensionApi['explorerProvider'] => { + const ext = vscode.extensions.getExtension(EXTENSION_ID); + if (!ext) { + throw new Error(`Extension ${EXTENSION_ID} not found`); + } + return ext.exports.explorerProvider; + }, + findNodeByLabel = (nodes: readonly TreeNode[], label: string): TreeNode | undefined => + nodes.find((n: TreeNode) => n.label === label); + +suite('Explorer Tree View', () => { + suiteSetup(async function () { + this.timeout(30000); + await activateExtension(); + await sleep(3000); + }); + + suiteTeardown(async () => { + await closeAllEditors(); + }); + + 
test('workspace contains .nap fixture files', () => { + const httpbinPath = getFixturePath('get-httpbin.nap'); + assert.ok(fs.existsSync(httpbinPath), 'get-httpbin.nap fixture should exist in workspace'); + + const postPath = getFixturePath('post-jsonplaceholder.nap'); + assert.ok(fs.existsSync(postPath), 'post-jsonplaceholder.nap fixture should exist'); + }); + + test('workspace contains petstore subfolder with .nap files', () => { + const listPetsPath = getFixturePath('petstore/list-pets.nap'); + assert.ok(fs.existsSync(listPetsPath), 'petstore/list-pets.nap should exist'); + + const getPetPath = getFixturePath('petstore/get-pet.nap'); + assert.ok(fs.existsSync(getPetPath), 'petstore/get-pet.nap should exist'); + }); + + test('workspace contains .naplist file', () => { + const playlistPath = getFixturePath('petstore/smoke.naplist'); + assert.ok(fs.existsSync(playlistPath), 'petstore/smoke.naplist should exist'); + + const content = fs.readFileSync(playlistPath, 'utf-8'); + assert.ok(content.includes('[steps]'), 'Playlist should have [steps] section'); + assert.ok(content.includes('list-pets.nap'), 'Playlist should reference list-pets.nap'); + }); + + test('opening a .nap file sets correct language mode', async function () { + this.timeout(10000); + const doc = await openDocument('get-httpbin.nap'); + assert.strictEqual(doc.languageId, 'nap', 'Language should be nap for .nap files'); + }); + + test('opening a .naplist file sets correct language mode', async function () { + this.timeout(10000); + const doc = await openDocument('petstore/smoke.naplist'); + assert.strictEqual(doc.languageId, 'naplist', 'Language should be naplist for .naplist files'); + }); + + test('file watcher detects new .nap file creation', async function () { + this.timeout(15000); + const testFileName = 'temp-watcher-test.nap'; + + writeFixtureFile(testFileName, 'GET https://httpbin.org/status/200\n'); + await sleep(2000); + + const filePath = getFixturePath(testFileName); + 
assert.ok(fs.existsSync(filePath), 'Newly created .nap file should exist'); + + deleteFixtureFile(testFileName); + await sleep(1000); + }); + + test('.nap file content is readable and valid', async function () { + this.timeout(10000); + const doc = await openDocument('post-jsonplaceholder.nap'), + text = doc.getText(); + + assert.ok(text.includes('[request]'), 'Should have [request] section'); + assert.ok(text.includes('[assert]'), 'Should have [assert] section'); + assert.ok(text.includes('jsonplaceholder.typicode.com'), 'Should contain the API URL'); + }); + + test('nested playlist in tree view expands to show its own children', function () { + this.timeout(10000); + const provider = getExplorerProvider(), + rootNodes = provider.getChildren(), + // Find the Playlists section + playlistSection = rootNodes.find((n) => n.contextValue === CONTEXT_PLAYLIST_SECTION); + assert.ok(playlistSection, 'Tree must have a Playlists section'); + assert.ok( + playlistSection.children && playlistSection.children.length > 0, + 'Playlists section must have children', + ); + + // Find full.naplist — it references smoke.naplist (nested) and get-pet.nap + const fullPlaylist = findNodeByLabel(playlistSection.children, 'full'); + assert.ok(fullPlaylist, "Playlists section must contain 'full' playlist (from full.naplist)"); + assert.strictEqual( + fullPlaylist.contextValue, + CONTEXT_PLAYLIST, + 'full playlist must have playlist context', + ); + assert.ok( + fullPlaylist.children && fullPlaylist.children.length > 0, + 'full playlist must have children (its steps)', + ); + + // The nested smoke.naplist step must itself be a playlist with children + const smokeChild = findNodeByLabel(fullPlaylist.children, 'smoke'); + assert.ok(smokeChild, "full playlist must contain 'smoke' as a child (the nested .naplist)"); + assert.strictEqual( + smokeChild.contextValue, + CONTEXT_PLAYLIST, + 'Nested smoke.naplist must have playlist context, not requestFile', + ); + assert.ok( + smokeChild.children && 
smokeChild.children.length > 0, + 'Nested smoke.naplist MUST have its own children — it must be expandable', + ); + + // Verify smoke's children are the actual .nap step files + const smokeChildLabels = smokeChild.children.map((c) => c.label); + assert.ok( + smokeChildLabels.includes('list-pets'), + 'Nested smoke playlist must contain list-pets step', + ); + assert.ok( + smokeChildLabels.includes('get-pet'), + 'Nested smoke playlist must contain get-pet step', + ); + + // The get-pet.nap direct child of full.naplist is a leaf (not a playlist) + const getPetChild = findNodeByLabel(fullPlaylist.children, 'get-pet'); + assert.ok(getPetChild, "full playlist must also contain 'get-pet' as a direct step"); + assert.strictEqual( + getPetChild.contextValue, + CONTEXT_REQUEST_FILE, + 'get-pet.nap must be a requestFile (leaf node)', + ); + }); + + test('nested playlist in file tree also expands with children', function () { + this.timeout(10000); + const provider = getExplorerProvider(), + rootNodes = provider.getChildren(), + // Find the petstore folder in the file tree + petstoreFolder = findNodeByLabel(rootNodes, 'petstore'); + assert.ok(petstoreFolder, 'File tree must contain petstore folder'); + + const petstoreChildren = provider.getChildren(petstoreFolder), + // Find full.naplist in the petstore folder + fullNode = findNodeByLabel(petstoreChildren, 'full'); + assert.ok(fullNode, "petstore folder must contain 'full' playlist node"); + assert.ok( + fullNode.children && fullNode.children.length > 0, + 'full playlist in file tree must have expandable children', + ); + + // The nested smoke.naplist must be a playlist with its own children + const smokeInFileTree = findNodeByLabel(fullNode.children, 'smoke'); + assert.ok(smokeInFileTree, "full playlist in file tree must contain nested 'smoke' playlist"); + assert.ok( + smokeInFileTree.children && smokeInFileTree.children.length > 0, + 'Nested smoke.naplist in file tree MUST expand to show its own children', + ); + }); +}); 
diff --git a/src/Napper.VsCode/src/test/e2e/httpConvert.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/httpConvert.e2e.test.ts new file mode 100644 index 0000000..b8fc2e9 --- /dev/null +++ b/src/Napper.VsCode/src/test/e2e/httpConvert.e2e.test.ts @@ -0,0 +1,307 @@ +// Specs: vscode-http-convert +// E2E tests — prove the .http → .nap conversion works through the actual +// VSCode extension commands and CodeLens, not by calling the CLI directly. +import * as assert from 'assert'; +import * as vscode from 'vscode'; +import * as fs from 'fs'; +import * as path from 'path'; +import { + activateExtension, + closeAllEditors, + getRegisteredCommands, + openDocument, + sleep, + waitForCondition, +} from '../helpers/helpers'; +import { + CMD_CONVERT_HTTP_DIR, + CMD_CONVERT_HTTP_FILE, + ENCODING_UTF8, + NAP_EXTENSION, + SECTION_ASSERT, + SECTION_REQUEST, +} from '../../constants'; + +const FIXTURE_HTTP_FILE = 'sample.http'; +const EXPECTED_REQUEST_COUNT = 3; + +const workspaceRoot = (): string => { + const folders = vscode.workspace.workspaceFolders; + if (!folders || folders.length === 0) { + throw new Error('No workspace folder'); + } + const [first] = folders; + if (!first) { + throw new Error('No workspace folder'); + } + return first.uri.fsPath; +}; + +const collectNapFiles = (dir: string): string[] => + fs + .readdirSync(dir) + .filter((f: string) => f.endsWith(NAP_EXTENSION)) + .map((f: string) => path.join(dir, f)); + +const generatedNapFilesInWorkspace = (): string[] => { + const root = workspaceRoot(); + return collectNapFiles(root).filter((f) => { + const content = fs.readFileSync(f, ENCODING_UTF8); + return ( + content.includes('jsonplaceholder.typicode.com') && + content.includes(SECTION_REQUEST) && + !content.includes(SECTION_ASSERT) + ); + }); +}; + +const cleanupGeneratedNapFiles = (): void => { + for (const f of generatedNapFilesInWorkspace()) { + fs.unlinkSync(f); + } +}; + +suite('HTTP Convert — Command Registration', () => { + suiteSetup(async function () { + 
this.timeout(30_000); + await activateExtension(); + }); + + test('convertHttpFile command is registered', async () => { + const commands = await getRegisteredCommands(); + assert.ok( + commands.includes(CMD_CONVERT_HTTP_FILE), + `Command ${CMD_CONVERT_HTTP_FILE} must be registered`, + ); + }); + + test('convertHttpDirectory command is registered', async () => { + const commands = await getRegisteredCommands(); + assert.ok( + commands.includes(CMD_CONVERT_HTTP_DIR), + `Command ${CMD_CONVERT_HTTP_DIR} must be registered`, + ); + }); +}); + +suite('HTTP Convert — CodeLens on .http files', () => { + suiteSetup(async function () { + this.timeout(30_000); + await activateExtension(); + await sleep(3000); + }); + + suiteTeardown(async () => { + await closeAllEditors(); + }); + + test("CodeLens 'Convert to .nap' appears on .http file", async function () { + this.timeout(15_000); + const doc = await openDocument(FIXTURE_HTTP_FILE); + await sleep(3000); + + const lenses = await vscode.commands.executeCommand( + 'vscode.executeCodeLensProvider', + doc.uri, + ); + + assert.ok(lenses.length > 0, 'Must have at least one CodeLens on .http file'); + + const convertLens = lenses.find((l) => l.command?.command === CMD_CONVERT_HTTP_FILE); + assert.ok(convertLens, `Must have a CodeLens with command ${CMD_CONVERT_HTTP_FILE}`); + const title = convertLens.command?.title ?? 
''; + assert.ok( + title.includes('Convert to .nap'), + `CodeLens title must contain "Convert to .nap", got: ${title}`, + ); + }); + + test('CodeLens passes file URI as argument', async function () { + this.timeout(15_000); + const doc = await openDocument(FIXTURE_HTTP_FILE); + await sleep(3000); + + const lenses = await vscode.commands.executeCommand( + 'vscode.executeCodeLensProvider', + doc.uri, + ); + + const convertLens = lenses.find((l) => l.command?.command === CMD_CONVERT_HTTP_FILE); + assert.ok(convertLens, 'Convert CodeLens must exist'); + assert.ok(convertLens.command?.arguments !== undefined, 'Convert CodeLens must have arguments'); + assert.ok( + convertLens.command.arguments.length > 0, + 'Convert CodeLens must pass at least one argument (the file URI)', + ); + }); +}); + +suite('HTTP Convert — Execute via VSCode Command', () => { + suiteSetup(async function () { + this.timeout(30_000); + await activateExtension(); + await sleep(3000); + cleanupGeneratedNapFiles(); + }); + + suiteTeardown(async () => { + await closeAllEditors(); + cleanupGeneratedNapFiles(); + }); + + setup(() => { + cleanupGeneratedNapFiles(); + }); + + teardown(() => { + cleanupGeneratedNapFiles(); + }); + + test('executing convertHttpFile command with .http URI creates .nap files on disk', async function () { + this.timeout(30_000); + const httpFilePath = path.join(workspaceRoot(), FIXTURE_HTTP_FILE); + assert.ok(fs.existsSync(httpFilePath), `Fixture .http file must exist at ${httpFilePath}`); + + const fixturePath = path.join(workspaceRoot(), 'post-jsonplaceholder.nap'); + assert.ok(fs.existsSync(fixturePath), 'Hand-written fixture must survive cleanup'); + + const napFilesBefore = generatedNapFilesInWorkspace(); + assert.strictEqual( + napFilesBefore.length, + 0, + 'No converted .nap files should exist before running command', + ); + + const fileUri = vscode.Uri.file(httpFilePath); + await vscode.commands.executeCommand(CMD_CONVERT_HTTP_FILE, fileUri); + + await 
waitForCondition(() => generatedNapFilesInWorkspace().length > 0, 15_000); + + const napFilesAfter = generatedNapFilesInWorkspace(); + assert.strictEqual( + napFilesAfter.length, + EXPECTED_REQUEST_COUNT, + `Command must produce exactly ${EXPECTED_REQUEST_COUNT} .nap files, got ${napFilesAfter.length}`, + ); + }); + + test('generated .nap files have [request] sections with correct content', async function () { + this.timeout(30_000); + const httpFilePath = path.join(workspaceRoot(), FIXTURE_HTTP_FILE), + fileUri = vscode.Uri.file(httpFilePath); + + await vscode.commands.executeCommand(CMD_CONVERT_HTTP_FILE, fileUri); + + await waitForCondition( + () => generatedNapFilesInWorkspace().length >= EXPECTED_REQUEST_COUNT, + 15_000, + ); + + const napFiles = generatedNapFilesInWorkspace(); + for (const napFile of napFiles) { + const content = fs.readFileSync(napFile, ENCODING_UTF8); + assert.ok( + content.includes(SECTION_REQUEST), + `${path.basename(napFile)} must contain [request] section`, + ); + assert.ok( + content.includes('jsonplaceholder.typicode.com'), + `${path.basename(napFile)} must preserve the URL`, + ); + assert.ok(content.length > 10, `${path.basename(napFile)} must have substantive content`); + assert.ok( + !content.includes(SECTION_ASSERT), + `${path.basename(napFile)} must not have [assert] section (generated, not hand-written)`, + ); + const hasMethod = + content.includes('GET ') || + content.includes('POST ') || + content.includes('PUT ') || + content.includes('PATCH ') || + content.includes('DELETE '); + assert.ok(hasMethod, `${path.basename(napFile)} must specify an HTTP method`); + } + }); + + test('generated .nap files contain GET and POST methods from source .http', async function () { + this.timeout(30_000); + const httpFilePath = path.join(workspaceRoot(), FIXTURE_HTTP_FILE), + fileUri = vscode.Uri.file(httpFilePath); + + await vscode.commands.executeCommand(CMD_CONVERT_HTTP_FILE, fileUri); + + await waitForCondition( + () => 
generatedNapFilesInWorkspace().length >= EXPECTED_REQUEST_COUNT, + 15_000, + ); + + const napFiles = generatedNapFilesInWorkspace(), + allContent = napFiles.map((f) => fs.readFileSync(f, ENCODING_UTF8)).join('\n'); + + assert.ok(allContent.includes('GET'), 'Converted output must contain a GET request'); + assert.ok(allContent.includes('POST'), 'Converted output must contain a POST request'); + }); + + test('POST .nap file preserves Content-Type header and JSON body', async function () { + this.timeout(30_000); + const httpFilePath = path.join(workspaceRoot(), FIXTURE_HTTP_FILE), + fileUri = vscode.Uri.file(httpFilePath); + + await vscode.commands.executeCommand(CMD_CONVERT_HTTP_FILE, fileUri); + + await waitForCondition( + () => generatedNapFilesInWorkspace().length >= EXPECTED_REQUEST_COUNT, + 15_000, + ); + + const napFiles = generatedNapFilesInWorkspace(), + postFile = napFiles.find((f) => { + const content = fs.readFileSync(f, ENCODING_UTF8); + return content.includes('POST'); + }); + + assert.ok(postFile !== undefined, 'Must have a .nap file containing POST method'); + + const content = fs.readFileSync(postFile, ENCODING_UTF8); + assert.ok(content.includes('Content-Type'), 'POST .nap must preserve Content-Type header'); + assert.ok(content.includes('application/json'), 'POST .nap must preserve application/json'); + assert.ok(content.includes('John Doe'), 'POST .nap must preserve request body content'); + assert.ok( + content.includes('jsonplaceholder.typicode.com'), + 'POST .nap must preserve the target URL', + ); + assert.ok( + !content.includes(SECTION_ASSERT), + 'POST .nap must not have [assert] section (converter output)', + ); + }); + + test('running convert command twice does not fail', async function () { + this.timeout(30_000); + const httpFilePath = path.join(workspaceRoot(), FIXTURE_HTTP_FILE), + fileUri = vscode.Uri.file(httpFilePath); + + await vscode.commands.executeCommand(CMD_CONVERT_HTTP_FILE, fileUri); + await waitForCondition(() => 
generatedNapFilesInWorkspace().length > 0, 15_000); + + await vscode.commands.executeCommand(CMD_CONVERT_HTTP_FILE, fileUri); + await sleep(3000); + + const napFiles = generatedNapFilesInWorkspace(); + assert.ok( + napFiles.length >= EXPECTED_REQUEST_COUNT, + `Must still have at least ${EXPECTED_REQUEST_COUNT} .nap files after re-running`, + ); + for (const napFile of napFiles) { + const content = fs.readFileSync(napFile, ENCODING_UTF8); + assert.ok( + content.includes(SECTION_REQUEST), + `${path.basename(napFile)} must still have [request] after re-run`, + ); + assert.ok( + !content.includes(SECTION_ASSERT), + `${path.basename(napFile)} must still be a generated file (no [assert]) after re-run`, + ); + } + }); +}); diff --git a/src/Napper.VsCode/src/test/e2e/openApiImport.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/openApiImport.e2e.test.ts new file mode 100644 index 0000000..af9f7d9 --- /dev/null +++ b/src/Napper.VsCode/src/test/e2e/openApiImport.e2e.test.ts @@ -0,0 +1,354 @@ +// Specs: vscode-openapi, vscode-openapi-import +import * as assert from 'assert'; +import * as vscode from 'vscode'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as os from 'os'; +import { execFile } from 'child_process'; +import { activateExtension, getRegisteredCommands, readFixtureFile } from '../helpers/helpers'; +import { downloadSpec, saveTempSpec } from '../../openApiImport'; +import { + BASE_URL_KEY, + CLI_CMD_GENERATE, + CLI_FLAG_OUTPUT, + CLI_FLAG_OUTPUT_DIR, + CLI_OUTPUT_JSON, + CLI_SPAWN_FAILED_PREFIX, + CLI_SUBCMD_OPENAPI, + CMD_IMPORT_OPENAPI_FILE, + CMD_IMPORT_OPENAPI_URL, + CONFIG_CLI_PATH, + CONFIG_SECTION, + DEFAULT_CLI_PATH, + ENCODING_UTF8, + NAPENV_EXTENSION, + NAP_EXTENSION, + OPENAPI_DOWNLOAD_FAILED_PREFIX, + OPENAPI_URL_PLACEHOLDER, + SECTION_ASSERT, + SECTION_META, + SECTION_REQUEST, + SECTION_STEPS, +} from '../../constants'; + +const PETSTORE_URL = OPENAPI_URL_PLACEHOLDER, + BEECEPTOR_URL = 
'https://beeceptor.com/docs/storefront-sample.json', + BEECEPTOR_EXPECTED_ENDPOINTS = 11, + BEECEPTOR_BASE_URL_DOMAIN = 'api.demo-ecommerce.com', + BEECEPTOR_AUTH_REGISTER_PATH = '/auth/register', + BEECEPTOR_CHECKOUT_PATH = '/checkout', + BEECEPTOR_SPEC_TITLE = 'E-commerce API', + NONEXISTENT_URL = 'https://httpbin.org/status/404', + TEMP_SPEC_FILENAME = '.openapi-spec.json'; + +suite('OpenAPI Import', () => { + suiteSetup(async function () { + this.timeout(30_000); + await activateExtension(); + }); + + test('import URL command is registered', async () => { + const commands = await getRegisteredCommands(); + assert.ok( + commands.includes(CMD_IMPORT_OPENAPI_URL), + `Command ${CMD_IMPORT_OPENAPI_URL} should be registered`, + ); + }); + + test('import file command is registered', async () => { + const commands = await getRegisteredCommands(); + assert.ok( + commands.includes(CMD_IMPORT_OPENAPI_FILE), + `Command ${CMD_IMPORT_OPENAPI_FILE} should be registered`, + ); + }); + + test('downloadSpec fetches valid OpenAPI from petstore URL', async function () { + this.timeout(30_000); + const result = await downloadSpec(PETSTORE_URL); + assert.ok(result.ok, 'Download should succeed'); + const parsed: unknown = JSON.parse(result.value), + spec = parsed as { openapi?: string; paths?: Record }; + assert.ok(spec.openapi !== undefined, 'Downloaded spec must have an openapi version field'); + assert.ok(spec.paths !== undefined, 'Downloaded spec must have paths'); + assert.ok( + Object.keys(spec.paths ?? 
{}).length > 0, + 'Downloaded spec must have at least one path', + ); + }); + + test('downloadSpec returns error for 404 URL', async function () { + this.timeout(15_000); + const result = await downloadSpec(NONEXISTENT_URL); + assert.ok(!result.ok, 'Download should fail for 404'); + assert.ok( + result.error.startsWith(OPENAPI_DOWNLOAD_FAILED_PREFIX), + `Error should start with download failed prefix, got: ${result.error}`, + ); + }); + + test('downloadSpec follows redirects', async function () { + this.timeout(15_000); + const redirectUrl = + 'https://httpbin.org/redirect-to?url=https%3A%2F%2Fpetstore3.swagger.io%2Fapi%2Fv3%2Fopenapi.json&status_code=302', + result = await downloadSpec(redirectUrl); + assert.ok(result.ok, 'Download should succeed after redirect'); + const parsed: unknown = JSON.parse(result.value), + spec = parsed as { openapi?: string }; + assert.ok(spec.openapi !== undefined, 'Redirected spec must have openapi version field'); + }); + + test('saveTempSpec writes file and returns path', () => { + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'napper-test-')), + content = '{"openapi":"3.0.0","paths":{}}', + specPath = saveTempSpec(content, tmpDir); + assert.ok(fs.existsSync(specPath), 'Temp spec file must exist after save'); + assert.ok( + specPath.endsWith(TEMP_SPEC_FILENAME), + `Spec path must end with ${TEMP_SPEC_FILENAME}`, + ); + const written = fs.readFileSync(specPath, 'utf-8'); + assert.strictEqual(written, content, 'Written content must match input'); + // Cleanup + fs.rmSync(tmpDir, { recursive: true }); + }); + + test('saveTempSpec overwrites existing file', () => { + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'napper-test-')), + first = '{"openapi":"3.0.0"}', + second = '{"openapi":"3.1.0","paths":{"/pets":{}}}'; + saveTempSpec(first, tmpDir); + const specPath = saveTempSpec(second, tmpDir), + written = fs.readFileSync(specPath, 'utf-8'); + assert.strictEqual(written, second, 'Second write must overwrite the first'); + 
fs.rmSync(tmpDir, { recursive: true }); + }); +}); + +// ─── CLI generate openapi E2E ──────────────────────────────── + +const ECOMMERCE_SPEC_FIXTURE = 'ecommerce-spec.json', + EXPECTED_ENDPOINT_COUNT = 11, + resolveCliPath = (): string => { + const configured = vscode.workspace + .getConfiguration(CONFIG_SECTION) + .get(CONFIG_CLI_PATH, ''); + return configured.length > 0 ? configured : DEFAULT_CLI_PATH; + }, + runCliGenerate = async (specPath: string, outDir: string): Promise => + new Promise((resolve, reject) => { + execFile( + resolveCliPath(), + [ + CLI_CMD_GENERATE, + CLI_SUBCMD_OPENAPI, + specPath, + CLI_FLAG_OUTPUT_DIR, + outDir, + CLI_FLAG_OUTPUT, + CLI_OUTPUT_JSON, + ], + { timeout: 30_000 }, + (error: Error | null, stdout: string, stderr: string) => { + if (error !== null && stdout.length === 0) { + reject(new Error(`${CLI_SPAWN_FAILED_PREFIX}${stderr}`)); + return; + } + resolve(stdout); + }, + ); + }), + collectNapFiles = (dir: string): string[] => + fs + .readdirSync(dir) + .filter((f: string) => f.endsWith(NAP_EXTENSION)) + .map((f: string) => path.join(dir, f)); + +suite('OpenAPI CLI Generate', () => { + suiteSetup(async function () { + this.timeout(30_000); + await activateExtension(); + }); + + test('CLI generates .nap files from ecommerce spec', async function () { + this.timeout(30_000); + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'napper-generate-')); + + try { + const specContent = readFixtureFile(ECOMMERCE_SPEC_FIXTURE), + specPath = path.join(tmpDir, ECOMMERCE_SPEC_FIXTURE); + fs.writeFileSync(specPath, specContent, ENCODING_UTF8); + + const stdout = await runCliGenerate(specPath, tmpDir), + generated = JSON.parse(stdout) as { files: number; playlist: string }; + + assert.strictEqual( + generated.files, + EXPECTED_ENDPOINT_COUNT, + `CLI must generate exactly ${EXPECTED_ENDPOINT_COUNT} .nap files`, + ); + + const playlistPath = path.join(tmpDir, generated.playlist); + assert.ok(fs.existsSync(playlistPath), `Playlist file must 
exist at ${generated.playlist}`); + + const playlistContent = fs.readFileSync(playlistPath, ENCODING_UTF8); + assert.ok(playlistContent.includes(SECTION_META), 'Playlist must have [meta] section'); + assert.ok(playlistContent.includes(SECTION_STEPS), 'Playlist must have [steps] section'); + + const napenvPath = path.join(tmpDir, NAPENV_EXTENSION); + assert.ok(fs.existsSync(napenvPath), '.napenv file must exist with base URL'); + + const envContent = fs.readFileSync(napenvPath, ENCODING_UTF8); + assert.ok(envContent.includes(BASE_URL_KEY), '.napenv must contain baseUrl key'); + + const napFiles = collectNapFiles(tmpDir); + assert.strictEqual( + napFiles.length, + EXPECTED_ENDPOINT_COUNT, + `Must find exactly ${EXPECTED_ENDPOINT_COUNT} .nap files on disk`, + ); + + for (const napFile of napFiles) { + const content = fs.readFileSync(napFile, ENCODING_UTF8); + assert.ok( + content.includes(SECTION_META), + `${path.basename(napFile)} must have [meta] section`, + ); + assert.ok( + content.includes(SECTION_REQUEST), + `${path.basename(napFile)} must have [request] section`, + ); + assert.ok( + content.includes(SECTION_ASSERT), + `${path.basename(napFile)} must have [assert] section`, + ); + } + } finally { + fs.rmSync(tmpDir, { recursive: true }); + } + }); +}); + +// ─── Beeceptor URL → CLI generate E2E ─────────────────────── +// Proves the URL content drives generated output — not a fixture + +suite('OpenAPI URL-to-Generate E2E (Beeceptor)', () => { + suiteSetup(async function () { + this.timeout(30_000); + await activateExtension(); + }); + + test('downloadSpec + CLI generate produces beeceptor-specific output', async function () { + this.timeout(60_000); + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'napper-beeceptor-')); + + try { + const specResult = await downloadSpec(BEECEPTOR_URL); + assert.ok( + specResult.ok, + `Beeceptor URL download must succeed, got: ${specResult.ok ? 
'' : specResult.error}`, + ); + + const specPath = saveTempSpec(specResult.value, tmpDir); + assert.ok( + fs.existsSync(specPath), + 'Temp spec file must exist after saving downloaded content', + ); + + const stdout = await runCliGenerate(specPath, tmpDir), + generated = JSON.parse(stdout) as { files: number; playlist: string }; + + assert.strictEqual( + generated.files, + BEECEPTOR_EXPECTED_ENDPOINTS, + `Beeceptor spec must produce exactly ${BEECEPTOR_EXPECTED_ENDPOINTS} endpoints`, + ); + + const napenvPath = path.join(tmpDir, NAPENV_EXTENSION), + envContent = fs.readFileSync(napenvPath, ENCODING_UTF8); + assert.ok( + envContent.includes(BEECEPTOR_BASE_URL_DOMAIN), + `Environment must contain beeceptor base URL domain ${BEECEPTOR_BASE_URL_DOMAIN}`, + ); + + const playlistPath = path.join(tmpDir, generated.playlist), + playlistContent = fs.readFileSync(playlistPath, ENCODING_UTF8); + assert.ok( + playlistContent.includes(BEECEPTOR_SPEC_TITLE), + `Playlist must contain beeceptor spec title "${BEECEPTOR_SPEC_TITLE}"`, + ); + + const napFiles = collectNapFiles(tmpDir); + const hasAuthRegister = napFiles.some((f: string) => { + const content = fs.readFileSync(f, ENCODING_UTF8); + return content.includes(BEECEPTOR_AUTH_REGISTER_PATH); + }); + assert.ok(hasAuthRegister, 'Must have auth/register endpoint from beeceptor spec'); + + const hasCheckout = napFiles.some((f: string) => { + const content = fs.readFileSync(f, ENCODING_UTF8); + return content.includes(BEECEPTOR_CHECKOUT_PATH); + }); + assert.ok(hasCheckout, 'Must have checkout endpoint from beeceptor spec'); + + for (const napFile of napFiles) { + const content = fs.readFileSync(napFile, ENCODING_UTF8); + assert.ok( + content.includes(SECTION_META), + `${path.basename(napFile)} must have [meta] section`, + ); + assert.ok( + content.includes(SECTION_REQUEST), + `${path.basename(napFile)} must have [request] section`, + ); + assert.ok( + content.includes(SECTION_ASSERT), + `${path.basename(napFile)} must have 
[assert] section`, + ); + } + } finally { + fs.rmSync(tmpDir, { recursive: true }); + } + }); + + test('beeceptor URL produces different output than petstore URL', async function () { + this.timeout(60_000); + const beeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'napper-bee-')), + petDir = fs.mkdtempSync(path.join(os.tmpdir(), 'napper-pet-')); + + try { + const beeResult = await downloadSpec(BEECEPTOR_URL); + assert.ok(beeResult.ok, 'Beeceptor download must succeed'); + const beePath = saveTempSpec(beeResult.value, beeDir); + await runCliGenerate(beePath, beeDir); + + const petResult = await downloadSpec(PETSTORE_URL); + assert.ok(petResult.ok, 'Petstore download must succeed'); + const petPath = saveTempSpec(petResult.value, petDir); + await runCliGenerate(petPath, petDir); + + const beeEnv = fs.readFileSync(path.join(beeDir, NAPENV_EXTENSION), ENCODING_UTF8), + petEnv = fs.readFileSync(path.join(petDir, NAPENV_EXTENSION), ENCODING_UTF8); + + assert.ok( + beeEnv.includes(BEECEPTOR_BASE_URL_DOMAIN), + 'Beeceptor env must have beeceptor domain', + ); + assert.ok( + !petEnv.includes(BEECEPTOR_BASE_URL_DOMAIN), + 'Petstore env must NOT have beeceptor domain', + ); + + const beeNaps = collectNapFiles(beeDir), + petNaps = collectNapFiles(petDir); + assert.notStrictEqual( + beeNaps.length, + petNaps.length, + 'Different specs must produce different number of files', + ); + } finally { + fs.rmSync(beeDir, { recursive: true }); + fs.rmSync(petDir, { recursive: true }); + } + }); +}); diff --git a/src/Napper.VsCode/src/test/e2e/playlist.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/playlist.e2e.test.ts new file mode 100644 index 0000000..313264d --- /dev/null +++ b/src/Napper.VsCode/src/test/e2e/playlist.e2e.test.ts @@ -0,0 +1,513 @@ +// Specs: vscode-playlists, vscode-layout, vscode-commands +import * as assert from 'assert'; +import * as vscode from 'vscode'; +import * as fs from 'fs'; +import { + activateExtension, + closeAllEditors, + executeCommand, + extractStepLines, 
+ getFixturePath, + openDocument, + sleep, + waitForCondition, +} from '../helpers/helpers'; +import * as path from 'path'; +import { + CMD_RUN_FILE, + CMD_SAVE_REPORT, + CONFIG_CLI_PATH, + CONFIG_SECTION, + PLAYLIST_PANEL_TITLE, + REPORT_FILE_EXTENSION, + REPORT_FILE_SUFFIX, + RESPONSE_PANEL_TITLE, +} from '../../constants'; + +const findTabByLabel = (label: string): vscode.Tab | undefined => + vscode.window.tabGroups.all.flatMap((g) => g.tabs).find((tab) => tab.label.includes(label)); + +suite('Playlist Panel — Real API Calls', () => { + suiteSetup(async function () { + this.timeout(30000); + await activateExtension(); + await sleep(3000); + }); + + suiteTeardown(async () => { + await closeAllEditors(); + }); + + test('playlist panel opens IMMEDIATELY when run starts, before API calls complete', async function () { + this.timeout(45000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('petstore/smoke.naplist'), + // Fire the command but do NOT await — we want to check the panel + // Appears while API calls are still in flight + runPromise = executeCommand(CMD_RUN_FILE, doc.uri); + + // Panel must appear within 2 seconds — API calls take much longer + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 2000); + + const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + playlistTab, + `Tab '${PLAYLIST_PANEL_TITLE}' must open IMMEDIATELY when playlist starts, not after all API calls finish`, + ); + + const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTab, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT exist after running a .naplist — playlist panel should open instead`, + ); + + // Now wait for actual completion — panel must persist + await runPromise; + + const panelAfterCompletion = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + panelAfterCompletion, + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after all API calls complete`, + ); + + 
const responseTabAfterRun = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTabAfterRun, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear even after playlist completes — only playlist panel is used`, + ); + }); + + test('running a playlist via filePath object opens panel immediately', async function () { + this.timeout(45000); + await closeAllEditors(); + await sleep(500); + + const playlistPath = getFixturePath('petstore/smoke.naplist'), + // Fire without await to test immediate opening + runPromise = executeCommand(CMD_RUN_FILE, { filePath: playlistPath }); + + // Panel must appear within 2 seconds — proves immediate opening + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 2000); + + const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + playlistTab, + `Tab '${PLAYLIST_PANEL_TITLE}' must open IMMEDIATELY via filePath object (tree view click path)`, + ); + + const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTab, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT exist — the tree view play button must open the playlist panel`, + ); + + await runPromise; + + const panelAfterCompletion = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + panelAfterCompletion, + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after completion via filePath path`, + ); + + const responseTabAfterFilePath = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTabAfterFilePath, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear after playlist completion via filePath`, + ); + }); + + test('running a single .nap file opens response panel, not playlist panel', async function () { + this.timeout(45000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('get-httpbin.nap'); + assert.strictEqual(doc.languageId, 'nap', 'get-httpbin.nap should have nap language mode'); + + await executeCommand(CMD_RUN_FILE, 
doc.uri); + + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); + + const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.ok( + responseTab, + `Tab '${RESPONSE_PANEL_TITLE}' must exist after running a single .nap file`, + ); + assert.notStrictEqual( + responseTab.group, + undefined, + 'Response tab should be visible in a tab group', + ); + + const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.strictEqual( + playlistTab, + undefined, + `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single .nap file`, + ); + }); + + test('playlist file has correct structure', () => { + const playlistPath = getFixturePath('petstore/smoke.naplist'), + content = fs.readFileSync(playlistPath, 'utf-8'); + + assert.ok(content.includes('[meta]'), 'Should have [meta] section'); + assert.ok(content.includes('[steps]'), 'Should have [steps] section'); + assert.ok(content.includes('list-pets.nap'), 'Should reference list-pets step'); + assert.ok(content.includes('get-pet.nap'), 'Should reference get-pet step'); + }); + + test('playlist steps reference files that exist', () => { + const playlistPath = getFixturePath('petstore/smoke.naplist'), + content = fs.readFileSync(playlistPath, 'utf-8'), + stepPaths = extractStepLines(content); + + assert.ok(stepPaths.length > 0, 'Playlist should have at least one step'); + + const basePath = getFixturePath('petstore'); + for (const stepRelative of stepPaths) { + const stepFull = `${basePath}/${stepRelative.replace('./', '')}`; + assert.ok(fs.existsSync(stepFull), `Step file should exist: ${stepRelative}`); + } + }); + + test('playlist with script step opens panel and completes without error', async function () { + this.timeout(60000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('petstore/with-script.naplist'); + assert.strictEqual( + doc.languageId, + 'naplist', + 'with-script.naplist should have naplist language mode', + ); + + const 
runPromise = executeCommand(CMD_RUN_FILE, doc.uri); + + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + playlistTab, + `Tab '${PLAYLIST_PANEL_TITLE}' must open when running playlist that includes .fsx script steps`, + ); + + const responseTabDuringRun = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTabDuringRun, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT exist — playlist with scripts should use playlist panel, not response panel`, + ); + + await runPromise; + + const panelAfterCompletion = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + panelAfterCompletion, + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after playlist with scripts completes`, + ); + + const responseTabAfterCompletion = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTabAfterCompletion, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear even after playlist with scripts completes`, + ); + }); + + test('with-script.naplist fixture references existing files', () => { + const playlistPath = getFixturePath('petstore/with-script.naplist'), + content = fs.readFileSync(playlistPath, 'utf-8'); + + assert.ok(content.includes('[meta]'), 'Should have [meta] section'); + assert.ok(content.includes('[steps]'), 'Should have [steps] section'); + assert.ok(content.includes('echo.fsx'), 'Should reference echo.fsx script step'); + assert.ok(content.includes('list-pets.nap'), 'Should reference list-pets.nap API step'); + + const scriptsDir = getFixturePath('scripts'); + assert.ok(fs.existsSync(`${scriptsDir}/echo.fsx`), 'echo.fsx fixture script must exist'); + + const echoContent = fs.readFileSync(`${scriptsDir}/echo.fsx`, 'utf-8'); + assert.ok(echoContent.includes('printfn'), 'echo.fsx must contain printfn to produce output'); + }); + + test('re-running a playlist resets state and opens fresh running panel', async function () { + 
this.timeout(90000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('petstore/smoke.naplist'); + + // First run — wait for full completion so results are stored + await executeCommand(CMD_RUN_FILE, doc.uri); + + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 10000); + + const panelAfterFirstRun = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + panelAfterFirstRun, + `Tab '${PLAYLIST_PANEL_TITLE}' must exist after first playlist run completes`, + ); + + // Second run — fire WITHOUT await to test immediate state reset + const secondRunPromise = executeCommand(CMD_RUN_FILE, doc.uri); + + // Panel must still exist immediately (reused, not recreated) + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 2000); + + const panelDuringSecondRun = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + panelDuringSecondRun, + `Tab '${PLAYLIST_PANEL_TITLE}' must be reused for second run — not closed and reopened`, + ); + + // Only ONE playlist tab should exist (proves reuse, not duplication) + const playlistTabs = vscode.window.tabGroups.all + .flatMap((g) => g.tabs) + .filter((t) => t.label.includes(PLAYLIST_PANEL_TITLE)); + assert.strictEqual( + playlistTabs.length, + 1, + 'Only one playlist panel tab should exist during re-run — panel must be reused', + ); + + // Response panel must NOT appear during re-run + const responseTabDuringRerun = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTabDuringRerun, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear during playlist re-run`, + ); + + // Wait for second run to complete + await secondRunPromise; + + // Panel must persist after second run + const panelAfterSecondRun = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + panelAfterSecondRun, + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after second playlist run completes`, + ); + }); + + test('opening .naplist sets naplist language mode', async 
function () { + this.timeout(10000); + const doc = await openDocument('petstore/smoke.naplist'); + assert.strictEqual(doc.languageId, 'naplist', 'Language should be naplist'); + }); + + test('save report command creates HTML report file after playlist completes', async function () { + this.timeout(60000); + await closeAllEditors(); + await sleep(500); + + const playlistPath = getFixturePath('petstore/smoke.naplist'), + expectedReportPath = path.join( + path.dirname(playlistPath), + `smoke${REPORT_FILE_SUFFIX}${REPORT_FILE_EXTENSION}`, + ); + + // Clean up any leftover report from previous runs + if (fs.existsSync(expectedReportPath)) { + fs.unlinkSync(expectedReportPath); + } + + const doc = await openDocument('petstore/smoke.naplist'); + await executeCommand(CMD_RUN_FILE, doc.uri); + + // Wait for panel to appear and run to complete + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + // Report must NOT exist before the save command is invoked + assert.strictEqual( + fs.existsSync(expectedReportPath), + false, + 'Report file must not exist before Save Report is triggered', + ); + + // Trigger save report — same as clicking the Save Report button + await executeCommand(CMD_SAVE_REPORT); + + // Report file must now exist at the expected path + assert.ok( + fs.existsSync(expectedReportPath), + `Report file must be created at ${expectedReportPath} after Save Report command`, + ); + + const reportContent = fs.readFileSync(expectedReportPath, 'utf-8'); + + assert.ok(reportContent.includes(' findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + playlistTab, + `Tab '${PLAYLIST_PANEL_TITLE}' must open when running playlist that includes .csx script steps`, + ); + + const responseTabDuringRun = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTabDuringRun, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT exist — playlist with 
C# scripts should use playlist panel`, + ); + + await runPromise; + + const panelAfterCompletion = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + panelAfterCompletion, + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after playlist with C# scripts completes`, + ); + + const responseTabAfterCompletion = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTabAfterCompletion, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear after playlist with C# scripts completes`, + ); + }); + + test('with-csx-script.naplist fixture references existing files', () => { + const playlistPath = getFixturePath('petstore/with-csx-script.naplist'), + content = fs.readFileSync(playlistPath, 'utf-8'); + + assert.ok(content.includes('[meta]'), 'Should have [meta] section'); + assert.ok(content.includes('[steps]'), 'Should have [steps] section'); + assert.ok(content.includes('echo.csx'), 'Should reference echo.csx script step'); + assert.ok(content.includes('list-pets.nap'), 'Should reference list-pets.nap API step'); + + const scriptsDir = getFixturePath('scripts'); + assert.ok(fs.existsSync(`${scriptsDir}/echo.csx`), 'echo.csx fixture script must exist'); + + const echoContent = fs.readFileSync(`${scriptsDir}/echo.csx`, 'utf-8'); + assert.ok( + echoContent.includes('Console.WriteLine'), + 'echo.csx must contain Console.WriteLine to produce output', + ); + }); + + test('playlist with mixed FSX and CSX scripts opens panel and completes without error', async function () { + this.timeout(90000); + await closeAllEditors(); + await sleep(500); + + const doc = await openDocument('petstore/with-mixed-scripts.naplist'); + assert.strictEqual( + doc.languageId, + 'naplist', + 'with-mixed-scripts.naplist should have naplist language mode', + ); + + const runPromise = executeCommand(CMD_RUN_FILE, doc.uri); + + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( 
+ playlistTab, + `Tab '${PLAYLIST_PANEL_TITLE}' must open when running playlist with mixed F# and C# scripts`, + ); + + const responseTabDuringRun = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTabDuringRun, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT exist — mixed-script playlist should use playlist panel`, + ); + + await runPromise; + + const panelAfterCompletion = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + panelAfterCompletion, + `Tab '${PLAYLIST_PANEL_TITLE}' must persist after mixed F#/C# playlist completes`, + ); + + const responseTabAfterCompletion = findTabByLabel(RESPONSE_PANEL_TITLE); + assert.strictEqual( + responseTabAfterCompletion, + undefined, + `Tab '${RESPONSE_PANEL_TITLE}' must NOT appear after mixed F#/C# playlist completes`, + ); + }); + + test('with-mixed-scripts.naplist fixture references both FSX and CSX files', () => { + const playlistPath = getFixturePath('petstore/with-mixed-scripts.naplist'), + content = fs.readFileSync(playlistPath, 'utf-8'); + + assert.ok(content.includes('[meta]'), 'Should have [meta] section'); + assert.ok(content.includes('[steps]'), 'Should have [steps] section'); + assert.ok(content.includes('echo.fsx'), 'Should reference echo.fsx F# script step'); + assert.ok(content.includes('echo.csx'), 'Should reference echo.csx C# script step'); + assert.ok(content.includes('list-pets.nap'), 'Should reference list-pets.nap API step'); + assert.ok(content.includes('get-pet.nap'), 'Should reference get-pet.nap API step'); + + const scriptsDir = getFixturePath('scripts'); + assert.ok(fs.existsSync(`${scriptsDir}/echo.fsx`), 'echo.fsx must exist for mixed playlist'); + assert.ok(fs.existsSync(`${scriptsDir}/echo.csx`), 'echo.csx must exist for mixed playlist'); + }); + + test('playlist with missing CLI shows error in panel, never PASSED', async function () { + this.timeout(30000); + await closeAllEditors(); + await sleep(500); + + const config = 
vscode.workspace.getConfiguration(CONFIG_SECTION), + originalPath = config.get(CONFIG_CLI_PATH); + + // Point to a nonexistent CLI binary + await config.update( + CONFIG_CLI_PATH, + '/nonexistent/napper-fake-binary', + vscode.ConfigurationTarget.Workspace, + ); + + try { + const doc = await openDocument('petstore/smoke.naplist'), + // Fire command — don't await since it may resolve quickly + runPromise = executeCommand(CMD_RUN_FILE, doc.uri); + + // Panel must open even when CLI fails (showRunning fires before CLI) + await waitForCondition(() => findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined, 5000); + + const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); + assert.ok( + playlistTab, + `Tab '${PLAYLIST_PANEL_TITLE}' must open even when CLI fails — error must be shown in the panel, not silently ignored`, + ); + + await runPromise; + } finally { + // Restore original CLI path + await config.update(CONFIG_CLI_PATH, originalPath, vscode.ConfigurationTarget.Workspace); + } + }); +}); diff --git a/src/Nap.VsCode/src/test/e2e/response.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/response.e2e.test.ts similarity index 52% rename from src/Nap.VsCode/src/test/e2e/response.e2e.test.ts rename to src/Napper.VsCode/src/test/e2e/response.e2e.test.ts index d587be5..559d536 100644 --- a/src/Nap.VsCode/src/test/e2e/response.e2e.test.ts +++ b/src/Napper.VsCode/src/test/e2e/response.e2e.test.ts @@ -1,5 +1,6 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; +// Specs: vscode-editor, vscode-layout +import * as assert from 'assert'; +import * as vscode from 'vscode'; import { activateExtension, closeAllEditors, @@ -7,20 +8,18 @@ import { openDocument, sleep, waitForCondition, -} from "../helpers/helpers"; +} from '../helpers/helpers'; import { CMD_OPEN_RESPONSE, CMD_RUN_FILE, PLAYLIST_PANEL_TITLE, RESPONSE_PANEL_TITLE, -} from "../../constants"; +} from '../../constants'; const findTabByLabel = (label: string): vscode.Tab | undefined => - 
vscode.window.tabGroups.all - .flatMap((g) => g.tabs) - .find((tab) => tab.label.includes(label)); + vscode.window.tabGroups.all.flatMap((g) => g.tabs).find((tab) => tab.label.includes(label)); -suite("Response Panel", () => { +suite('Response Panel', () => { suiteSetup(async function () { this.timeout(30000); await activateExtension(); @@ -31,50 +30,38 @@ suite("Response Panel", () => { await closeAllEditors(); }); - test("response panel opens after running a request", async function () { + test('response panel opens after running a request', async function () { this.timeout(30000); await closeAllEditors(); await sleep(500); - const doc = await openDocument("get-httpbin.nap"); + const doc = await openDocument('get-httpbin.nap'); await executeCommand(CMD_RUN_FILE, doc.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.ok( - responseTab, - `Tab '${RESPONSE_PANEL_TITLE}' must exist after running a request` - ); + assert.ok(responseTab, `Tab '${RESPONSE_PANEL_TITLE}' must exist after running a request`); const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); assert.strictEqual( playlistTab, undefined, - `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single .nap file` + `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single .nap file`, ); }); - test("openResponse command reopens panel after closing all editors", async function () { + test('openResponse command reopens panel after closing all editors', async function () { this.timeout(30000); await closeAllEditors(); await sleep(500); - const doc = await openDocument("get-users.nap"); + const doc = await openDocument('get-users.nap'); await executeCommand(CMD_RUN_FILE, doc.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + 
await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); - assert.ok( - findTabByLabel(RESPONSE_PANEL_TITLE), - "Response panel must exist after run" - ); + assert.ok(findTabByLabel(RESPONSE_PANEL_TITLE), 'Response panel must exist after run'); await closeAllEditors(); await sleep(500); @@ -82,61 +69,49 @@ suite("Response Panel", () => { assert.strictEqual( findTabByLabel(RESPONSE_PANEL_TITLE), undefined, - "Response panel must be gone after closing all editors" + 'Response panel must be gone after closing all editors', ); await executeCommand(CMD_OPEN_RESPONSE); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 5000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 5000); assert.ok( findTabByLabel(RESPONSE_PANEL_TITLE), - `Tab '${RESPONSE_PANEL_TITLE}' must reopen via openResponse command` + `Tab '${RESPONSE_PANEL_TITLE}' must reopen via openResponse command`, ); }); - test("response panel appears in a separate tab group from the editor", async function () { + test('response panel appears in a separate tab group from the editor', async function () { this.timeout(30000); await closeAllEditors(); await sleep(500); - await openDocument("get-httpbin.nap"); + await openDocument('get-httpbin.nap'); const groupsBefore = vscode.window.tabGroups.all.length; await executeCommand(CMD_RUN_FILE, vscode.window.activeTextEditor?.document.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); const groupsAfter = vscode.window.tabGroups.all.length; assert.ok( groupsAfter > groupsBefore, - `Response panel must open in a new tab group (before: ${groupsBefore}, after: ${groupsAfter})` + `Response panel must open in a new tab group (before: ${groupsBefore}, after: ${groupsAfter})`, ); }); - test("running multiple requests reuses the same 
response panel", async function () { + test('running multiple requests reuses the same response panel', async function () { this.timeout(45000); await closeAllEditors(); await sleep(500); - const doc1 = await openDocument("get-httpbin.nap"); + const doc1 = await openDocument('get-httpbin.nap'); await executeCommand(CMD_RUN_FILE, doc1.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); - assert.ok( - findTabByLabel(RESPONSE_PANEL_TITLE), - "Response panel must exist after first run" - ); + assert.ok(findTabByLabel(RESPONSE_PANEL_TITLE), 'Response panel must exist after first run'); const responseTabs1 = vscode.window.tabGroups.all .flatMap((g) => g.tabs) @@ -144,16 +119,13 @@ suite("Response Panel", () => { assert.strictEqual( responseTabs1.length, 1, - "Only one response panel tab should exist after first run" + 'Only one response panel tab should exist after first run', ); - const doc2 = await openDocument("get-users.nap"); + const doc2 = await openDocument('get-users.nap'); await executeCommand(CMD_RUN_FILE, doc2.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); const responseTabs2 = vscode.window.tabGroups.all .flatMap((g) => g.tabs) @@ -161,7 +133,7 @@ suite("Response Panel", () => { assert.strictEqual( responseTabs2.length, 1, - "Still only one response panel tab should exist after second run — panel is reused, not duplicated" + 'Still only one response panel tab should exist after second run — panel is reused, not duplicated', ); }); }); diff --git a/src/Nap.VsCode/src/test/e2e/runall.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/runall.e2e.test.ts similarity index 56% rename from src/Nap.VsCode/src/test/e2e/runall.e2e.test.ts rename to 
src/Napper.VsCode/src/test/e2e/runall.e2e.test.ts index 417cbef..f1c74bc 100644 --- a/src/Nap.VsCode/src/test/e2e/runall.e2e.test.ts +++ b/src/Napper.VsCode/src/test/e2e/runall.e2e.test.ts @@ -1,24 +1,19 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; +// Specs: vscode-commands, vscode-layout +import * as assert from 'assert'; +import * as vscode from 'vscode'; import { activateExtension, closeAllEditors, executeCommand, sleep, waitForCondition, -} from "../helpers/helpers"; -import { - CMD_RUN_ALL, - PLAYLIST_PANEL_TITLE, - RESPONSE_PANEL_TITLE, -} from "../../constants"; +} from '../helpers/helpers'; +import { CMD_RUN_ALL, PLAYLIST_PANEL_TITLE, RESPONSE_PANEL_TITLE } from '../../constants'; const findTabByLabel = (label: string): vscode.Tab | undefined => - vscode.window.tabGroups.all - .flatMap((g) => g.tabs) - .find((tab) => tab.label.includes(label)); + vscode.window.tabGroups.all.flatMap((g) => g.tabs).find((tab) => tab.label.includes(label)); -suite("Run All — Real API Calls", () => { +suite('Run All — Real API Calls', () => { suiteSetup(async function () { this.timeout(30000); await activateExtension(); @@ -29,15 +24,12 @@ suite("Run All — Real API Calls", () => { await closeAllEditors(); }); - test("runAll command is registered", async () => { + test('runAll command is registered', async () => { const commands = await vscode.commands.getCommands(true); - assert.ok( - commands.includes(CMD_RUN_ALL), - "runAll command should be registered" - ); + assert.ok(commands.includes(CMD_RUN_ALL), 'runAll command should be registered'); }); - test("runAll opens a response or playlist panel after execution", async function () { + test('runAll opens a response or playlist panel after execution', async function () { this.timeout(60000); await closeAllEditors(); await sleep(500); @@ -48,15 +40,15 @@ suite("Run All — Real API Calls", () => { () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined || findTabByLabel(PLAYLIST_PANEL_TITLE) !== 
undefined, - 30000 + 30000, ); const hasResponse = findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - hasPlaylist = findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined; + hasPlaylist = findTabByLabel(PLAYLIST_PANEL_TITLE) !== undefined; assert.ok( hasResponse || hasPlaylist, - `Either '${RESPONSE_PANEL_TITLE}' or '${PLAYLIST_PANEL_TITLE}' tab must exist after runAll` + `Either '${RESPONSE_PANEL_TITLE}' or '${PLAYLIST_PANEL_TITLE}' tab must exist after runAll`, ); }); }); diff --git a/src/Nap.VsCode/src/test/e2e/runfile.e2e.test.ts b/src/Napper.VsCode/src/test/e2e/runfile.e2e.test.ts similarity index 53% rename from src/Nap.VsCode/src/test/e2e/runfile.e2e.test.ts rename to src/Napper.VsCode/src/test/e2e/runfile.e2e.test.ts index 553f59e..5fd59f8 100644 --- a/src/Nap.VsCode/src/test/e2e/runfile.e2e.test.ts +++ b/src/Napper.VsCode/src/test/e2e/runfile.e2e.test.ts @@ -1,5 +1,6 @@ -import * as assert from "assert"; -import * as vscode from "vscode"; +// Specs: vscode-commands, vscode-editor, vscode-layout +import * as assert from 'assert'; +import * as vscode from 'vscode'; import { activateExtension, closeAllEditors, @@ -7,20 +8,18 @@ import { openDocument, sleep, waitForCondition, -} from "../helpers/helpers"; +} from '../helpers/helpers'; import { CMD_OPEN_RESPONSE, CMD_RUN_FILE, PLAYLIST_PANEL_TITLE, RESPONSE_PANEL_TITLE, -} from "../../constants"; +} from '../../constants'; const findTabByLabel = (label: string): vscode.Tab | undefined => - vscode.window.tabGroups.all - .flatMap((g) => g.tabs) - .find((tab) => tab.label.includes(label)); + vscode.window.tabGroups.all.flatMap((g) => g.tabs).find((tab) => tab.label.includes(label)); -suite("Run File — Real API Calls", () => { +suite('Run File — Real API Calls', () => { suiteSetup(async function () { this.timeout(30000); await activateExtension(); @@ -31,146 +30,119 @@ suite("Run File — Real API Calls", () => { await closeAllEditors(); }); - test("run shorthand GET against httpbin.org opens response panel", async 
function () { + test('run shorthand GET against httpbin.org opens response panel', async function () { this.timeout(30000); await closeAllEditors(); await sleep(500); - const doc = await openDocument("get-httpbin.nap"); - assert.strictEqual(doc.languageId, "nap", "Should have nap language mode"); + const doc = await openDocument('get-httpbin.nap'); + assert.strictEqual(doc.languageId, 'nap', 'Should have nap language mode'); await executeCommand(CMD_RUN_FILE, doc.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.ok( - responseTab, - `Tab '${RESPONSE_PANEL_TITLE}' must exist after running GET request` - ); + assert.ok(responseTab, `Tab '${RESPONSE_PANEL_TITLE}' must exist after running GET request`); const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); assert.strictEqual( playlistTab, undefined, - `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single .nap file` + `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single .nap file`, ); }); - test("run POST against jsonplaceholder opens response panel", async function () { + test('run POST against jsonplaceholder opens response panel', async function () { this.timeout(30000); await closeAllEditors(); await sleep(500); - const doc = await openDocument("post-jsonplaceholder.nap"); - assert.strictEqual(doc.languageId, "nap", "Should have nap language mode"); + const doc = await openDocument('post-jsonplaceholder.nap'); + assert.strictEqual(doc.languageId, 'nap', 'Should have nap language mode'); await executeCommand(CMD_RUN_FILE, doc.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); const responseTab = 
findTabByLabel(RESPONSE_PANEL_TITLE); - assert.ok( - responseTab, - `Tab '${RESPONSE_PANEL_TITLE}' must exist after running POST request` - ); + assert.ok(responseTab, `Tab '${RESPONSE_PANEL_TITLE}' must exist after running POST request`); const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); assert.strictEqual( playlistTab, undefined, - `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single POST .nap file` + `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single POST .nap file`, ); }); - test("run GET against jsonplaceholder /users opens response panel", async function () { + test('run GET against jsonplaceholder /users opens response panel', async function () { this.timeout(30000); await closeAllEditors(); await sleep(500); - const doc = await openDocument("get-users.nap"); - assert.strictEqual(doc.languageId, "nap", "Should have nap language mode"); + const doc = await openDocument('get-users.nap'); + assert.strictEqual(doc.languageId, 'nap', 'Should have nap language mode'); await executeCommand(CMD_RUN_FILE, doc.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.ok( - responseTab, - `Tab '${RESPONSE_PANEL_TITLE}' must exist after running /users GET` - ); + assert.ok(responseTab, `Tab '${RESPONSE_PANEL_TITLE}' must exist after running /users GET`); const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); assert.strictEqual( playlistTab, undefined, - `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single GET .nap file` + `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single GET .nap file`, ); }); - test("run petstore list-pets with [request] section opens response panel", async function () { + test('run petstore list-pets with [request] section opens response panel', async function () { 
this.timeout(30000); await closeAllEditors(); await sleep(500); - const doc = await openDocument("petstore/list-pets.nap"); - assert.strictEqual(doc.languageId, "nap", "Should have nap language mode"); + const doc = await openDocument('petstore/list-pets.nap'); + assert.strictEqual(doc.languageId, 'nap', 'Should have nap language mode'); await executeCommand(CMD_RUN_FILE, doc.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); assert.ok( responseTab, - `Tab '${RESPONSE_PANEL_TITLE}' must exist after running petstore request` + `Tab '${RESPONSE_PANEL_TITLE}' must exist after running petstore request`, ); const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); assert.strictEqual( playlistTab, undefined, - `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single petstore .nap file` + `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after running a single petstore .nap file`, ); }); - test("running via URI opens same response panel as via document", async function () { + test('running via URI opens same response panel as via document', async function () { this.timeout(30000); await closeAllEditors(); await sleep(500); - const doc = await openDocument("get-httpbin.nap"); + const doc = await openDocument('get-httpbin.nap'); await executeCommand(CMD_RUN_FILE, doc.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); const responseTab = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.ok( - responseTab, - `Tab '${RESPONSE_PANEL_TITLE}' must exist when running via URI` - ); + assert.ok(responseTab, `Tab '${RESPONSE_PANEL_TITLE}' must exist when running via URI`); const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); 
assert.strictEqual( playlistTab, undefined, - `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist when running via URI` + `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist when running via URI`, ); const responseTabs = vscode.window.tabGroups.all @@ -179,22 +151,19 @@ suite("Run File — Real API Calls", () => { assert.strictEqual( responseTabs.length, 1, - "Only one response panel tab should exist — panel must be reused, not duplicated" + 'Only one response panel tab should exist — panel must be reused, not duplicated', ); }); - test("open response command shows panel when result exists", async function () { + test('open response command shows panel when result exists', async function () { this.timeout(30000); await closeAllEditors(); await sleep(500); - const doc = await openDocument("get-httpbin.nap"); + const doc = await openDocument('get-httpbin.nap'); await executeCommand(CMD_RUN_FILE, doc.uri); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 10000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 10000); await closeAllEditors(); await sleep(500); @@ -202,27 +171,21 @@ suite("Run File — Real API Calls", () => { assert.strictEqual( findTabByLabel(RESPONSE_PANEL_TITLE), undefined, - "Response panel should be gone after closing all editors" + 'Response panel should be gone after closing all editors', ); await executeCommand(CMD_OPEN_RESPONSE); - await waitForCondition( - () => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, - 5000 - ); + await waitForCondition(() => findTabByLabel(RESPONSE_PANEL_TITLE) !== undefined, 5000); const reopenedTab = findTabByLabel(RESPONSE_PANEL_TITLE); - assert.ok( - reopenedTab, - `Tab '${RESPONSE_PANEL_TITLE}' must reappear via openResponse command` - ); + assert.ok(reopenedTab, `Tab '${RESPONSE_PANEL_TITLE}' must reappear via openResponse command`); const playlistTab = findTabByLabel(PLAYLIST_PANEL_TITLE); assert.strictEqual( playlistTab, undefined, - `Tab 
'${PLAYLIST_PANEL_TITLE}' must NOT exist after reopening response for a .nap file` + `Tab '${PLAYLIST_PANEL_TITLE}' must NOT exist after reopening response for a .nap file`, ); const reopenedTabs = vscode.window.tabGroups.all @@ -231,7 +194,7 @@ suite("Run File — Real API Calls", () => { assert.strictEqual( reopenedTabs.length, 1, - "Only one response panel tab should exist after reopen" + 'Only one response panel tab should exist after reopen', ); }); }); diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/ecommerce-spec.json b/src/Napper.VsCode/src/test/fixtures/workspace/ecommerce-spec.json similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/ecommerce-spec.json rename to src/Napper.VsCode/src/test/fixtures/workspace/ecommerce-spec.json diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/get-httpbin.nap b/src/Napper.VsCode/src/test/fixtures/workspace/get-httpbin.nap similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/get-httpbin.nap rename to src/Napper.VsCode/src/test/fixtures/workspace/get-httpbin.nap diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/get-users.nap b/src/Napper.VsCode/src/test/fixtures/workspace/get-users.nap similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/get-users.nap rename to src/Napper.VsCode/src/test/fixtures/workspace/get-users.nap diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/.napenv b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/.napenv similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/.napenv rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/.napenv diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/.napenv.staging b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/.napenv.staging similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/.napenv.staging rename to 
src/Napper.VsCode/src/test/fixtures/workspace/petstore/.napenv.staging diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/csx-compile-error.naplist b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/csx-compile-error.naplist similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/csx-compile-error.naplist rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/csx-compile-error.naplist diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/csx-fail.naplist b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/csx-fail.naplist similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/csx-fail.naplist rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/csx-fail.naplist diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/csx-multi.naplist b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/csx-multi.naplist similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/csx-multi.naplist rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/csx-multi.naplist diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/csx-only.naplist b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/csx-only.naplist similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/csx-only.naplist rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/csx-only.naplist diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/csx-slow.naplist b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/csx-slow.naplist similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/csx-slow.naplist rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/csx-slow.naplist diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/full.naplist b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/full.naplist similarity index 100% rename from 
src/Nap.VsCode/src/test/fixtures/workspace/petstore/full.naplist rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/full.naplist diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/get-pet.nap b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/get-pet.nap similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/get-pet.nap rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/get-pet.nap diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/list-pets.nap b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/list-pets.nap similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/list-pets.nap rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/list-pets.nap diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/smoke.naplist b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/smoke.naplist similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/smoke.naplist rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/smoke.naplist diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/with-csx-script.naplist b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/with-csx-script.naplist similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/with-csx-script.naplist rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/with-csx-script.naplist diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/with-mixed-scripts.naplist b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/with-mixed-scripts.naplist similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/with-mixed-scripts.naplist rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/with-mixed-scripts.naplist diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/petstore/with-script.naplist 
b/src/Napper.VsCode/src/test/fixtures/workspace/petstore/with-script.naplist similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/petstore/with-script.naplist rename to src/Napper.VsCode/src/test/fixtures/workspace/petstore/with-script.naplist diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/post-jsonplaceholder.nap b/src/Napper.VsCode/src/test/fixtures/workspace/post-jsonplaceholder.nap similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/post-jsonplaceholder.nap rename to src/Napper.VsCode/src/test/fixtures/workspace/post-jsonplaceholder.nap diff --git a/src/Napper.VsCode/src/test/fixtures/workspace/sample.http b/src/Napper.VsCode/src/test/fixtures/workspace/sample.http new file mode 100644 index 0000000..078667e --- /dev/null +++ b/src/Napper.VsCode/src/test/fixtures/workspace/sample.http @@ -0,0 +1,14 @@ +### Get Users +GET https://jsonplaceholder.typicode.com/users + +### Get Single User +GET https://jsonplaceholder.typicode.com/users/1 + +### Create User +POST https://jsonplaceholder.typicode.com/users +Content-Type: application/json + +{ + "name": "John Doe", + "email": "john@example.com" +} diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/scripts/compile-error.csx b/src/Napper.VsCode/src/test/fixtures/workspace/scripts/compile-error.csx similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/scripts/compile-error.csx rename to src/Napper.VsCode/src/test/fixtures/workspace/scripts/compile-error.csx diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/scripts/echo.csx b/src/Napper.VsCode/src/test/fixtures/workspace/scripts/echo.csx similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/scripts/echo.csx rename to src/Napper.VsCode/src/test/fixtures/workspace/scripts/echo.csx diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/scripts/echo.fsx b/src/Napper.VsCode/src/test/fixtures/workspace/scripts/echo.fsx similarity index 100% rename from 
src/Nap.VsCode/src/test/fixtures/workspace/scripts/echo.fsx rename to src/Napper.VsCode/src/test/fixtures/workspace/scripts/echo.fsx diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/scripts/fail.csx b/src/Napper.VsCode/src/test/fixtures/workspace/scripts/fail.csx similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/scripts/fail.csx rename to src/Napper.VsCode/src/test/fixtures/workspace/scripts/fail.csx diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/scripts/multi-output.csx b/src/Napper.VsCode/src/test/fixtures/workspace/scripts/multi-output.csx similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/scripts/multi-output.csx rename to src/Napper.VsCode/src/test/fixtures/workspace/scripts/multi-output.csx diff --git a/src/Nap.VsCode/src/test/fixtures/workspace/scripts/slow.csx b/src/Napper.VsCode/src/test/fixtures/workspace/scripts/slow.csx similarity index 100% rename from src/Nap.VsCode/src/test/fixtures/workspace/scripts/slow.csx rename to src/Napper.VsCode/src/test/fixtures/workspace/scripts/slow.csx diff --git a/src/Nap.VsCode/src/test/helpers/helpers.ts b/src/Napper.VsCode/src/test/helpers/helpers.ts similarity index 63% rename from src/Nap.VsCode/src/test/helpers/helpers.ts rename to src/Napper.VsCode/src/test/helpers/helpers.ts index d40290c..9b3c78f 100644 --- a/src/Nap.VsCode/src/test/helpers/helpers.ts +++ b/src/Napper.VsCode/src/test/helpers/helpers.ts @@ -1,8 +1,8 @@ -import * as vscode from "vscode"; -import * as path from "path"; -import * as fs from "fs"; +import * as vscode from 'vscode'; +import * as path from 'path'; +import * as fs from 'fs'; -const EXTENSION_ID = "nimblesite.napper"; +const EXTENSION_ID = 'nimblesite.napper'; interface TestContext { readonly extension: vscode.Extension; @@ -19,14 +19,14 @@ export const activateExtension = async (): Promise => { await extension.activate(); } - const {workspaceFolders} = vscode.workspace; + const { workspaceFolders } = vscode.workspace; 
if (!workspaceFolders || workspaceFolders.length === 0) { - throw new Error("No workspace folder open"); + throw new Error('No workspace folder open'); } const [firstFolder] = workspaceFolders; if (!firstFolder) { - throw new Error("No workspace folder open"); + throw new Error('No workspace folder open'); } return { @@ -35,19 +35,20 @@ export const activateExtension = async (): Promise => { }; }; -export const sleep = async (ms: number): Promise => - { await new Promise((resolve) => { +export const sleep = async (ms: number): Promise => { + await new Promise((resolve) => { setTimeout(resolve, ms); - }); }; + }); +}; export const getFixturePath = (relativePath: string): string => { - const {workspaceFolders} = vscode.workspace; + const { workspaceFolders } = vscode.workspace; if (!workspaceFolders || workspaceFolders.length === 0) { - throw new Error("No workspace folder open"); + throw new Error('No workspace folder open'); } const [firstFolder] = workspaceFolders; if (!firstFolder) { - throw new Error("No workspace folder open"); + throw new Error('No workspace folder open'); } return path.join(firstFolder.uri.fsPath, relativePath); }; @@ -60,22 +61,18 @@ export const getExtensionPath = (relativePath: string): string => { return path.join(extension.extensionPath, relativePath); }; -export const fileExists = (filePath: string): boolean => - fs.existsSync(filePath); +export const fileExists = (filePath: string): boolean => fs.existsSync(filePath); export const readFixtureFile = (relativePath: string): string => - fs.readFileSync(getFixturePath(relativePath), "utf-8"); + fs.readFileSync(getFixturePath(relativePath), 'utf-8'); -export const writeFixtureFile = ( - relativePath: string, - content: string -): void => { +export const writeFixtureFile = (relativePath: string, content: string): void => { const fullPath = getFixturePath(relativePath), - dir = path.dirname(fullPath); + dir = path.dirname(fullPath); if (!fs.existsSync(dir)) { fs.mkdirSync(dir, { recursive: 
true }); } - fs.writeFileSync(fullPath, content, "utf-8"); + fs.writeFileSync(fullPath, content, 'utf-8'); }; export const deleteFixtureFile = (relativePath: string): void => { @@ -88,7 +85,7 @@ export const deleteFixtureFile = (relativePath: string): void => { export const waitForCondition = async ( condition: () => boolean | Promise, timeout = 10000, - interval = 200 + interval = 200, ): Promise => { const startTime = Date.now(); while (Date.now() - startTime < timeout) { @@ -100,42 +97,38 @@ export const waitForCondition = async ( throw new Error(`Condition not met within ${timeout}ms`); }; -export const executeCommand = async ( - command: string, - ...args: unknown[] -): Promise => vscode.commands.executeCommand(command, ...args); +export const executeCommand = async (command: string, ...args: unknown[]): Promise => + vscode.commands.executeCommand(command, ...args); export const getRegisteredCommands = async (): Promise => vscode.commands.getCommands(true); -export const openDocument = async ( - relativePath: string -): Promise => { +export const openDocument = async (relativePath: string): Promise => { const fullPath = getFixturePath(relativePath), - doc = await vscode.workspace.openTextDocument(fullPath); + doc = await vscode.workspace.openTextDocument(fullPath); await vscode.window.showTextDocument(doc); return doc; }; export const closeAllEditors = async (): Promise => { - await vscode.commands.executeCommand("workbench.action.closeAllEditors"); + await vscode.commands.executeCommand('workbench.action.closeAllEditors'); }; export const extractStepLines = (content: string): string[] => { - const lines = content.split("\n"), - steps: string[] = []; + const lines = content.split('\n'), + steps: string[] = []; let inSteps = false; for (const line of lines) { const trimmed = line.trim(); - if (trimmed === "[steps]") { + if (trimmed === '[steps]') { inSteps = true; continue; } - if (trimmed.startsWith("[") && trimmed.endsWith("]")) { + if 
(trimmed.startsWith('[') && trimmed.endsWith(']')) { inSteps = false; continue; } - if (inSteps && trimmed.length > 0 && !trimmed.startsWith("#")) { + if (inSteps && trimmed.length > 0 && !trimmed.startsWith('#')) { steps.push(trimmed); } } diff --git a/src/Napper.VsCode/src/test/unit/explorerProvider.test.ts b/src/Napper.VsCode/src/test/unit/explorerProvider.test.ts new file mode 100644 index 0000000..cb8cb0f --- /dev/null +++ b/src/Napper.VsCode/src/test/unit/explorerProvider.test.ts @@ -0,0 +1,408 @@ +// Specs: vscode-explorer +import * as assert from 'assert'; +import { + appendStepToPlaylist, + createFileNode, + createFolderNode, + createPlaylistNode, + createPlaylistSectionNode, + parsePlaylistStepPaths, + updatePlaylistName, +} from '../../explorerProvider'; +import { type RunResult, RunState, ok, err } from '../../types'; +import { + CONTEXT_FOLDER, + CONTEXT_PLAYLIST, + CONTEXT_PLAYLIST_SECTION, + CONTEXT_REQUEST_FILE, + CONTEXT_SCRIPT_FILE, + NAP_NAME_KEY_PREFIX, + NAP_NAME_KEY_SUFFIX, + PLAYLIST_SECTION_LABEL, + SECTION_STEPS, +} from '../../constants'; + +const FAKE_NAP_PATH = '/workspace/test.nap', + FAKE_NAPLIST_PATH = '/workspace/smoke.naplist', + FAKE_FOLDER_PATH = '/workspace/petstore', + GET_CONTENT = '[request]\nmethod = GET\nurl = https://example.com\n', + POST_CONTENT = '[request]\nmethod = POST\nurl = https://example.com\n', + SHORTHAND_GET_CONTENT = 'GET https://example.com\n', + SHORTHAND_DELETE_CONTENT = 'DELETE https://example.com/1\n', + NO_METHOD_CONTENT = '[request]\nurl = https://example.com\n', + makePassedResult = (file: string): RunResult => ({ + file, + passed: true, + statusCode: 200, + duration: 42, + assertions: [{ target: 'status', passed: true, expected: '200', actual: '200' }], + }), + makeFailedResult = (file: string): RunResult => ({ + file, + passed: false, + statusCode: 404, + duration: 31, + assertions: [{ target: 'status', passed: false, expected: '200', actual: '404' }], + }), + makeErrorResult = (file: string): 
RunResult => ({ + file, + passed: false, + error: 'Connection refused', + assertions: [], + }); + +suite('explorerProvider — createFileNode', () => { + test('idle state when no results exist', () => { + const emptyResults = new Map(), + node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, emptyResults); + + assert.strictEqual(node.runState, RunState.Idle, 'should be Idle with no results'); + assert.strictEqual(node.isDirectory, false); + assert.strictEqual(node.contextValue, CONTEXT_REQUEST_FILE); + }); + + test('passed state with green icon when result.passed is true', () => { + const results = new Map(); + results.set(FAKE_NAP_PATH, makePassedResult(FAKE_NAP_PATH)); + const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); + + assert.strictEqual( + node.runState, + RunState.Passed, + 'should be Passed when result.passed is true', + ); + }); + + test('failed state with red icon when result.passed is false', () => { + const results = new Map(); + results.set(FAKE_NAP_PATH, makeFailedResult(FAKE_NAP_PATH)); + const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); + + assert.strictEqual( + node.runState, + RunState.Failed, + 'should be Failed when result.passed is false', + ); + }); + + test('error state when result has error string', () => { + const results = new Map(); + results.set(FAKE_NAP_PATH, makeErrorResult(FAKE_NAP_PATH)); + const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); + + assert.strictEqual(node.runState, RunState.Error, 'should be Error when result.error is set'); + }); + + test('result for different file does not affect this node', () => { + const otherPath = '/workspace/other.nap', + results = new Map(); + results.set(otherPath, makePassedResult(otherPath)); + const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); + + assert.strictEqual( + node.runState, + RunState.Idle, + 'should be Idle when result is for different file', + ); + }); + + test('extracts GET method from key-value format', () => { + const 
node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, new Map()); + assert.strictEqual(node.httpMethod, 'GET'); + }); + + test('extracts POST method from key-value format', () => { + const node = createFileNode(FAKE_NAP_PATH, POST_CONTENT, new Map()); + assert.strictEqual(node.httpMethod, 'POST'); + }); + + test('extracts GET method from shorthand format', () => { + const node = createFileNode(FAKE_NAP_PATH, SHORTHAND_GET_CONTENT, new Map()); + assert.strictEqual(node.httpMethod, 'GET'); + }); + + test('extracts DELETE method from shorthand format', () => { + const node = createFileNode(FAKE_NAP_PATH, SHORTHAND_DELETE_CONTENT, new Map()); + assert.strictEqual(node.httpMethod, 'DELETE'); + }); + + test('no method extracted when content has no method line', () => { + const node = createFileNode(FAKE_NAP_PATH, NO_METHOD_CONTENT, new Map()); + assert.strictEqual(node.httpMethod, undefined); + }); + + test('naplist files get playlist context value', () => { + const node = createFileNode(FAKE_NAPLIST_PATH, '[meta]\nname = smoke\n', new Map()); + assert.strictEqual(node.contextValue, CONTEXT_PLAYLIST); + }); + + test('naplist files do not extract http method', () => { + const node = createFileNode(FAKE_NAPLIST_PATH, 'GET https://example.com\n', new Map()); + assert.strictEqual(node.httpMethod, undefined); + }); + + test('label is filename without extension', () => { + const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, new Map()); + assert.strictEqual(node.label, 'test'); + }); + + test('passed result stays passed even with multiple assertions', () => { + const results = new Map(); + results.set(FAKE_NAP_PATH, { + file: FAKE_NAP_PATH, + passed: true, + statusCode: 200, + duration: 50, + assertions: [ + { target: 'status', passed: true, expected: '200', actual: '200' }, + { target: 'body.id', passed: true, expected: 'exists', actual: '1' }, + { target: 'body.title', passed: true, expected: 'Test', actual: 'Test' }, + ], + }); + const node = createFileNode(FAKE_NAP_PATH, 
GET_CONTENT, results); + + assert.strictEqual(node.runState, RunState.Passed); + }); + + test('failed result even when some assertions pass', () => { + const results = new Map(); + results.set(FAKE_NAP_PATH, { + file: FAKE_NAP_PATH, + passed: false, + statusCode: 200, + duration: 50, + assertions: [ + { target: 'status', passed: true, expected: '200', actual: '200' }, + { target: 'body.name', passed: false, expected: 'Alice', actual: 'Bob' }, + ], + }); + const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); + + assert.strictEqual(node.runState, RunState.Failed, 'should be Failed when passed is false'); + }); + + test('error takes priority over passed field', () => { + const results = new Map(); + results.set(FAKE_NAP_PATH, { + file: FAKE_NAP_PATH, + passed: false, + error: 'timeout', + assertions: [], + }); + const node = createFileNode(FAKE_NAP_PATH, GET_CONTENT, results); + + assert.strictEqual( + node.runState, + RunState.Error, + 'error field should produce Error state, not Failed', + ); + }); +}); + +suite('explorerProvider — createFolderNode', () => { + test('folder node is always idle', () => { + const child = createFileNode(FAKE_NAP_PATH, GET_CONTENT, new Map()), + folder = createFolderNode(FAKE_FOLDER_PATH, [child]); + + assert.strictEqual(folder.runState, RunState.Idle); + assert.strictEqual(folder.isDirectory, true); + assert.strictEqual(folder.contextValue, CONTEXT_FOLDER); + }); + + test('folder label is directory basename', () => { + const folder = createFolderNode(FAKE_FOLDER_PATH, []); + assert.strictEqual(folder.label, 'petstore'); + }); + + test('folder children are preserved', () => { + const child1 = createFileNode(FAKE_NAP_PATH, GET_CONTENT, new Map()), + child2 = createFileNode('/workspace/other.nap', POST_CONTENT, new Map()), + folder = createFolderNode(FAKE_FOLDER_PATH, [child1, child2]); + + assert.strictEqual(folder.children?.length, 2); + }); +}); + +suite('explorerProvider — script file context', () => { + test('.fsx file 
gets script context value', () => { + const node = createFileNode('/workspace/echo.fsx', '', new Map()); + assert.strictEqual(node.contextValue, CONTEXT_SCRIPT_FILE); + assert.strictEqual(node.httpMethod, undefined, 'script files must not extract HTTP method'); + }); + + test('.csx file gets script context value', () => { + const node = createFileNode('/workspace/setup.csx', '', new Map()); + assert.strictEqual(node.contextValue, CONTEXT_SCRIPT_FILE); + assert.strictEqual(node.httpMethod, undefined, 'script files must not extract HTTP method'); + }); +}); + +suite('explorerProvider — parsePlaylistStepPaths', () => { + test('extracts step paths from [steps] section', () => { + const content = `[meta]\nname = "smoke"\n\n${SECTION_STEPS}\nget-users.nap\nget-pet.nap\n`; + const steps = parsePlaylistStepPaths(content); + + assert.strictEqual(steps.length, 2, 'must extract exactly 2 step paths'); + assert.strictEqual(steps[0], 'get-users.nap'); + assert.strictEqual(steps[1], 'get-pet.nap'); + }); + + test('skips blank lines and comments in steps section', () => { + const content = `${SECTION_STEPS}\nstep1.nap\n\n# a comment\nstep2.nap\n`; + const steps = parsePlaylistStepPaths(content); + + assert.strictEqual(steps.length, 2, 'blank lines and comments must be skipped'); + assert.strictEqual(steps[0], 'step1.nap'); + assert.strictEqual(steps[1], 'step2.nap'); + }); + + test('returns empty array when no [steps] section exists', () => { + const content = '[meta]\nname = "test"\n'; + const steps = parsePlaylistStepPaths(content); + + assert.strictEqual(steps.length, 0, 'must return empty when no [steps] section'); + }); + + test('stops collecting at next section header', () => { + const content = `${SECTION_STEPS}\nstep1.nap\n[scripts]\nscript.fsx\n`; + const steps = parsePlaylistStepPaths(content); + + assert.strictEqual(steps.length, 1, 'must stop at next section header'); + assert.strictEqual(steps[0], 'step1.nap'); + }); + + test('trims whitespace from step paths', () => 
{ + const content = `${SECTION_STEPS}\n step1.nap \n`; + const steps = parsePlaylistStepPaths(content); + + assert.strictEqual(steps[0], 'step1.nap', 'step paths must be trimmed'); + }); +}); + +suite('explorerProvider — createPlaylistNode', () => { + test('creates node with playlist context and children', () => { + const child = createFileNode(FAKE_NAP_PATH, GET_CONTENT, new Map()), + node = createPlaylistNode(FAKE_NAPLIST_PATH, new Map(), [child]); + + assert.strictEqual(node.label, 'smoke', 'label must be filename without extension'); + assert.strictEqual(node.filePath, FAKE_NAPLIST_PATH); + assert.strictEqual(node.isDirectory, false); + assert.strictEqual(node.contextValue, CONTEXT_PLAYLIST); + assert.strictEqual(node.runState, RunState.Idle, 'idle when no results'); + assert.strictEqual(node.children?.length, 1, 'must include step children'); + }); + + test('reflects run state from results', () => { + const results = new Map(); + results.set(FAKE_NAPLIST_PATH, makePassedResult(FAKE_NAPLIST_PATH)); + const node = createPlaylistNode(FAKE_NAPLIST_PATH, results, []); + + assert.strictEqual(node.runState, RunState.Passed, 'must reflect passed state'); + }); +}); + +suite('explorerProvider — createPlaylistSectionNode', () => { + test('creates section node with correct label and context', () => { + const child = createFileNode(FAKE_NAP_PATH, GET_CONTENT, new Map()), + section = createPlaylistSectionNode([child]); + + assert.strictEqual(section.label, PLAYLIST_SECTION_LABEL); + assert.strictEqual(section.filePath, ''); + assert.strictEqual(section.isDirectory, false); + assert.strictEqual(section.contextValue, CONTEXT_PLAYLIST_SECTION); + assert.strictEqual(section.runState, RunState.Idle, 'section node is always idle'); + assert.strictEqual(section.children?.length, 1, 'must include children'); + }); + + test('works with empty children array', () => { + const section = createPlaylistSectionNode([]); + + assert.strictEqual(section.children?.length, 0); + 
assert.strictEqual(section.label, PLAYLIST_SECTION_LABEL); + }); +}); + +suite('explorerProvider — appendStepToPlaylist', () => { + test('adds [steps] section when none exists', () => { + const content = '[meta]\nname = "test"\n', + result = appendStepToPlaylist(content, 'new-step.nap'); + + assert.ok(result.includes(SECTION_STEPS), 'must add [steps] header'); + assert.ok(result.includes('new-step.nap'), 'must add the step path'); + assert.ok( + result.indexOf(SECTION_STEPS) < result.indexOf('new-step.nap'), + '[steps] must appear before the step path', + ); + }); + + test('appends to existing [steps] section', () => { + const content = `[meta]\nname = "test"\n\n${SECTION_STEPS}\nexisting.nap\n`, + result = appendStepToPlaylist(content, 'new-step.nap'); + + assert.ok(result.includes('existing.nap'), 'must keep existing steps'); + assert.ok(result.includes('new-step.nap'), 'must add new step'); + }); + + test('inserts before next section when [steps] is followed by another section', () => { + const content = `${SECTION_STEPS}\nexisting.nap\n[scripts]\nscript.fsx\n`, + result = appendStepToPlaylist(content, 'new-step.nap'); + + assert.ok(result.includes('new-step.nap'), 'must add new step'); + const newStepIdx = result.indexOf('new-step.nap'), + scriptsIdx = result.indexOf('[scripts]'); + assert.ok(newStepIdx < scriptsIdx, 'new step must be inserted before the [scripts] section'); + }); +}); + +suite('explorerProvider — updatePlaylistName', () => { + test('replaces existing name line', () => { + const content = `[meta]\n${NAP_NAME_KEY_PREFIX}old-name${NAP_NAME_KEY_SUFFIX}\n\n${SECTION_STEPS}\nstep.nap\n`, + result = updatePlaylistName(content, 'new-name'); + + assert.ok( + result.includes(`${NAP_NAME_KEY_PREFIX}new-name${NAP_NAME_KEY_SUFFIX}`), + 'must contain the new name', + ); + assert.ok(!result.includes('old-name'), 'old name must be replaced'); + assert.ok(result.includes('step.nap'), 'non-name lines must be preserved'); + }); + + test('preserves content when 
no name line exists', () => { + const content = `${SECTION_STEPS}\nstep.nap\n`, + result = updatePlaylistName(content, 'new-name'); + + assert.strictEqual(result, content, 'content must be unchanged when no name line'); + }); +}); + +suite('types — ok and err Result constructors', () => { + test('ok wraps value with ok: true', () => { + const result = ok(42); + + assert.strictEqual(result.ok, true, 'ok result must have ok: true'); + assert.strictEqual(result.value, 42, 'ok result must carry the value'); + }); + + test('ok works with string value', () => { + const result = ok('hello'); + + assert.strictEqual(result.ok, true); + assert.strictEqual(result.value, 'hello'); + }); + + test('err wraps error with ok: false', () => { + const result = err('something failed'); + + assert.strictEqual(result.ok, false, 'err result must have ok: false'); + assert.strictEqual(result.error, 'something failed', 'err result must carry the error'); + }); + + test('ok and err produce discriminated union', () => { + const success = ok('data'), + failure = err('oops'); + + assert.strictEqual(success.ok, true); + assert.strictEqual(failure.ok, false); + assert.notStrictEqual(success.ok, failure.ok, 'ok and err must be distinguishable'); + }); +}); diff --git a/src/Napper.VsCode/src/test/unit/htmlUtils.test.ts b/src/Napper.VsCode/src/test/unit/htmlUtils.test.ts new file mode 100644 index 0000000..bd2392e --- /dev/null +++ b/src/Napper.VsCode/src/test/unit/htmlUtils.test.ts @@ -0,0 +1,790 @@ +// Specs: vscode-layout +import * as assert from 'assert'; +import { + buildResultDetailHtml, + buildRequestGroupHtml, + buildResponseGroupHtml, + buildErrorHtml, + buildLogHtml, + buildCollapsibleSection, + buildHeadersTableRows, + escapeHtml, + formatBodyHtml, + highlightJson, +} from '../../htmlUtils'; +import type { RunResult } from '../../types'; +import { + NO_REQUEST_HEADERS, + SECTION_LABEL_ASSERTIONS, + SECTION_LABEL_BODY, + SECTION_LABEL_ERROR, + SECTION_LABEL_OUTPUT, + 
SECTION_LABEL_REQUEST, + SECTION_LABEL_REQUEST_BODY, + SECTION_LABEL_REQUEST_HEADERS, + SECTION_LABEL_RESPONSE, + SECTION_LABEL_RESPONSE_HEADERS, +} from '../../constants'; + +const MOCK_FULL_RESULT: RunResult = { + file: '/workspace/api/get-users.nap', + passed: true, + statusCode: 200, + duration: 150, + requestMethod: 'GET', + requestUrl: 'https://api.example.com/users', + requestHeaders: { Authorization: 'Bearer tok123', Accept: 'application/json' }, + headers: { 'content-type': 'application/json', 'x-request-id': 'abc-def' }, + body: '{"users":[{"id":1}]}', + assertions: [ + { target: 'status', passed: true, expected: '200', actual: '200' }, + { + target: 'headers.Content-Type', + passed: true, + expected: 'application/json', + actual: 'application/json', + }, + ], + }, + MOCK_FAILED_RESULT: RunResult = { + file: '/workspace/api/delete-user.nap', + passed: false, + statusCode: 403, + duration: 42, + requestMethod: 'DELETE', + requestUrl: 'https://api.example.com/users/99', + requestHeaders: {}, + headers: { 'content-type': 'text/plain' }, + body: 'Forbidden', + error: 'Access denied: insufficient permissions', + assertions: [{ target: 'status', passed: false, expected: '200', actual: '403' }], + }, + MOCK_MINIMAL_RESULT: RunResult = { + file: '/workspace/api/health.nap', + passed: true, + assertions: [], + }, + MOCK_SCRIPT_RESULT: RunResult = { + file: '/workspace/scripts/setup.fsx', + passed: true, + duration: 500, + log: ['Seeding database...', 'Created 10 records', 'Done'], + assertions: [], + }, + MOCK_NO_URL_RESULT: RunResult = { + file: '/workspace/api/check.nap', + passed: true, + statusCode: 200, + requestHeaders: { Accept: 'text/html' }, + headers: { 'content-type': 'text/html' }, + body: '', + assertions: [{ target: 'status', passed: true, expected: '200', actual: '200' }], + }, + MOCK_XSS_RESULT: RunResult = { + file: '/workspace/api/xss.nap', + passed: false, + statusCode: 200, + requestMethod: 'POST', + requestUrl: 
'https://api.example.com/search?q=', + requestHeaders: { 'X-Evil': '' }, + headers: { 'x-injected': 'val"onmouseover=alert(1)' }, + body: '{"msg":""}', + error: 'Error: bold injection', + log: ["Log line with "], + assertions: [ + { + target: 'body.', + requestBodyContentType: '', + headers: {}, + assertions: [], + }; + +suite('Result Detail HTML — Request/Response grouping', () => { + test('output has a Request details section that is NOT open', () => { + const html = buildResultDetailHtml(MOCK_FULL_RESULT); + + assert.ok( + html.includes(`
`), + 'Request section must be a
element WITHOUT the open attribute', + ); + assert.ok(html.includes(SECTION_LABEL_REQUEST), 'Request section must have the Request title'); + }); + + test('output has a Response details section that IS open', () => { + const html = buildResultDetailHtml(MOCK_FULL_RESULT); + + assert.ok( + html.includes(`
`), + 'Response section must be a
element WITH the open attribute', + ); + assert.ok( + html.includes(SECTION_LABEL_RESPONSE), + 'Response section must have the Response title', + ); + }); + + test('Request section appears before Response section', () => { + const html = buildResultDetailHtml(MOCK_FULL_RESULT), + requestIdx = html.indexOf(SECTION_LABEL_REQUEST), + responseIdx = html.indexOf(SECTION_LABEL_RESPONSE); + + assert.ok(requestIdx > -1, 'Request section must exist'); + assert.ok(responseIdx > -1, 'Response section must exist'); + assert.ok( + requestIdx < responseIdx, + 'Request section must appear before Response section in the DOM', + ); + }); + + test('Request section contains the request URL and method', () => { + const html = buildRequestGroupHtml(MOCK_FULL_RESULT); + + assert.ok( + html.includes('https://api.example.com/users'), + 'Request section must contain the request URL', + ); + assert.ok(html.includes('GET'), 'Request section must contain the HTTP method'); + assert.ok(html.includes('request-url'), 'Request URL must use the request-url CSS class'); + assert.ok(html.includes('request-method'), 'HTTP method must use the request-method CSS class'); + }); + + test('Request section contains request headers', () => { + const html = buildRequestGroupHtml(MOCK_FULL_RESULT); + + assert.ok( + html.includes(SECTION_LABEL_REQUEST_HEADERS), + 'Request section must have a Request Headers subsection', + ); + assert.ok( + html.includes('Authorization'), + 'Request headers must include the Authorization header key', + ); + assert.ok( + html.includes('Bearer tok123'), + 'Request headers must include the Authorization header value', + ); + assert.ok(html.includes('Accept'), 'Request headers must include the Accept header key'); + }); + + test('Response section contains assertions', () => { + const html = buildResponseGroupHtml(MOCK_FULL_RESULT); + + assert.ok( + html.includes(SECTION_LABEL_ASSERTIONS), + 'Response section must have an Assertions subsection', + ); + 
assert.ok(html.includes('status'), 'Assertions must include the status assertion target'); + assert.ok( + html.includes('headers.Content-Type'), + 'Assertions must include the Content-Type assertion target', + ); + }); + + test('Response section contains response headers', () => { + const html = buildResponseGroupHtml(MOCK_FULL_RESULT); + + assert.ok( + html.includes(SECTION_LABEL_RESPONSE_HEADERS), + 'Response section must have a Response Headers subsection', + ); + assert.ok(html.includes('content-type'), 'Response headers must include the content-type key'); + assert.ok(html.includes('x-request-id'), 'Response headers must include the x-request-id key'); + assert.ok(html.includes('abc-def'), 'Response headers must include the x-request-id value'); + }); + + test('Response section contains response body', () => { + const html = buildResponseGroupHtml(MOCK_FULL_RESULT); + + assert.ok(html.includes(SECTION_LABEL_BODY), 'Response section must have a Body subsection'); + assert.ok(html.includes('users'), 'Body must contain the JSON key from the response'); + }); +}); + +suite('Result Detail HTML — Error and Log sections', () => { + test('error section is open and appears before request/response groups', () => { + const html = buildResultDetailHtml(MOCK_FAILED_RESULT), + errorIdx = html.indexOf(SECTION_LABEL_ERROR), + requestIdx = html.indexOf(SECTION_LABEL_REQUEST); + + assert.ok(errorIdx > -1, 'Error section must exist for failed results'); + assert.ok(errorIdx < requestIdx, 'Error section must appear before the Request group'); + assert.ok( + html.includes('Access denied: insufficient permissions'), + 'Error section must show the error message', + ); + }); + + test('error section uses open details element', () => { + const html = buildErrorHtml('Something went wrong'); + const detailsMatch = html.indexOf('
'); + + assert.ok(detailsMatch > -1, 'Error section must be an open
element'); + }); + + test('no error section when error is undefined', () => { + const html = buildErrorHtml(undefined); + assert.strictEqual(html, '', 'Error HTML must be empty when error is undefined'); + }); + + test('no error section when error is empty string', () => { + const html = buildErrorHtml(''); + assert.strictEqual(html, '', 'Error HTML must be empty when error is empty string'); + }); + + test('log section appears and shows all log lines', () => { + const html = buildResultDetailHtml(MOCK_SCRIPT_RESULT); + + assert.ok( + html.includes(SECTION_LABEL_OUTPUT), + 'Output section must exist for results with log lines', + ); + assert.ok(html.includes('Seeding database...'), 'Log must show first log line'); + assert.ok(html.includes('Created 10 records'), 'Log must show second log line'); + assert.ok(html.includes('Done'), 'Log must show last log line'); + }); + + test('no log section when log is undefined', () => { + const html = buildLogHtml(undefined); + assert.strictEqual(html, '', 'Log HTML must be empty when log is undefined'); + }); + + test('no log section when log is empty array', () => { + const html = buildLogHtml([]); + assert.strictEqual(html, '', 'Log HTML must be empty when log is empty array'); + }); + + test('log section appears before request/response groups', () => { + const html = buildResultDetailHtml(MOCK_SCRIPT_RESULT), + logIdx = html.indexOf(SECTION_LABEL_OUTPUT), + requestIdx = html.indexOf(SECTION_LABEL_REQUEST); + + assert.ok(logIdx < requestIdx, 'Log section must appear before the Request group'); + }); +}); + +suite('Result Detail HTML — Minimal and edge-case results', () => { + test('minimal result still produces Request group', () => { + const html = buildResultDetailHtml(MOCK_MINIMAL_RESULT); + + assert.ok( + html.includes(SECTION_LABEL_REQUEST), + 'Even a minimal result must have a Request section', + ); + }); + + test('minimal result with no assertions/headers/body produces no Response group', () => { + const html = 
buildResponseGroupHtml(MOCK_MINIMAL_RESULT); + + assert.strictEqual( + html, + '', + 'Response group must be empty when there are no assertions, headers, or body', + ); + }); + + test('request group without URL shows no request-url div', () => { + const html = buildRequestGroupHtml(MOCK_MINIMAL_RESULT); + + assert.ok( + !html.includes('request-url'), + 'Request group must not contain request-url div when URL is undefined', + ); + }); + + test('request group without request headers shows empty hint', () => { + const html = buildRequestGroupHtml(MOCK_MINIMAL_RESULT); + + assert.ok( + html.includes(NO_REQUEST_HEADERS), + 'Request group must show empty-hint text when no request headers exist', + ); + }); + + test('result with empty body produces no Body subsection', () => { + const html = buildResponseGroupHtml(MOCK_EMPTY_BODY_RESULT); + + assert.ok( + !html.includes(SECTION_LABEL_BODY), + 'Response group must not contain a Body subsection when body is empty string', + ); + }); + + test('result with empty headers object produces no Response Headers subsection', () => { + const html = buildResponseGroupHtml(MOCK_EMPTY_BODY_RESULT); + + assert.ok( + !html.includes(SECTION_LABEL_RESPONSE_HEADERS), + 'Response group must not contain a Response Headers subsection when headers is empty', + ); + }); + + test('result without URL but with request headers still shows headers', () => { + const html = buildRequestGroupHtml(MOCK_NO_URL_RESULT); + + assert.ok( + !html.includes('request-url'), + 'Request group must not show request-url when URL is undefined', + ); + assert.ok( + html.includes('Accept'), + 'Request group must still show request headers when present', + ); + assert.ok(html.includes('text/html'), 'Request group must show request header values'); + }); + + test('non-JSON body is rendered as escaped plain text', () => { + const html = buildResponseGroupHtml(MOCK_INVALID_JSON_BODY); + + assert.ok(html.includes('this is not json'), 'Non-JSON body text must appear in the 
output'); + assert.ok( + !html.includes('json-key'), + 'Non-JSON body must not have JSON syntax highlighting classes', + ); + }); +}); + +suite('Result Detail HTML — Failed assertion details', () => { + test('failed assertions show expected and actual values', () => { + const html = buildResponseGroupHtml(MOCK_FAILED_RESULT); + + assert.ok(html.includes('expected'), "Failed assertion must show 'expected' label"); + assert.ok(html.includes('actual'), "Failed assertion must show 'actual' label"); + assert.ok(html.includes('200'), 'Failed assertion must show the expected value'); + assert.ok(html.includes('403'), 'Failed assertion must show the actual value'); + }); + + test('failed assertions use the fail CSS class', () => { + const html = buildResponseGroupHtml(MOCK_FAILED_RESULT); + + assert.ok( + html.includes('class="assert-row fail"'), + "Failed assertion row must have the 'fail' CSS class", + ); + }); + + test('passed assertions use the pass CSS class', () => { + const html = buildResponseGroupHtml(MOCK_FULL_RESULT); + + assert.ok( + html.includes('class="assert-row pass"'), + "Passed assertion row must have the 'pass' CSS class", + ); + }); + + test('passed assertions do NOT show expected/actual detail', () => { + const html = buildResponseGroupHtml(MOCK_FULL_RESULT); + + assert.ok( + !html.includes('assert-detail'), + 'Passed assertions must not show the expected/actual detail div', + ); + }); +}); + +suite('Result Detail HTML — XSS prevention', () => { + test('HTML in request URL is escaped', () => { + const html = buildRequestGroupHtml(MOCK_XSS_RESULT); + + assert.ok( + !html.includes(''), + 'Raw script tags in URL must be escaped', + ); + assert.ok(html.includes('<script>'), 'Script tags in URL must be HTML-escaped'); + }); + + test('HTML in request header values is escaped', () => { + const html = buildRequestGroupHtml(MOCK_XSS_RESULT); + + assert.ok( + !html.includes(''), + 'Raw HTML in request header values must be escaped', + ); + assert.ok( + 
html.includes('<img onerror=alert(1)>'), + 'HTML in request header values must be escaped', + ); + }); + + test('HTML in response header values is escaped', () => { + const html = buildResponseGroupHtml(MOCK_XSS_RESULT); + + assert.ok( + !html.includes('val"onmouseover=alert(1)'), + 'Raw quotes in response header values must be escaped', + ); + assert.ok( + html.includes('"onmouseover'), + 'Quotes in response header values must be HTML-escaped', + ); + }); + + test('HTML in error message is escaped', () => { + const html = buildErrorHtml(MOCK_XSS_RESULT.error); + + assert.ok(!html.includes('bold injection'), 'Raw HTML in error must be escaped'); + assert.ok( + html.includes('<b>bold injection</b>'), + 'HTML tags in error must be escaped', + ); + }); + + test('HTML in log lines is escaped', () => { + const html = buildLogHtml(MOCK_XSS_RESULT.log); + + assert.ok( + !html.includes(""), + 'Raw script tags in log lines must be escaped', + ); + assert.ok(html.includes('<script>'), 'Script tags in log lines must be escaped'); + }); + + test('HTML in assertion targets is escaped', () => { + const html = buildResponseGroupHtml(MOCK_XSS_RESULT); + + assert.ok(!html.includes('body.'), + 'Raw script tags in request body must be escaped', + ); + assert.ok(html.includes('<script>'), 'Script tags in request body must be HTML-escaped'); + }); + + test('HTML in request body content type is escaped', () => { + const html = buildRequestGroupHtml(MOCK_XSS_REQUEST_BODY); + + assert.ok( + !html.includes(''), + 'Raw HTML in content type hint must be escaped', + ); + assert.ok(html.includes('<img'), 'HTML in content type hint must be escaped'); + }); +}); + +suite('escapeHtml', () => { + test('escapes ampersands', () => { + assert.strictEqual(escapeHtml('a&b'), 'a&b'); + }); + + test('escapes angle brackets', () => { + assert.strictEqual(escapeHtml('
'), '<div>'); + }); + + test('escapes double quotes', () => { + assert.strictEqual(escapeHtml('a"b'), 'a"b'); + }); + + test('handles string with all special chars', () => { + assert.strictEqual(escapeHtml('&'), '<a href="x">&'); + }); + + test('returns empty string unchanged', () => { + assert.strictEqual(escapeHtml(''), ''); + }); + + test('returns plain text unchanged', () => { + assert.strictEqual(escapeHtml('hello world'), 'hello world'); + }); +}); + +suite('JSON highlighting — null, boolean, and empty object', () => { + test('null value gets json-null class', () => { + const html = highlightJson(null, 0); + + assert.ok(html.includes('json-null'), 'null must use json-null CSS class'); + assert.ok(html.includes('null'), "null must render as text 'null'"); + }); + + test('boolean true gets json-bool class', () => { + const html = highlightJson(true, 0); + + assert.ok(html.includes('json-bool'), 'boolean must use json-bool CSS class'); + assert.ok(html.includes('true'), "true must render as text 'true'"); + }); + + test('boolean false gets json-bool class', () => { + const html = highlightJson(false, 0); + + assert.ok(html.includes('json-bool'), 'boolean must use json-bool CSS class'); + assert.ok(html.includes('false'), "false must render as text 'false'"); + }); + + test('empty object renders as {}', () => { + const html = highlightJson({}, 0); + + assert.strictEqual(html, '{}', "empty object must render as '{}'"); + }); + + test('formatBodyHtml handles JSON with null and boolean values', () => { + const html = formatBodyHtml('{"active":true,"deleted":null}'); + + assert.ok(html.includes('json-bool'), 'boolean in body must be highlighted'); + assert.ok(html.includes('json-null'), 'null in body must be highlighted'); + assert.ok(html.includes('json-key'), 'keys must be highlighted'); + }); + + test('empty array renders as []', () => { + const html = highlightJson([], 0); + + assert.strictEqual(html, '[]', "empty array must render as '[]'"); + }); +}); diff 
--git a/src/Napper.VsCode/src/test/unit/reportGenerator.test.ts b/src/Napper.VsCode/src/test/unit/reportGenerator.test.ts new file mode 100644 index 0000000..cee68eb --- /dev/null +++ b/src/Napper.VsCode/src/test/unit/reportGenerator.test.ts @@ -0,0 +1,272 @@ +// Specs: vscode-playlists +import * as assert from 'assert'; +import * as fs from 'fs'; +import * as os from 'os'; +import * as path from 'path'; +import { generatePlaylistReport } from '../../reportGenerator'; +import type { RunResult } from '../../types'; +import { + REPORT_FILE_EXTENSION, + REPORT_FILE_SUFFIX, + SECTION_LABEL_REQUEST, + SECTION_LABEL_REQUEST_BODY, + SECTION_LABEL_REQUEST_HEADERS, + SECTION_LABEL_RESPONSE, + SECTION_LABEL_RESPONSE_HEADERS, +} from '../../constants'; + +const MOCK_PASSED_STEP: RunResult = { + file: '/workspace/petstore/list-pets.nap', + passed: true, + statusCode: 200, + duration: 142, + body: '{"pets":[]}', + headers: { 'content-type': 'application/json' }, + assertions: [{ target: 'status', passed: true, expected: '200', actual: '200' }], + }, + MOCK_FAILED_STEP: RunResult = { + file: '/workspace/petstore/get-pet.nap', + passed: false, + statusCode: 404, + duration: 87, + error: 'Not Found', + body: '{"message":"not found"}', + headers: { 'content-type': 'application/json' }, + assertions: [{ target: 'status', passed: false, expected: '200', actual: '404' }], + }, + MOCK_SCRIPT_STEP: RunResult = { + file: '/workspace/scripts/echo.fsx', + passed: true, + duration: 320, + log: ['Hello from script', 'Done'], + assertions: [], + }, + MOCK_POST_STEP: RunResult = { + file: '/workspace/petstore/create-pet.nap', + passed: true, + statusCode: 201, + duration: 95, + requestMethod: 'POST', + requestUrl: 'https://api.petstore.io/v1/pets', + requestHeaders: { 'Content-Type': 'application/json', Authorization: 'Bearer xyz' }, + requestBody: '{"name":"Fido","species":"dog"}', + requestBodyContentType: 'application/json', + headers: { 'content-type': 'application/json', location: 
'/v1/pets/42' }, + body: '{"id":42,"name":"Fido"}', + assertions: [{ target: 'status', passed: true, expected: '201', actual: '201' }], + }; + +suite('Report Generator', () => { + test('produces valid HTML document with playlist name', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP]); + + assert.ok(html.includes(''), 'Report must be a valid HTML document'); + assert.ok(html.includes('smoke'), 'Report must contain the playlist name in the hero'); + assert.ok(html.includes(''), 'Report must have an HTML title element'); + }); + + test('shows all step file names and HTTP status codes', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP, MOCK_FAILED_STEP]); + + assert.ok(html.includes('list-pets.nap'), 'Report must contain passed step file name'); + assert.ok(html.includes('get-pet.nap'), 'Report must contain failed step file name'); + assert.ok(html.includes('200'), 'Report must show 200 status code'); + assert.ok(html.includes('404'), 'Report must show 404 status code'); + }); + + test('shows PASSED and FAILED badges on individual steps', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP, MOCK_FAILED_STEP]); + + assert.ok(html.includes('PASSED'), 'Report must show PASSED badge'); + assert.ok(html.includes('FAILED'), 'Report must show FAILED badge'); + }); + + test('shows step durations', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP, MOCK_FAILED_STEP]); + + assert.ok(html.includes('142ms'), 'Report must show 142ms duration'); + assert.ok(html.includes('87ms'), 'Report must show 87ms duration'); + }); + + test('shows error details for failed steps', () => { + const html = generatePlaylistReport('smoke', [MOCK_FAILED_STEP]); + + assert.ok(html.includes('Not Found'), 'Report must show error message for failed step'); + assert.ok(html.includes('error-box'), 'Report must render error in styled error box'); + }); + + test('includes response headers inside Response group', () 
=> { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP]); + + assert.ok(html.includes(SECTION_LABEL_RESPONSE), 'Report must have Response group'); + assert.ok( + html.includes(SECTION_LABEL_RESPONSE_HEADERS), + 'Report must have response headers section title inside Response group', + ); + assert.ok(html.includes('content-type'), 'Report must show header key'); + assert.ok(html.includes('application/json'), 'Report must show header value'); + }); + + test('includes response body with JSON content inside Response group', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP]); + + assert.ok(html.includes('Response Body'), 'Report must have response body section title'); + assert.ok(html.includes('pets'), 'Report must show JSON content from response body'); + }); + + test('shows assertions with pass/fail indicators', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP, MOCK_FAILED_STEP]); + + assert.ok(html.includes('Assertions'), 'Report must have assertions section'); + assert.ok(html.includes('status'), 'Report must show assertion target name'); + assert.ok(html.includes('expected'), 'Report must show expected vs actual for failures'); + }); + + test('calculates correct pass rate for mixed results', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP, MOCK_FAILED_STEP]); + + assert.ok(html.includes('50%'), 'Report must show 50% pass rate for 1 of 2 passing'); + assert.ok(html.includes('Pass Rate'), 'Report must have pass rate stat card'); + }); + + test('shows 100% pass rate when all steps pass', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP]); + + assert.ok(html.includes('100%'), 'Report must show 100% pass rate when all pass'); + assert.ok(html.includes('All Steps Passed'), 'Report must show all-passed status banner'); + }); + + test('shows summary stats: passed, failed, duration', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP, 
MOCK_FAILED_STEP]); + + assert.ok(html.includes('Duration'), 'Report must show duration stat'); + assert.ok(html.includes('Passed'), 'Report must show passed stat label'); + assert.ok(html.includes('Failed'), 'Report must show failed stat label'); + }); + + test('renders script step output/log section', () => { + const html = generatePlaylistReport('scripts', [MOCK_SCRIPT_STEP]); + + assert.ok(html.includes('echo.fsx'), 'Report must show script step file name'); + assert.ok(html.includes('Hello from script'), 'Report must show script log output'); + assert.ok(html.includes('Output'), 'Report must have output section title for script logs'); + }); + + test('has interactive expand/collapse for step details', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP]); + + assert.ok( + html.includes('toggleStep'), + 'Report must have toggleStep function for expand/collapse', + ); + assert.ok(html.includes('step-chevron'), 'Report must have chevron indicators'); + }); + + test('zero results produces FAILED status, never PASSED', () => { + const html = generatePlaylistReport('empty-run', []); + + assert.ok( + html.includes('Some Steps Failed'), + 'Zero results must show failure status banner — playlist must NEVER pass by default', + ); + assert.ok( + !html.includes('All Steps Passed'), + "Zero results must NOT show 'All Steps Passed' — 0 steps executed is a failure", + ); + assert.ok(html.includes('0%'), 'Zero results must show 0% pass rate'); + }); + + test('step detail has collapsible Request group (closed by default)', () => { + const html = generatePlaylistReport('smoke', [MOCK_POST_STEP]); + + assert.ok( + html.includes('report-group'), + 'Report must use report-group class for collapsible groups', + ); + assert.ok(html.includes(SECTION_LABEL_REQUEST), 'Report must have a Request group'); + const requestGroupMatch = html.indexOf(`>${SECTION_LABEL_REQUEST}<`), + responseGroupMatch = html.indexOf(`>${SECTION_LABEL_RESPONSE}<`); + 
assert.ok(requestGroupMatch > -1, 'Request group title must exist'); + assert.ok(responseGroupMatch > -1, 'Response group title must exist'); + assert.ok( + requestGroupMatch < responseGroupMatch, + 'Request group must appear before Response group', + ); + }); + + test('step detail has collapsible Response group (open by default)', () => { + const html = generatePlaylistReport('smoke', [MOCK_POST_STEP]); + + assert.ok( + html.includes('<details class="report-group" open>'), + 'Response group must have the open attribute', + ); + }); + + test('Request group shows request URL and method', () => { + const html = generatePlaylistReport('smoke', [MOCK_POST_STEP]); + + assert.ok(html.includes('https://api.petstore.io/v1/pets'), 'Report must show the request URL'); + assert.ok(html.includes('POST'), 'Report must show the request method'); + assert.ok(html.includes('request-method-tag'), 'Request method must use the styled tag class'); + }); + + test('Request group shows request headers', () => { + const html = generatePlaylistReport('smoke', [MOCK_POST_STEP]); + + assert.ok( + html.includes(SECTION_LABEL_REQUEST_HEADERS), + 'Report must have Request Headers subsection', + ); + assert.ok(html.includes('Authorization'), 'Request headers must show Authorization key'); + assert.ok(html.includes('Bearer xyz'), 'Request headers must show Authorization value'); + }); + + test('Request group shows request body with content type', () => { + const html = generatePlaylistReport('smoke', [MOCK_POST_STEP]); + + assert.ok( + html.includes(SECTION_LABEL_REQUEST_BODY), + 'Report must have Request Body subsection', + ); + assert.ok(html.includes('Fido'), 'Request body must show JSON content'); + assert.ok(html.includes('content-type-hint'), 'Request body must show content type hint'); + }); + + test('Response group contains assertions, headers, and body', () => { + const html = generatePlaylistReport('smoke', [MOCK_POST_STEP]); + + assert.ok(html.includes(SECTION_LABEL_RESPONSE), 'Report 
must have Response group'); + assert.ok(html.includes('Assertions'), 'Response group must contain assertions'); + assert.ok( + html.includes(SECTION_LABEL_RESPONSE_HEADERS), + 'Response group must contain response headers', + ); + assert.ok(html.includes('location'), 'Response headers must show location key'); + assert.ok(html.includes('Response Body'), 'Response group must contain response body'); + }); + + test('Request group without URL/body still renders (no request details hint)', () => { + const html = generatePlaylistReport('smoke', [MOCK_PASSED_STEP]); + + assert.ok( + html.includes(SECTION_LABEL_REQUEST), + 'Report must have Request group even without URL', + ); + }); + + test('report file can be written to and read from disk', () => { + const tmpDir = os.tmpdir(), + reportPath = path.join(tmpDir, `test-playlist${REPORT_FILE_SUFFIX}${REPORT_FILE_EXTENSION}`), + html = generatePlaylistReport('test-playlist', [MOCK_PASSED_STEP]); + fs.writeFileSync(reportPath, html, 'utf-8'); + + assert.ok(fs.existsSync(reportPath), 'Report file must exist on disk after write'); + + const content = fs.readFileSync(reportPath, 'utf-8'); + assert.ok(content.includes('<!DOCTYPE html>'), 'Read-back content must be valid HTML'); + assert.ok(content.includes('test-playlist'), 'Read-back content must contain playlist name'); + + fs.unlinkSync(reportPath); + }); +}); diff --git a/src/Nap.VsCode/src/types.ts b/src/Napper.VsCode/src/types.ts similarity index 100% rename from src/Nap.VsCode/src/types.ts rename to src/Napper.VsCode/src/types.ts diff --git a/src/Nap.VsCode/src/watchers.ts b/src/Napper.VsCode/src/watchers.ts similarity index 74% rename from src/Nap.VsCode/src/watchers.ts rename to src/Napper.VsCode/src/watchers.ts index 50d0814..e8ca91d 100644 --- a/src/Nap.VsCode/src/watchers.ts +++ b/src/Napper.VsCode/src/watchers.ts @@ -1,8 +1,9 @@ +// Specs: vscode-settings, vscode-commands // File system watchers and auto-run registration for .nap/.naplist files -import * as vscode 
from "vscode"; -import type { ExplorerAdapter } from "./explorerAdapter"; -import type { Logger } from "./logger"; +import * as vscode from 'vscode'; +import type { ExplorerAdapter } from './explorerAdapter'; +import type { Logger } from './logger'; import { CONFIG_AUTO_RUN, CONFIG_SECTION, @@ -11,7 +12,7 @@ import { NAPLIST_GLOB, NAP_EXTENSION, NAP_GLOB, -} from "./constants"; +} from './constants'; const isNapperFile = (fileName: string): boolean => fileName.endsWith(NAP_EXTENSION) || fileName.endsWith(NAPLIST_EXTENSION); @@ -22,11 +23,11 @@ export const registerWatchers = ( log: Logger, ): void => { const napWatcher = vscode.workspace.createFileSystemWatcher(NAP_GLOB), - naplistWatcher = vscode.workspace.createFileSystemWatcher(NAPLIST_GLOB), - refreshExplorer = (): void => { - log.debug(LOG_MSG_TREE_REFRESH); - explorer.refresh(); - }; + naplistWatcher = vscode.workspace.createFileSystemWatcher(NAPLIST_GLOB), + refreshExplorer = (): void => { + log.debug(LOG_MSG_TREE_REFRESH); + explorer.refresh(); + }; napWatcher.onDidCreate(refreshExplorer); napWatcher.onDidDelete(refreshExplorer); napWatcher.onDidChange(refreshExplorer); @@ -43,7 +44,7 @@ export const registerAutoRun = ( context.subscriptions.push( vscode.workspace.onDidSaveTextDocument((doc) => { const config = vscode.workspace.getConfiguration(CONFIG_SECTION), - autoRun = config.get<boolean>(CONFIG_AUTO_RUN, false); + autoRun = config.get<boolean>(CONFIG_AUTO_RUN, false); if (autoRun && isNapperFile(doc.fileName)) { onRunFile(doc.uri).catch(() => undefined); } diff --git a/src/Nap.VsCode/syntaxes/nap.tmLanguage.json b/src/Napper.VsCode/syntaxes/nap.tmLanguage.json similarity index 100% rename from src/Nap.VsCode/syntaxes/nap.tmLanguage.json rename to src/Napper.VsCode/syntaxes/nap.tmLanguage.json diff --git a/src/Nap.VsCode/syntaxes/napenv.tmLanguage.json b/src/Napper.VsCode/syntaxes/napenv.tmLanguage.json similarity index 100% rename from src/Nap.VsCode/syntaxes/napenv.tmLanguage.json rename to 
src/Napper.VsCode/syntaxes/napenv.tmLanguage.json diff --git a/src/Nap.VsCode/syntaxes/naplist.tmLanguage.json b/src/Napper.VsCode/syntaxes/naplist.tmLanguage.json similarity index 100% rename from src/Nap.VsCode/syntaxes/naplist.tmLanguage.json rename to src/Napper.VsCode/syntaxes/naplist.tmLanguage.json diff --git a/src/Nap.VsCode/tsconfig.build.json b/src/Napper.VsCode/tsconfig.build.json similarity index 100% rename from src/Nap.VsCode/tsconfig.build.json rename to src/Napper.VsCode/tsconfig.build.json diff --git a/src/Nap.VsCode/tsconfig.json b/src/Napper.VsCode/tsconfig.json similarity index 100% rename from src/Nap.VsCode/tsconfig.json rename to src/Napper.VsCode/tsconfig.json diff --git a/src/Nap.VsCode/tsconfig.test.json b/src/Napper.VsCode/tsconfig.test.json similarity index 100% rename from src/Nap.VsCode/tsconfig.test.json rename to src/Napper.VsCode/tsconfig.test.json diff --git a/src/Nap.VsCode/webpack.config.js b/src/Napper.VsCode/webpack.config.js similarity index 100% rename from src/Nap.VsCode/webpack.config.js rename to src/Napper.VsCode/webpack.config.js diff --git a/src/Napper.Zed/Cargo.lock b/src/Napper.Zed/Cargo.lock new file mode 100644 index 0000000..268cd68 --- /dev/null +++ b/src/Napper.Zed/Cargo.lock @@ -0,0 +1,1045 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "auditable-serde" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7bf8143dfc3c0258df908843e169b5cc5fcf76c7718bd66135ef4a9cd558c5" +dependencies = [ + "semver", + "serde", + "serde_json", + "topological-sort", +] + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + 
"libc", + "windows-sys", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + 
"futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name 
= "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "libc" +version = "0.2.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" + +[[package]] +name = "linux-raw-sys" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" + 
+[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "nap-zed" +version = "0.1.0" +dependencies = [ + "tempfile", + "zed_extension_api", +] + +[[package]] +name = "once_cell" +version = "1.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" 
+dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + +[[package]] +name = "rustix" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + 
"syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "spdx" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3" +dependencies = [ + "smallvec", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tempfile" +version = "3.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd" +dependencies = [ + "fastrand", + "getrandom", + "once_cell", + "rustix", + "windows-sys", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "topological-sort" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen 0.51.0", +] + +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + 
"wit-bindgen 0.51.0", +] + +[[package]] +name = "wasm-encoder" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80bb72f02e7fbf07183443b27b0f3d4144abf8c114189f2e088ed95b696a7822" +dependencies = [ + "leb128fmt", + "wasmparser 0.227.1", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser 0.244.0", +] + +[[package]] +name = "wasm-metadata" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce1ef0faabbbba6674e97a56bee857ccddf942785a336c8b47b42373c922a91d" +dependencies = [ + "anyhow", + "auditable-serde", + "flate2", + "indexmap", + "serde", + "serde_derive", + "serde_json", + "spdx", + "url", + "wasm-encoder 0.227.1", + "wasmparser 0.227.1", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder 0.244.0", + "wasmparser 0.244.0", +] + +[[package]] +name = "wasmparser" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "wit-bindgen" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10fb6648689b3929d56bbc7eb1acf70c9a42a29eb5358c67c10f54dbd5d695de" +dependencies = [ + "wit-bindgen-rt", + "wit-bindgen-rust-macro 0.41.0", +] + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro 0.51.0", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92fa781d4f2ff6d3f27f3cc9b74a73327b31ca0dc4a3ef25a0ce2983e0e5af9b" +dependencies = [ + "anyhow", + "heck", + "wit-parser 0.227.1", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser 0.244.0", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" +dependencies = [ + "bitflags", + "futures", + "once_cell", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata 0.227.1", + "wit-bindgen-core 
0.41.0", + "wit-component 0.227.1", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata 0.244.0", + "wit-bindgen-core 0.51.0", + "wit-component 0.244.0", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad19eec017904e04c60719592a803ee5da76cb51c81e3f6fbf9457f59db49799" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core 0.41.0", + "wit-bindgen-rust 0.41.0", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core 0.51.0", + "wit-bindgen-rust 0.51.0", +] + +[[package]] +name = "wit-component" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder 0.227.1", + "wasm-metadata 0.227.1", + "wasmparser 0.227.1", + "wit-parser 0.227.1", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder 0.244.0", + "wasm-metadata 0.244.0", + "wasmparser 0.244.0", + "wit-parser 0.244.0", +] + +[[package]] +name = 
"wit-parser" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser 0.227.1", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser 0.244.0", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zed_extension_api" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40ef88a8e5aeff67b0996b1795d56338f04c02de95f1f147577944aa37b801d6" +dependencies = [ + "serde", + "serde_json", + "wit-bindgen 0.41.0", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] 
+name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/src/Napper.Zed/Cargo.toml b/src/Napper.Zed/Cargo.toml new file mode 100644 index 0000000..896ea9c --- /dev/null +++ b/src/Napper.Zed/Cargo.toml @@ -0,0 +1,73 @@ +[package] +name = "nap-zed" +version = "0.1.0" +edition = "2021" +description = "Zed extension for Nap API testing files (.nap, .naplist, .napenv)" +license = "MIT" +publish = false + +[lib] +crate-type = ["cdylib"] + +[dependencies] +zed_extension_api = "0.5.0" + +[dev-dependencies] +tempfile = "3" + +[lints.rust] +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(tarpaulin_include)'] } +unsafe_code = "deny" +missing_docs = "deny" +unused_imports = "deny" +dead_code = "deny" +unused_variables = "deny" +unused_mut = "deny" +unused_assignments = "deny" +unused_results 
= "deny" +# Stale #[expect] attributes must be errors, not warnings — no dead suppressions +unfulfilled_lint_expectations = "deny" + +[lints.clippy] +# all + pedantic at maximum — restriction & nursery cherry-picked (contain contradictions) +all = { level = "deny", priority = -1 } +pedantic = { level = "deny", priority = -1 } +unwrap_used = "deny" +expect_used = "deny" +panic = "deny" +todo = "deny" +unimplemented = "deny" +# Implicit panics from out-of-bounds indexing — use .get() instead +indexing_slicing = "deny" +# Silent lossy type conversions via `as` — use .into()/.try_into() instead +as_conversions = "deny" +# Forgotten debug macros leaking info to stderr +dbg_macro = "deny" +# Resource leaks from skipped destructors — use ManuallyDrop if needed +mem_forget = "deny" +# From impls that can panic should be TryFrom instead +fallible_impl_from = "deny" +# Catch needless implicit clones — use .clone() for explicit intent +implicit_clone = "deny" +# Require SAFETY comments on any unsafe block — defense in depth for allowed exceptions +undocumented_unsafe_blocks = "deny" +# Prevent silent precision loss in float literals — critical for numeric type correctness +lossy_float_literal = "deny" +# Prevent stack overflow from oversized local variables — critical for recursive AST walking +large_stack_frames = "deny" +# Force #[expect] over #[allow] so stale lint suppressions don't hide new violations +allow_attributes = "deny" +too_many_lines = "deny" +too_many_arguments = "deny" +cast_possible_truncation = "deny" +# Catch accidental infinite loops — require `-> !` return type for intentional ones +infinite_loop = "deny" +# Prevent silent error cause discarding in .map_err(|_| ...) 
— preserve error provenance +map_err_ignore = "deny" +# Prevent process::exit() — use proper error propagation, exit only via main() return +exit = "deny" +# Catch struct destructures that silently ignore new fields — critical during refactoring +rest_pat_in_fully_bound_structs = "deny" +# Require Arc::clone(&x) syntax — make ref-counted pointer cloning visually distinct +clone_on_ref_ptr = "deny" + diff --git a/src/Napper.Zed/LICENSE b/src/Napper.Zed/LICENSE new file mode 100644 index 0000000..4fdec69 --- /dev/null +++ b/src/Napper.Zed/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2026 Christian Findlay + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/src/Napper.Zed/README.md b/src/Napper.Zed/README.md new file mode 100644 index 0000000..486a75e --- /dev/null +++ b/src/Napper.Zed/README.md @@ -0,0 +1,31 @@ +# Nap — Zed Extension + +Language support for [Nap](https://napapi.dev) API testing files in Zed. 
+ +## Features + +- **Syntax highlighting** for `.nap`, `.naplist`, and `.napenv` files +- **Code outline** — navigate sections via the symbol outline +- **Runnables** — run requests directly from the editor gutter +- **Bracket matching** — section headers, variable interpolation, strings +- **Redactions** — `{{variable}}` values masked during screen sharing +- **Slash commands** — `/nap-run` and `/nap-import-openapi` in the Assistant + +## Requirements + +The [Nap CLI](https://napapi.dev/docs/installation) must be installed and on your PATH for runnables and slash commands to work. + +## File Types + +| Extension | Description | +|-----------|-------------| +| `.nap` | API request definition | +| `.naplist` | Playlist (ordered request sequence) | +| `.napenv` | Environment variables | + +## Slash Commands + +| Command | Description | +|---------|-------------| +| `/nap-run <file>` | Run a `.nap` or `.naplist` file and show results | +| `/nap-import-openapi <spec>` | Generate `.nap` files from an OpenAPI spec | diff --git a/src/Napper.Zed/extension.toml b/src/Napper.Zed/extension.toml new file mode 100644 index 0000000..69caeb1 --- /dev/null +++ b/src/Napper.Zed/extension.toml @@ -0,0 +1,30 @@ +id = "nap" +name = "Nap" +description = "Language support for Nap API testing files (.nap, .naplist, .napenv)" +version = "0.1.0" +schema_version = 1 +authors = ["Christian Findlay <christian@napapi.dev>"] +repository = "https://github.com/nicknap/napper" + +[grammars.nap] +repository = "file:///Users/christianfindlay/Documents/Code/ApiTesting/src/Napper.Zed/grammars/tree-sitter-nap" +rev = "70c16517498078236bde80c8824af2ca5549f051" + +[grammars.naplist] +repository = "file:///Users/christianfindlay/Documents/Code/ApiTesting/src/Napper.Zed/grammars/tree-sitter-naplist" +rev = "ab2aa9511c3849f20fd488c8197cf78ebdd2648a" + +[grammars.napenv] +repository = "file:///Users/christianfindlay/Documents/Code/ApiTesting/src/Napper.Zed/grammars/tree-sitter-napenv" +rev = 
"0680806d478a53b3885b914cd48e1e9fffa812e6" + +[language_servers.nap-lsp] +languages = ["Nap", "Naplist", "Napenv"] + +[slash_commands.nap-run] +description = "Run a .nap or .naplist file and show results" +requires_argument = true + +[slash_commands.nap-import-openapi] +description = "Generate .nap files from an OpenAPI spec" +requires_argument = true diff --git a/src/Napper.Zed/grammars/tree-sitter-nap/grammar.js b/src/Napper.Zed/grammars/tree-sitter-nap/grammar.js new file mode 100644 index 0000000..4ccdb78 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-nap/grammar.js @@ -0,0 +1,102 @@ +/// <reference types="tree-sitter-cli/dsl" /> + +const HTTP_METHODS = ["GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"]; + +module.exports = grammar({ + name: "nap", + + extras: ($) => [/[ \t]/], + + rules: { + source_file: ($) => + repeat( + choice( + $.section_header, + $.shorthand_request, + $.pair, + $.assertion_exists, + $.assertion_contains, + $.assertion_matches, + $.assertion_lt, + $.assertion_gt, + $.triple_quoted_string, + $.comment, + $.newline, + ), + ), + + newline: (_) => /\r?\n/, + + comment: (_) => seq("#", /[^\r\n]*/), + + // --- Shorthand: `GET https://example.com` --- + shorthand_request: ($) => + seq($.http_method, $.value), + + // --- Section headers (flat) --- + section_header: (_) => + choice( + seq("[", "meta", "]"), + seq("[", "vars", "]"), + seq("[", "request", "]"), + seq("[", "request", ".", "headers", "]"), + seq("[", "request", ".", "body", "]"), + seq("[", "assert", "]"), + seq("[", "script", "]"), + ), + + // --- Key = value pair (covers all sections) --- + pair: ($) => + seq($.key, "=", choice($.array_value, $.value)), + + // --- Assertions (each operator is its own rule — no ambiguity with `=`) --- + assertion_exists: ($) => + seq($.key, "exists"), + + assertion_contains: ($) => + seq($.key, "contains", $.assertion_value), + + assertion_matches: ($) => + seq($.key, "matches", $.assertion_value), + + assertion_lt: ($) => + 
seq($.key, "<", $.assertion_value), + + assertion_gt: ($) => + seq($.key, ">", $.assertion_value), + + assertion_value: ($) => + choice($.duration_value, $.variable_ref, $.quoted_string, $.raw_value), + + duration_value: (_) => /[0-9]+ms/, + raw_value: (_) => /[^\r\n]+/, + + // --- Tokens --- + http_method: (_) => choice(...HTTP_METHODS), + + key: (_) => /[a-zA-Z_][a-zA-Z0-9_.\-]*/, + + value: ($) => + repeat1(choice($.variable_ref, $.quoted_string, $.text_fragment)), + + text_fragment: (_) => /[^\s"{\r\n][^{"\r\n]*/, + + quoted_string: ($) => + seq('"', repeat(choice($.variable_ref, $.string_content)), '"'), + + string_content: (_) => /[^"\\{}\r\n]+|\\./, + + triple_quoted_string: ($) => + seq('"""', /\r?\n/, optional($.body_content), '"""'), + + body_content: ($) => + repeat1(choice($.variable_ref, $.body_text)), + + body_text: (_) => /[^{}\r\n]+|\r?\n|[{}]/, + + array_value: ($) => + seq("[", optional(seq($.quoted_string, repeat(seq(",", $.quoted_string)))), "]"), + + variable_ref: (_) => seq("{{", /[a-zA-Z_][a-zA-Z0-9_\-]*/, "}}"), + }, +}); diff --git a/src/Napper.Zed/grammars/tree-sitter-nap/package.json b/src/Napper.Zed/grammars/tree-sitter-nap/package.json new file mode 100644 index 0000000..ccad0d0 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-nap/package.json @@ -0,0 +1,12 @@ +{ + "name": "tree-sitter-nap", + "version": "0.1.0", + "description": "Tree-sitter grammar for Nap API testing files", + "tree-sitter": [ + { + "scope": "source.nap", + "file-types": ["nap"], + "highlights": "queries/highlights.scm" + } + ] +} diff --git a/src/Napper.Zed/grammars/tree-sitter-nap/queries/highlights.scm b/src/Napper.Zed/grammars/tree-sitter-nap/queries/highlights.scm new file mode 100644 index 0000000..8e206db --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-nap/queries/highlights.scm @@ -0,0 +1,55 @@ +; Section headers +(section_header "[" @punctuation.bracket) +(section_header "]" @punctuation.bracket) +(section_header "meta" @keyword) 
+(section_header "vars" @keyword) +(section_header "request" @keyword) +(section_header "headers" @keyword) +(section_header "body" @keyword) +(section_header "assert" @keyword) +(section_header "script" @keyword) +(section_header "." @punctuation.delimiter) + +; Comments +(comment) @comment + +; HTTP methods +(http_method) @function.method + +; Key-value pairs +(pair (key) @property) +(pair "=" @operator) + +; Values +(quoted_string "\"" @punctuation.delimiter) +(quoted_string (string_content) @string) +(text_fragment) @string +(triple_quoted_string "\"\"\"" @punctuation.delimiter) +(triple_quoted_string (body_content (body_text) @string)) + +; URLs (in shorthand requests) +(shorthand_request (value (text_fragment) @string.special.url)) + +; Variable interpolation +(variable_ref "{{" @punctuation.special) +(variable_ref "}}" @punctuation.special) +(variable_ref) @variable + +; Arrays +(array_value "[" @punctuation.bracket) +(array_value "]" @punctuation.bracket) +(array_value "," @punctuation.delimiter) + +; Assertions +(assertion_exists (key) @property) +(assertion_exists "exists" @keyword.operator) +(assertion_contains (key) @property) +(assertion_contains "contains" @keyword.operator) +(assertion_matches (key) @property) +(assertion_matches "matches" @keyword.operator) +(assertion_lt (key) @property) +(assertion_lt "<" @operator) +(assertion_gt (key) @property) +(assertion_gt ">" @operator) +(duration_value) @number +(raw_value) @string diff --git a/src/Napper.Zed/grammars/tree-sitter-nap/src/grammar.json b/src/Napper.Zed/grammars/tree-sitter-nap/src/grammar.json new file mode 100644 index 0000000..2258020 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-nap/src/grammar.json @@ -0,0 +1,582 @@ +{ + "$schema": "https://tree-sitter.github.io/tree-sitter/assets/schemas/grammar.schema.json", + "name": "nap", + "rules": { + "source_file": { + "type": "REPEAT", + "content": { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "section_header" 
+ }, + { + "type": "SYMBOL", + "name": "shorthand_request" + }, + { + "type": "SYMBOL", + "name": "pair" + }, + { + "type": "SYMBOL", + "name": "assertion_exists" + }, + { + "type": "SYMBOL", + "name": "assertion_contains" + }, + { + "type": "SYMBOL", + "name": "assertion_matches" + }, + { + "type": "SYMBOL", + "name": "assertion_lt" + }, + { + "type": "SYMBOL", + "name": "assertion_gt" + }, + { + "type": "SYMBOL", + "name": "triple_quoted_string" + }, + { + "type": "SYMBOL", + "name": "comment" + }, + { + "type": "SYMBOL", + "name": "newline" + } + ] + } + }, + "newline": { + "type": "PATTERN", + "value": "\\r?\\n" + }, + "comment": { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "#" + }, + { + "type": "PATTERN", + "value": "[^\\r\\n]*" + } + ] + }, + "shorthand_request": { + "type": "SEQ", + "members": [ + { + "type": "SYMBOL", + "name": "http_method" + }, + { + "type": "SYMBOL", + "name": "value" + } + ] + }, + "section_header": { + "type": "CHOICE", + "members": [ + { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "STRING", + "value": "meta" + }, + { + "type": "STRING", + "value": "]" + } + ] + }, + { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "STRING", + "value": "vars" + }, + { + "type": "STRING", + "value": "]" + } + ] + }, + { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "STRING", + "value": "request" + }, + { + "type": "STRING", + "value": "]" + } + ] + }, + { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "STRING", + "value": "request" + }, + { + "type": "STRING", + "value": "." + }, + { + "type": "STRING", + "value": "headers" + }, + { + "type": "STRING", + "value": "]" + } + ] + }, + { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "STRING", + "value": "request" + }, + { + "type": "STRING", + "value": "." 
+ }, + { + "type": "STRING", + "value": "body" + }, + { + "type": "STRING", + "value": "]" + } + ] + }, + { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "STRING", + "value": "assert" + }, + { + "type": "STRING", + "value": "]" + } + ] + }, + { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "STRING", + "value": "script" + }, + { + "type": "STRING", + "value": "]" + } + ] + } + ] + }, + "pair": { + "type": "SEQ", + "members": [ + { + "type": "SYMBOL", + "name": "key" + }, + { + "type": "STRING", + "value": "=" + }, + { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "array_value" + }, + { + "type": "SYMBOL", + "name": "value" + } + ] + } + ] + }, + "assertion_exists": { + "type": "SEQ", + "members": [ + { + "type": "SYMBOL", + "name": "key" + }, + { + "type": "STRING", + "value": "exists" + } + ] + }, + "assertion_contains": { + "type": "SEQ", + "members": [ + { + "type": "SYMBOL", + "name": "key" + }, + { + "type": "STRING", + "value": "contains" + }, + { + "type": "SYMBOL", + "name": "assertion_value" + } + ] + }, + "assertion_matches": { + "type": "SEQ", + "members": [ + { + "type": "SYMBOL", + "name": "key" + }, + { + "type": "STRING", + "value": "matches" + }, + { + "type": "SYMBOL", + "name": "assertion_value" + } + ] + }, + "assertion_lt": { + "type": "SEQ", + "members": [ + { + "type": "SYMBOL", + "name": "key" + }, + { + "type": "STRING", + "value": "<" + }, + { + "type": "SYMBOL", + "name": "assertion_value" + } + ] + }, + "assertion_gt": { + "type": "SEQ", + "members": [ + { + "type": "SYMBOL", + "name": "key" + }, + { + "type": "STRING", + "value": ">" + }, + { + "type": "SYMBOL", + "name": "assertion_value" + } + ] + }, + "assertion_value": { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "duration_value" + }, + { + "type": "SYMBOL", + "name": "variable_ref" + }, + { + "type": "SYMBOL", + "name": "quoted_string" + }, + { + 
"type": "SYMBOL", + "name": "raw_value" + } + ] + }, + "duration_value": { + "type": "PATTERN", + "value": "[0-9]+ms" + }, + "raw_value": { + "type": "PATTERN", + "value": "[^\\r\\n]+" + }, + "http_method": { + "type": "CHOICE", + "members": [ + { + "type": "STRING", + "value": "GET" + }, + { + "type": "STRING", + "value": "POST" + }, + { + "type": "STRING", + "value": "PUT" + }, + { + "type": "STRING", + "value": "PATCH" + }, + { + "type": "STRING", + "value": "DELETE" + }, + { + "type": "STRING", + "value": "HEAD" + }, + { + "type": "STRING", + "value": "OPTIONS" + } + ] + }, + "key": { + "type": "PATTERN", + "value": "[a-zA-Z_][a-zA-Z0-9_.\\-]*" + }, + "value": { + "type": "REPEAT1", + "content": { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "variable_ref" + }, + { + "type": "SYMBOL", + "name": "quoted_string" + }, + { + "type": "SYMBOL", + "name": "text_fragment" + } + ] + } + }, + "text_fragment": { + "type": "PATTERN", + "value": "[^\\s\"{\\r\\n][^{\"\\r\\n]*" + }, + "quoted_string": { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "\"" + }, + { + "type": "REPEAT", + "content": { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "variable_ref" + }, + { + "type": "SYMBOL", + "name": "string_content" + } + ] + } + }, + { + "type": "STRING", + "value": "\"" + } + ] + }, + "string_content": { + "type": "PATTERN", + "value": "[^\"\\\\{}\\r\\n]+|\\\\." 
+ }, + "triple_quoted_string": { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "\"\"\"" + }, + { + "type": "PATTERN", + "value": "\\r?\\n" + }, + { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "body_content" + }, + { + "type": "BLANK" + } + ] + }, + { + "type": "STRING", + "value": "\"\"\"" + } + ] + }, + "body_content": { + "type": "REPEAT1", + "content": { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "variable_ref" + }, + { + "type": "SYMBOL", + "name": "body_text" + } + ] + } + }, + "body_text": { + "type": "PATTERN", + "value": "[^{}\\r\\n]+|\\r?\\n|[{}]" + }, + "array_value": { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "CHOICE", + "members": [ + { + "type": "SEQ", + "members": [ + { + "type": "SYMBOL", + "name": "quoted_string" + }, + { + "type": "REPEAT", + "content": { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "," + }, + { + "type": "SYMBOL", + "name": "quoted_string" + } + ] + } + } + ] + }, + { + "type": "BLANK" + } + ] + }, + { + "type": "STRING", + "value": "]" + } + ] + }, + "variable_ref": { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "{{" + }, + { + "type": "PATTERN", + "value": "[a-zA-Z_][a-zA-Z0-9_\\-]*" + }, + { + "type": "STRING", + "value": "}}" + } + ] + } + }, + "extras": [ + { + "type": "PATTERN", + "value": "[ \\t]" + } + ], + "conflicts": [], + "precedences": [], + "externals": [], + "inline": [], + "supertypes": [], + "reserved": {} +} \ No newline at end of file diff --git a/src/Napper.Zed/grammars/tree-sitter-nap/src/node-types.json b/src/Napper.Zed/grammars/tree-sitter-nap/src/node-types.json new file mode 100644 index 0000000..7e1ca01 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-nap/src/node-types.json @@ -0,0 +1,474 @@ +[ + { + "type": "array_value", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": false, + "types": [ + { + 
"type": "quoted_string", + "named": true + } + ] + } + }, + { + "type": "assertion_contains", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": true, + "types": [ + { + "type": "assertion_value", + "named": true + }, + { + "type": "key", + "named": true + } + ] + } + }, + { + "type": "assertion_exists", + "named": true, + "fields": {}, + "children": { + "multiple": false, + "required": true, + "types": [ + { + "type": "key", + "named": true + } + ] + } + }, + { + "type": "assertion_gt", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": true, + "types": [ + { + "type": "assertion_value", + "named": true + }, + { + "type": "key", + "named": true + } + ] + } + }, + { + "type": "assertion_lt", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": true, + "types": [ + { + "type": "assertion_value", + "named": true + }, + { + "type": "key", + "named": true + } + ] + } + }, + { + "type": "assertion_matches", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": true, + "types": [ + { + "type": "assertion_value", + "named": true + }, + { + "type": "key", + "named": true + } + ] + } + }, + { + "type": "assertion_value", + "named": true, + "fields": {}, + "children": { + "multiple": false, + "required": true, + "types": [ + { + "type": "duration_value", + "named": true + }, + { + "type": "quoted_string", + "named": true + }, + { + "type": "raw_value", + "named": true + }, + { + "type": "variable_ref", + "named": true + } + ] + } + }, + { + "type": "body_content", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": true, + "types": [ + { + "type": "body_text", + "named": true + }, + { + "type": "variable_ref", + "named": true + } + ] + } + }, + { + "type": "comment", + "named": true, + "fields": {} + }, + { + "type": "http_method", + "named": true, + "fields": {} + }, + { + "type": "newline", + "named": true, + "fields": {} 
+ }, + { + "type": "pair", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": true, + "types": [ + { + "type": "array_value", + "named": true + }, + { + "type": "key", + "named": true + }, + { + "type": "value", + "named": true + } + ] + } + }, + { + "type": "quoted_string", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": false, + "types": [ + { + "type": "string_content", + "named": true + }, + { + "type": "variable_ref", + "named": true + } + ] + } + }, + { + "type": "section_header", + "named": true, + "fields": {} + }, + { + "type": "shorthand_request", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": true, + "types": [ + { + "type": "http_method", + "named": true + }, + { + "type": "value", + "named": true + } + ] + } + }, + { + "type": "source_file", + "named": true, + "root": true, + "fields": {}, + "children": { + "multiple": true, + "required": false, + "types": [ + { + "type": "assertion_contains", + "named": true + }, + { + "type": "assertion_exists", + "named": true + }, + { + "type": "assertion_gt", + "named": true + }, + { + "type": "assertion_lt", + "named": true + }, + { + "type": "assertion_matches", + "named": true + }, + { + "type": "comment", + "named": true + }, + { + "type": "newline", + "named": true + }, + { + "type": "pair", + "named": true + }, + { + "type": "section_header", + "named": true + }, + { + "type": "shorthand_request", + "named": true + }, + { + "type": "triple_quoted_string", + "named": true + } + ] + } + }, + { + "type": "triple_quoted_string", + "named": true, + "fields": {}, + "children": { + "multiple": false, + "required": false, + "types": [ + { + "type": "body_content", + "named": true + } + ] + } + }, + { + "type": "value", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": true, + "types": [ + { + "type": "quoted_string", + "named": true + }, + { + "type": "text_fragment", + "named": 
true + }, + { + "type": "variable_ref", + "named": true + } + ] + } + }, + { + "type": "variable_ref", + "named": true, + "fields": {} + }, + { + "type": "\"", + "named": false + }, + { + "type": "\"\"\"", + "named": false + }, + { + "type": "#", + "named": false + }, + { + "type": ",", + "named": false + }, + { + "type": ".", + "named": false + }, + { + "type": "<", + "named": false + }, + { + "type": "=", + "named": false + }, + { + "type": ">", + "named": false + }, + { + "type": "DELETE", + "named": false + }, + { + "type": "GET", + "named": false + }, + { + "type": "HEAD", + "named": false + }, + { + "type": "OPTIONS", + "named": false + }, + { + "type": "PATCH", + "named": false + }, + { + "type": "POST", + "named": false + }, + { + "type": "PUT", + "named": false + }, + { + "type": "[", + "named": false + }, + { + "type": "]", + "named": false + }, + { + "type": "assert", + "named": false + }, + { + "type": "body", + "named": false + }, + { + "type": "body_text", + "named": true + }, + { + "type": "contains", + "named": false + }, + { + "type": "duration_value", + "named": true + }, + { + "type": "exists", + "named": false + }, + { + "type": "headers", + "named": false + }, + { + "type": "key", + "named": true + }, + { + "type": "matches", + "named": false + }, + { + "type": "meta", + "named": false + }, + { + "type": "raw_value", + "named": true + }, + { + "type": "request", + "named": false + }, + { + "type": "script", + "named": false + }, + { + "type": "string_content", + "named": true + }, + { + "type": "text_fragment", + "named": true + }, + { + "type": "vars", + "named": false + }, + { + "type": "{{", + "named": false + }, + { + "type": "}}", + "named": false + } +] \ No newline at end of file diff --git a/src/Napper.Zed/grammars/tree-sitter-nap/src/parser.c b/src/Napper.Zed/grammars/tree-sitter-nap/src/parser.c new file mode 100644 index 0000000..507fe7c --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-nap/src/parser.c @@ -0,0 +1,3176 @@ +/* 
Automatically @generated by tree-sitter */ + +#include "tree_sitter/parser.h" + +#if defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic ignored "-Wmissing-field-initializers" +#endif + +#define LANGUAGE_VERSION 15 +#define STATE_COUNT 70 +#define LARGE_STATE_COUNT 2 +#define SYMBOL_COUNT 63 +#define ALIAS_COUNT 0 +#define TOKEN_COUNT 39 +#define EXTERNAL_TOKEN_COUNT 0 +#define FIELD_COUNT 0 +#define MAX_ALIAS_SEQUENCE_LENGTH 5 +#define MAX_RESERVED_WORD_SET_SIZE 0 +#define PRODUCTION_ID_COUNT 1 +#define SUPERTYPE_COUNT 0 + +enum ts_symbol_identifiers { + aux_sym_newline_token1 = 1, + anon_sym_POUND = 2, + aux_sym_comment_token1 = 3, + anon_sym_LBRACK = 4, + anon_sym_meta = 5, + anon_sym_RBRACK = 6, + anon_sym_vars = 7, + anon_sym_request = 8, + anon_sym_DOT = 9, + anon_sym_headers = 10, + anon_sym_body = 11, + anon_sym_assert = 12, + anon_sym_script = 13, + anon_sym_EQ = 14, + anon_sym_exists = 15, + anon_sym_contains = 16, + anon_sym_matches = 17, + anon_sym_LT = 18, + anon_sym_GT = 19, + sym_duration_value = 20, + sym_raw_value = 21, + anon_sym_GET = 22, + anon_sym_POST = 23, + anon_sym_PUT = 24, + anon_sym_PATCH = 25, + anon_sym_DELETE = 26, + anon_sym_HEAD = 27, + anon_sym_OPTIONS = 28, + sym_key = 29, + sym_text_fragment = 30, + anon_sym_DQUOTE = 31, + sym_string_content = 32, + anon_sym_DQUOTE_DQUOTE_DQUOTE = 33, + sym_body_text = 34, + anon_sym_COMMA = 35, + anon_sym_LBRACE_LBRACE = 36, + aux_sym_variable_ref_token1 = 37, + anon_sym_RBRACE_RBRACE = 38, + sym_source_file = 39, + sym_newline = 40, + sym_comment = 41, + sym_shorthand_request = 42, + sym_section_header = 43, + sym_pair = 44, + sym_assertion_exists = 45, + sym_assertion_contains = 46, + sym_assertion_matches = 47, + sym_assertion_lt = 48, + sym_assertion_gt = 49, + sym_assertion_value = 50, + sym_http_method = 51, + sym_value = 52, + sym_quoted_string = 53, + sym_triple_quoted_string = 54, + sym_body_content = 55, + sym_array_value = 56, + sym_variable_ref = 57, + 
aux_sym_source_file_repeat1 = 58, + aux_sym_value_repeat1 = 59, + aux_sym_quoted_string_repeat1 = 60, + aux_sym_body_content_repeat1 = 61, + aux_sym_array_value_repeat1 = 62, +}; + +static const char * const ts_symbol_names[] = { + [ts_builtin_sym_end] = "end", + [aux_sym_newline_token1] = "newline_token1", + [anon_sym_POUND] = "#", + [aux_sym_comment_token1] = "comment_token1", + [anon_sym_LBRACK] = "[", + [anon_sym_meta] = "meta", + [anon_sym_RBRACK] = "]", + [anon_sym_vars] = "vars", + [anon_sym_request] = "request", + [anon_sym_DOT] = ".", + [anon_sym_headers] = "headers", + [anon_sym_body] = "body", + [anon_sym_assert] = "assert", + [anon_sym_script] = "script", + [anon_sym_EQ] = "=", + [anon_sym_exists] = "exists", + [anon_sym_contains] = "contains", + [anon_sym_matches] = "matches", + [anon_sym_LT] = "<", + [anon_sym_GT] = ">", + [sym_duration_value] = "duration_value", + [sym_raw_value] = "raw_value", + [anon_sym_GET] = "GET", + [anon_sym_POST] = "POST", + [anon_sym_PUT] = "PUT", + [anon_sym_PATCH] = "PATCH", + [anon_sym_DELETE] = "DELETE", + [anon_sym_HEAD] = "HEAD", + [anon_sym_OPTIONS] = "OPTIONS", + [sym_key] = "key", + [sym_text_fragment] = "text_fragment", + [anon_sym_DQUOTE] = "\"", + [sym_string_content] = "string_content", + [anon_sym_DQUOTE_DQUOTE_DQUOTE] = "\"\"\"", + [sym_body_text] = "body_text", + [anon_sym_COMMA] = ",", + [anon_sym_LBRACE_LBRACE] = "{{", + [aux_sym_variable_ref_token1] = "variable_ref_token1", + [anon_sym_RBRACE_RBRACE] = "}}", + [sym_source_file] = "source_file", + [sym_newline] = "newline", + [sym_comment] = "comment", + [sym_shorthand_request] = "shorthand_request", + [sym_section_header] = "section_header", + [sym_pair] = "pair", + [sym_assertion_exists] = "assertion_exists", + [sym_assertion_contains] = "assertion_contains", + [sym_assertion_matches] = "assertion_matches", + [sym_assertion_lt] = "assertion_lt", + [sym_assertion_gt] = "assertion_gt", + [sym_assertion_value] = "assertion_value", + [sym_http_method] = 
"http_method", + [sym_value] = "value", + [sym_quoted_string] = "quoted_string", + [sym_triple_quoted_string] = "triple_quoted_string", + [sym_body_content] = "body_content", + [sym_array_value] = "array_value", + [sym_variable_ref] = "variable_ref", + [aux_sym_source_file_repeat1] = "source_file_repeat1", + [aux_sym_value_repeat1] = "value_repeat1", + [aux_sym_quoted_string_repeat1] = "quoted_string_repeat1", + [aux_sym_body_content_repeat1] = "body_content_repeat1", + [aux_sym_array_value_repeat1] = "array_value_repeat1", +}; + +static const TSSymbol ts_symbol_map[] = { + [ts_builtin_sym_end] = ts_builtin_sym_end, + [aux_sym_newline_token1] = aux_sym_newline_token1, + [anon_sym_POUND] = anon_sym_POUND, + [aux_sym_comment_token1] = aux_sym_comment_token1, + [anon_sym_LBRACK] = anon_sym_LBRACK, + [anon_sym_meta] = anon_sym_meta, + [anon_sym_RBRACK] = anon_sym_RBRACK, + [anon_sym_vars] = anon_sym_vars, + [anon_sym_request] = anon_sym_request, + [anon_sym_DOT] = anon_sym_DOT, + [anon_sym_headers] = anon_sym_headers, + [anon_sym_body] = anon_sym_body, + [anon_sym_assert] = anon_sym_assert, + [anon_sym_script] = anon_sym_script, + [anon_sym_EQ] = anon_sym_EQ, + [anon_sym_exists] = anon_sym_exists, + [anon_sym_contains] = anon_sym_contains, + [anon_sym_matches] = anon_sym_matches, + [anon_sym_LT] = anon_sym_LT, + [anon_sym_GT] = anon_sym_GT, + [sym_duration_value] = sym_duration_value, + [sym_raw_value] = sym_raw_value, + [anon_sym_GET] = anon_sym_GET, + [anon_sym_POST] = anon_sym_POST, + [anon_sym_PUT] = anon_sym_PUT, + [anon_sym_PATCH] = anon_sym_PATCH, + [anon_sym_DELETE] = anon_sym_DELETE, + [anon_sym_HEAD] = anon_sym_HEAD, + [anon_sym_OPTIONS] = anon_sym_OPTIONS, + [sym_key] = sym_key, + [sym_text_fragment] = sym_text_fragment, + [anon_sym_DQUOTE] = anon_sym_DQUOTE, + [sym_string_content] = sym_string_content, + [anon_sym_DQUOTE_DQUOTE_DQUOTE] = anon_sym_DQUOTE_DQUOTE_DQUOTE, + [sym_body_text] = sym_body_text, + [anon_sym_COMMA] = anon_sym_COMMA, + 
[anon_sym_LBRACE_LBRACE] = anon_sym_LBRACE_LBRACE, + [aux_sym_variable_ref_token1] = aux_sym_variable_ref_token1, + [anon_sym_RBRACE_RBRACE] = anon_sym_RBRACE_RBRACE, + [sym_source_file] = sym_source_file, + [sym_newline] = sym_newline, + [sym_comment] = sym_comment, + [sym_shorthand_request] = sym_shorthand_request, + [sym_section_header] = sym_section_header, + [sym_pair] = sym_pair, + [sym_assertion_exists] = sym_assertion_exists, + [sym_assertion_contains] = sym_assertion_contains, + [sym_assertion_matches] = sym_assertion_matches, + [sym_assertion_lt] = sym_assertion_lt, + [sym_assertion_gt] = sym_assertion_gt, + [sym_assertion_value] = sym_assertion_value, + [sym_http_method] = sym_http_method, + [sym_value] = sym_value, + [sym_quoted_string] = sym_quoted_string, + [sym_triple_quoted_string] = sym_triple_quoted_string, + [sym_body_content] = sym_body_content, + [sym_array_value] = sym_array_value, + [sym_variable_ref] = sym_variable_ref, + [aux_sym_source_file_repeat1] = aux_sym_source_file_repeat1, + [aux_sym_value_repeat1] = aux_sym_value_repeat1, + [aux_sym_quoted_string_repeat1] = aux_sym_quoted_string_repeat1, + [aux_sym_body_content_repeat1] = aux_sym_body_content_repeat1, + [aux_sym_array_value_repeat1] = aux_sym_array_value_repeat1, +}; + +static const TSSymbolMetadata ts_symbol_metadata[] = { + [ts_builtin_sym_end] = { + .visible = false, + .named = true, + }, + [aux_sym_newline_token1] = { + .visible = false, + .named = false, + }, + [anon_sym_POUND] = { + .visible = true, + .named = false, + }, + [aux_sym_comment_token1] = { + .visible = false, + .named = false, + }, + [anon_sym_LBRACK] = { + .visible = true, + .named = false, + }, + [anon_sym_meta] = { + .visible = true, + .named = false, + }, + [anon_sym_RBRACK] = { + .visible = true, + .named = false, + }, + [anon_sym_vars] = { + .visible = true, + .named = false, + }, + [anon_sym_request] = { + .visible = true, + .named = false, + }, + [anon_sym_DOT] = { + .visible = true, + .named = false, + 
}, + [anon_sym_headers] = { + .visible = true, + .named = false, + }, + [anon_sym_body] = { + .visible = true, + .named = false, + }, + [anon_sym_assert] = { + .visible = true, + .named = false, + }, + [anon_sym_script] = { + .visible = true, + .named = false, + }, + [anon_sym_EQ] = { + .visible = true, + .named = false, + }, + [anon_sym_exists] = { + .visible = true, + .named = false, + }, + [anon_sym_contains] = { + .visible = true, + .named = false, + }, + [anon_sym_matches] = { + .visible = true, + .named = false, + }, + [anon_sym_LT] = { + .visible = true, + .named = false, + }, + [anon_sym_GT] = { + .visible = true, + .named = false, + }, + [sym_duration_value] = { + .visible = true, + .named = true, + }, + [sym_raw_value] = { + .visible = true, + .named = true, + }, + [anon_sym_GET] = { + .visible = true, + .named = false, + }, + [anon_sym_POST] = { + .visible = true, + .named = false, + }, + [anon_sym_PUT] = { + .visible = true, + .named = false, + }, + [anon_sym_PATCH] = { + .visible = true, + .named = false, + }, + [anon_sym_DELETE] = { + .visible = true, + .named = false, + }, + [anon_sym_HEAD] = { + .visible = true, + .named = false, + }, + [anon_sym_OPTIONS] = { + .visible = true, + .named = false, + }, + [sym_key] = { + .visible = true, + .named = true, + }, + [sym_text_fragment] = { + .visible = true, + .named = true, + }, + [anon_sym_DQUOTE] = { + .visible = true, + .named = false, + }, + [sym_string_content] = { + .visible = true, + .named = true, + }, + [anon_sym_DQUOTE_DQUOTE_DQUOTE] = { + .visible = true, + .named = false, + }, + [sym_body_text] = { + .visible = true, + .named = true, + }, + [anon_sym_COMMA] = { + .visible = true, + .named = false, + }, + [anon_sym_LBRACE_LBRACE] = { + .visible = true, + .named = false, + }, + [aux_sym_variable_ref_token1] = { + .visible = false, + .named = false, + }, + [anon_sym_RBRACE_RBRACE] = { + .visible = true, + .named = false, + }, + [sym_source_file] = { + .visible = true, + .named = true, + }, + 
[sym_newline] = { + .visible = true, + .named = true, + }, + [sym_comment] = { + .visible = true, + .named = true, + }, + [sym_shorthand_request] = { + .visible = true, + .named = true, + }, + [sym_section_header] = { + .visible = true, + .named = true, + }, + [sym_pair] = { + .visible = true, + .named = true, + }, + [sym_assertion_exists] = { + .visible = true, + .named = true, + }, + [sym_assertion_contains] = { + .visible = true, + .named = true, + }, + [sym_assertion_matches] = { + .visible = true, + .named = true, + }, + [sym_assertion_lt] = { + .visible = true, + .named = true, + }, + [sym_assertion_gt] = { + .visible = true, + .named = true, + }, + [sym_assertion_value] = { + .visible = true, + .named = true, + }, + [sym_http_method] = { + .visible = true, + .named = true, + }, + [sym_value] = { + .visible = true, + .named = true, + }, + [sym_quoted_string] = { + .visible = true, + .named = true, + }, + [sym_triple_quoted_string] = { + .visible = true, + .named = true, + }, + [sym_body_content] = { + .visible = true, + .named = true, + }, + [sym_array_value] = { + .visible = true, + .named = true, + }, + [sym_variable_ref] = { + .visible = true, + .named = true, + }, + [aux_sym_source_file_repeat1] = { + .visible = false, + .named = false, + }, + [aux_sym_value_repeat1] = { + .visible = false, + .named = false, + }, + [aux_sym_quoted_string_repeat1] = { + .visible = false, + .named = false, + }, + [aux_sym_body_content_repeat1] = { + .visible = false, + .named = false, + }, + [aux_sym_array_value_repeat1] = { + .visible = false, + .named = false, + }, +}; + +static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT][MAX_ALIAS_SEQUENCE_LENGTH] = { + [0] = {0}, +}; + +static const uint16_t ts_non_terminal_alias_map[] = { + 0, +}; + +static const TSStateId ts_primary_state_ids[STATE_COUNT] = { + [0] = 0, + [1] = 1, + [2] = 2, + [3] = 3, + [4] = 4, + [5] = 5, + [6] = 6, + [7] = 7, + [8] = 8, + [9] = 7, + [10] = 6, + [11] = 11, + [12] = 12, + [13] = 13, + [14] 
= 14, + [15] = 15, + [16] = 16, + [17] = 17, + [18] = 18, + [19] = 19, + [20] = 20, + [21] = 21, + [22] = 22, + [23] = 23, + [24] = 24, + [25] = 25, + [26] = 26, + [27] = 8, + [28] = 28, + [29] = 29, + [30] = 30, + [31] = 31, + [32] = 32, + [33] = 33, + [34] = 34, + [35] = 35, + [36] = 36, + [37] = 37, + [38] = 38, + [39] = 39, + [40] = 40, + [41] = 41, + [42] = 42, + [43] = 38, + [44] = 39, + [45] = 45, + [46] = 46, + [47] = 47, + [48] = 48, + [49] = 8, + [50] = 50, + [51] = 8, + [52] = 52, + [53] = 53, + [54] = 54, + [55] = 55, + [56] = 56, + [57] = 57, + [58] = 58, + [59] = 59, + [60] = 60, + [61] = 61, + [62] = 62, + [63] = 63, + [64] = 57, + [65] = 57, + [66] = 57, + [67] = 60, + [68] = 60, + [69] = 60, +}; + +static bool ts_lex(TSLexer *lexer, TSStateId state) { + START_LEXER(); + eof = lexer->eof(lexer); + switch (state) { + case 0: + if (eof) ADVANCE(89); + ADVANCE_MAP( + '\n', 90, + '\r', 1, + '"', 190, + '#', 91, + ',', 203, + '.', 101, + '<', 110, + '=', 106, + '>', 111, + 'D', 15, + 'G', 16, + 'H', 17, + 'O', 25, + 'P', 12, + '[', 95, + ']', 98, + 'a', 70, + 'b', 56, + 'c', 57, + 'e', 82, + 'h', 44, + 'm', 34, + 'r', 43, + 's', 39, + 'v', 37, + '{', 84, + '}', 85, + ); + if (lookahead == '\t' || + lookahead == ' ') SKIP(0); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(53); + END_STATE(); + case 1: + if (lookahead == '\n') ADVANCE(90); + END_STATE(); + case 2: + if (lookahead == '\n') ADVANCE(197); + END_STATE(); + case 3: + if (lookahead == '\r') ADVANCE(2); + if (lookahead == '"') ADVANCE(200); + if (lookahead == '{') ADVANCE(201); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(198); + if (lookahead == '\n' || + lookahead == '}') ADVANCE(197); + if (lookahead != 0) ADVANCE(202); + END_STATE(); + case 4: + if (lookahead == '"') ADVANCE(5); + END_STATE(); + case 5: + if (lookahead == '"') ADVANCE(195); + END_STATE(); + case 6: + if (lookahead == '"') ADVANCE(189); + if (lookahead == '[') ADVANCE(96); + if (lookahead == '{') ADVANCE(84); 
+ if (lookahead == '\t' || + lookahead == ' ') SKIP(6); + if (lookahead != 0 && + (lookahead < '\t' || '\r' < lookahead)) ADVANCE(188); + END_STATE(); + case 7: + if (lookahead == '"') ADVANCE(189); + if (lookahead == '\\') ADVANCE(86); + if (lookahead == '{') ADVANCE(84); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(193); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '}') ADVANCE(194); + END_STATE(); + case 8: + if (lookahead == '"') ADVANCE(189); + if (lookahead == ']') ADVANCE(98); + if (lookahead == '\t' || + lookahead == ' ') SKIP(8); + if (('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(206); + END_STATE(); + case 9: + if (lookahead == '"') ADVANCE(189); + if (lookahead == '{') ADVANCE(84); + if (lookahead == '\t' || + lookahead == ' ') SKIP(9); + if (lookahead != 0 && + (lookahead < '\t' || '\r' < lookahead)) ADVANCE(188); + END_STATE(); + case 10: + if (lookahead == '"') ADVANCE(191); + if (lookahead == '{') ADVANCE(117); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(114); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(115); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(118); + END_STATE(); + case 11: + if (lookahead == 'A') ADVANCE(14); + END_STATE(); + case 12: + if (lookahead == 'A') ADVANCE(30); + if (lookahead == 'O') ADVANCE(27); + if (lookahead == 'U') ADVANCE(31); + END_STATE(); + case 13: + if (lookahead == 'C') ADVANCE(20); + END_STATE(); + case 14: + if (lookahead == 'D') ADVANCE(134); + END_STATE(); + case 15: + if (lookahead == 'E') ADVANCE(22); + END_STATE(); + case 16: + if (lookahead == 'E') ADVANCE(28); + END_STATE(); + case 17: + if (lookahead == 'E') ADVANCE(11); + END_STATE(); + case 18: + if (lookahead == 'E') ADVANCE(131); + END_STATE(); + case 19: + if (lookahead == 'E') ADVANCE(33); + END_STATE(); + case 20: + if (lookahead == 
'H') ADVANCE(128); + END_STATE(); + case 21: + if (lookahead == 'I') ADVANCE(24); + END_STATE(); + case 22: + if (lookahead == 'L') ADVANCE(19); + END_STATE(); + case 23: + if (lookahead == 'N') ADVANCE(26); + END_STATE(); + case 24: + if (lookahead == 'O') ADVANCE(23); + END_STATE(); + case 25: + if (lookahead == 'P') ADVANCE(29); + END_STATE(); + case 26: + if (lookahead == 'S') ADVANCE(137); + END_STATE(); + case 27: + if (lookahead == 'S') ADVANCE(32); + END_STATE(); + case 28: + if (lookahead == 'T') ADVANCE(119); + END_STATE(); + case 29: + if (lookahead == 'T') ADVANCE(21); + END_STATE(); + case 30: + if (lookahead == 'T') ADVANCE(13); + END_STATE(); + case 31: + if (lookahead == 'T') ADVANCE(125); + END_STATE(); + case 32: + if (lookahead == 'T') ADVANCE(122); + END_STATE(); + case 33: + if (lookahead == 'T') ADVANCE(18); + END_STATE(); + case 34: + if (lookahead == 'a') ADVANCE(77); + if (lookahead == 'e') ADVANCE(78); + END_STATE(); + case 35: + if (lookahead == 'a') ADVANCE(97); + END_STATE(); + case 36: + if (lookahead == 'a') ADVANCE(42); + END_STATE(); + case 37: + if (lookahead == 'a') ADVANCE(62); + END_STATE(); + case 38: + if (lookahead == 'a') ADVANCE(51); + END_STATE(); + case 39: + if (lookahead == 'c') ADVANCE(60); + END_STATE(); + case 40: + if (lookahead == 'c') ADVANCE(49); + END_STATE(); + case 41: + if (lookahead == 'd') ADVANCE(83); + END_STATE(); + case 42: + if (lookahead == 'd') ADVANCE(48); + END_STATE(); + case 43: + if (lookahead == 'e') ADVANCE(59); + END_STATE(); + case 44: + if (lookahead == 'e') ADVANCE(36); + END_STATE(); + case 45: + if (lookahead == 'e') ADVANCE(61); + END_STATE(); + case 46: + if (lookahead == 'e') ADVANCE(73); + END_STATE(); + case 47: + if (lookahead == 'e') ADVANCE(68); + END_STATE(); + case 48: + if (lookahead == 'e') ADVANCE(63); + END_STATE(); + case 49: + if (lookahead == 'h') ADVANCE(47); + END_STATE(); + case 50: + if (lookahead == 'i') ADVANCE(58); + END_STATE(); + case 51: + if (lookahead == 'i') 
ADVANCE(54); + END_STATE(); + case 52: + if (lookahead == 'i') ADVANCE(72); + END_STATE(); + case 53: + if (lookahead == 'm') ADVANCE(64); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(53); + END_STATE(); + case 54: + if (lookahead == 'n') ADVANCE(69); + END_STATE(); + case 55: + if (lookahead == 'n') ADVANCE(79); + END_STATE(); + case 56: + if (lookahead == 'o') ADVANCE(41); + END_STATE(); + case 57: + if (lookahead == 'o') ADVANCE(55); + END_STATE(); + case 58: + if (lookahead == 'p') ADVANCE(75); + END_STATE(); + case 59: + if (lookahead == 'q') ADVANCE(81); + END_STATE(); + case 60: + if (lookahead == 'r') ADVANCE(50); + END_STATE(); + case 61: + if (lookahead == 'r') ADVANCE(74); + END_STATE(); + case 62: + if (lookahead == 'r') ADVANCE(65); + END_STATE(); + case 63: + if (lookahead == 'r') ADVANCE(67); + END_STATE(); + case 64: + if (lookahead == 's') ADVANCE(112); + END_STATE(); + case 65: + if (lookahead == 's') ADVANCE(99); + END_STATE(); + case 66: + if (lookahead == 's') ADVANCE(107); + END_STATE(); + case 67: + if (lookahead == 's') ADVANCE(102); + END_STATE(); + case 68: + if (lookahead == 's') ADVANCE(109); + END_STATE(); + case 69: + if (lookahead == 's') ADVANCE(108); + END_STATE(); + case 70: + if (lookahead == 's') ADVANCE(71); + END_STATE(); + case 71: + if (lookahead == 's') ADVANCE(45); + END_STATE(); + case 72: + if (lookahead == 's') ADVANCE(80); + END_STATE(); + case 73: + if (lookahead == 's') ADVANCE(76); + END_STATE(); + case 74: + if (lookahead == 't') ADVANCE(104); + END_STATE(); + case 75: + if (lookahead == 't') ADVANCE(105); + END_STATE(); + case 76: + if (lookahead == 't') ADVANCE(100); + END_STATE(); + case 77: + if (lookahead == 't') ADVANCE(40); + END_STATE(); + case 78: + if (lookahead == 't') ADVANCE(35); + END_STATE(); + case 79: + if (lookahead == 't') ADVANCE(38); + END_STATE(); + case 80: + if (lookahead == 't') ADVANCE(66); + END_STATE(); + case 81: + if (lookahead == 'u') ADVANCE(46); + END_STATE(); + case 82: + 
if (lookahead == 'x') ADVANCE(52); + END_STATE(); + case 83: + if (lookahead == 'y') ADVANCE(103); + END_STATE(); + case 84: + if (lookahead == '{') ADVANCE(204); + END_STATE(); + case 85: + if (lookahead == '}') ADVANCE(207); + END_STATE(); + case 86: + if (lookahead != 0 && + lookahead != '\n') ADVANCE(192); + END_STATE(); + case 87: + if (eof) ADVANCE(89); + ADVANCE_MAP( + '\n', 90, + '\r', 1, + '"', 190, + '#', 92, + 'D', 152, + 'G', 153, + 'H', 154, + 'O', 169, + 'P', 143, + '[', 96, + '{', 84, + ); + if (lookahead == '\t' || + lookahead == ' ') SKIP(87); + if (('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + (lookahead < '\t' || '\r' < lookahead)) ADVANCE(188); + END_STATE(); + case 88: + if (eof) ADVANCE(89); + ADVANCE_MAP( + '\n', 90, + '\r', 1, + '"', 4, + '#', 91, + ',', 203, + 'D', 148, + 'G', 149, + 'H', 150, + 'O', 168, + 'P', 142, + '[', 95, + ']', 98, + ); + if (lookahead == '\t' || + lookahead == ' ') SKIP(88); + if (('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 89: + ACCEPT_TOKEN(ts_builtin_sym_end); + END_STATE(); + case 90: + ACCEPT_TOKEN(aux_sym_newline_token1); + END_STATE(); + case 91: + ACCEPT_TOKEN(anon_sym_POUND); + END_STATE(); + case 92: + ACCEPT_TOKEN(anon_sym_POUND); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + lookahead != '{') ADVANCE(188); + END_STATE(); + case 93: + ACCEPT_TOKEN(aux_sym_comment_token1); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(93); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(94); + END_STATE(); + case 94: + ACCEPT_TOKEN(aux_sym_comment_token1); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(94); + END_STATE(); + case 95: + ACCEPT_TOKEN(anon_sym_LBRACK); + END_STATE(); + case 96: + 
ACCEPT_TOKEN(anon_sym_LBRACK); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + lookahead != '{') ADVANCE(188); + END_STATE(); + case 97: + ACCEPT_TOKEN(anon_sym_meta); + END_STATE(); + case 98: + ACCEPT_TOKEN(anon_sym_RBRACK); + END_STATE(); + case 99: + ACCEPT_TOKEN(anon_sym_vars); + END_STATE(); + case 100: + ACCEPT_TOKEN(anon_sym_request); + END_STATE(); + case 101: + ACCEPT_TOKEN(anon_sym_DOT); + END_STATE(); + case 102: + ACCEPT_TOKEN(anon_sym_headers); + END_STATE(); + case 103: + ACCEPT_TOKEN(anon_sym_body); + END_STATE(); + case 104: + ACCEPT_TOKEN(anon_sym_assert); + END_STATE(); + case 105: + ACCEPT_TOKEN(anon_sym_script); + END_STATE(); + case 106: + ACCEPT_TOKEN(anon_sym_EQ); + END_STATE(); + case 107: + ACCEPT_TOKEN(anon_sym_exists); + END_STATE(); + case 108: + ACCEPT_TOKEN(anon_sym_contains); + END_STATE(); + case 109: + ACCEPT_TOKEN(anon_sym_matches); + END_STATE(); + case 110: + ACCEPT_TOKEN(anon_sym_LT); + END_STATE(); + case 111: + ACCEPT_TOKEN(anon_sym_GT); + END_STATE(); + case 112: + ACCEPT_TOKEN(sym_duration_value); + END_STATE(); + case 113: + ACCEPT_TOKEN(sym_duration_value); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(118); + END_STATE(); + case 114: + ACCEPT_TOKEN(sym_raw_value); + if (lookahead == '"') ADVANCE(191); + if (lookahead == '{') ADVANCE(117); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(114); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(115); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(118); + END_STATE(); + case 115: + ACCEPT_TOKEN(sym_raw_value); + if (lookahead == 'm') ADVANCE(116); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(115); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(118); + END_STATE(); + case 116: + ACCEPT_TOKEN(sym_raw_value); + if (lookahead == 's') ADVANCE(113); + if (lookahead != 0 && + lookahead != '\n' && + lookahead 
!= '\r') ADVANCE(118); + END_STATE(); + case 117: + ACCEPT_TOKEN(sym_raw_value); + if (lookahead == '{') ADVANCE(205); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(118); + END_STATE(); + case 118: + ACCEPT_TOKEN(sym_raw_value); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(118); + END_STATE(); + case 119: + ACCEPT_TOKEN(anon_sym_GET); + END_STATE(); + case 120: + ACCEPT_TOKEN(anon_sym_GET); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 121: + ACCEPT_TOKEN(anon_sym_GET); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 122: + ACCEPT_TOKEN(anon_sym_POST); + END_STATE(); + case 123: + ACCEPT_TOKEN(anon_sym_POST); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 124: + ACCEPT_TOKEN(anon_sym_POST); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 125: + ACCEPT_TOKEN(anon_sym_PUT); + END_STATE(); + case 126: + ACCEPT_TOKEN(anon_sym_PUT); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 127: + ACCEPT_TOKEN(anon_sym_PUT); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 128: + ACCEPT_TOKEN(anon_sym_PATCH); + END_STATE(); + case 129: + ACCEPT_TOKEN(anon_sym_PATCH); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 130: + ACCEPT_TOKEN(anon_sym_PATCH); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 131: + ACCEPT_TOKEN(anon_sym_DELETE); + END_STATE(); + case 132: + ACCEPT_TOKEN(anon_sym_DELETE); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 133: + ACCEPT_TOKEN(anon_sym_DELETE); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 134: + ACCEPT_TOKEN(anon_sym_HEAD); + END_STATE(); + case 135: + ACCEPT_TOKEN(anon_sym_HEAD); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 136: + ACCEPT_TOKEN(anon_sym_HEAD); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 137: + ACCEPT_TOKEN(anon_sym_OPTIONS); + END_STATE(); + case 138: + ACCEPT_TOKEN(anon_sym_OPTIONS); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 139: + ACCEPT_TOKEN(anon_sym_OPTIONS); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 140: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'A') ADVANCE(146); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('B' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 141: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'A') ADVANCE(147); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('B' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 142: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'A') ADVANCE(176); + if (lookahead == 'O') ADVANCE(172); + if (lookahead == 'U') ADVANCE(177); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('B' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 143: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'A') ADVANCE(181); + if (lookahead == 'O') ADVANCE(173); + if (lookahead == 'U') ADVANCE(182); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('B' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 144: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'C') ADVANCE(158); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 145: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'C') ADVANCE(159); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 146: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'D') ADVANCE(135); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 147: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'D') ADVANCE(136); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 148: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'E') ADVANCE(162); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 149: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'E') ADVANCE(174); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 150: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'E') ADVANCE(140); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 151: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'E') ADVANCE(132); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 152: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'E') ADVANCE(163); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 153: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'E') ADVANCE(179); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 154: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'E') ADVANCE(141); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 155: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'E') ADVANCE(133); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 156: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'E') ADVANCE(184); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 157: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'E') ADVANCE(185); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 158: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'H') ADVANCE(129); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 159: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'H') ADVANCE(130); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 160: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'I') ADVANCE(166); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 161: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'I') ADVANCE(167); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 162: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'L') ADVANCE(156); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 163: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'L') ADVANCE(157); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 164: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'N') ADVANCE(170); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 165: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'N') ADVANCE(171); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 166: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'O') ADVANCE(164); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 167: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'O') ADVANCE(165); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 168: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'P') ADVANCE(175); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 169: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'P') ADVANCE(180); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 170: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'S') ADVANCE(138); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 171: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'S') ADVANCE(139); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 172: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'S') ADVANCE(178); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 173: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'S') ADVANCE(183); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 174: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(120); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 175: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(160); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 176: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(144); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 177: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(126); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 178: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(123); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 179: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(121); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 180: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(161); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 181: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(145); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 182: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(127); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 183: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(124); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 184: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(151); + if (lookahead == '-' || + lookahead == '.' 
|| + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 185: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'T') ADVANCE(155); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 186: + ACCEPT_TOKEN(sym_key); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(186); + END_STATE(); + case 187: + ACCEPT_TOKEN(sym_key); + if (lookahead == '-' || + lookahead == '.' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(187); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + (lookahead < 'a' || '{' < lookahead)) ADVANCE(188); + END_STATE(); + case 188: + ACCEPT_TOKEN(sym_text_fragment); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + lookahead != '{') ADVANCE(188); + END_STATE(); + case 189: + ACCEPT_TOKEN(anon_sym_DQUOTE); + END_STATE(); + case 190: + ACCEPT_TOKEN(anon_sym_DQUOTE); + if (lookahead == '"') ADVANCE(5); + END_STATE(); + case 191: + ACCEPT_TOKEN(anon_sym_DQUOTE); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(118); + END_STATE(); + case 192: + ACCEPT_TOKEN(sym_string_content); + END_STATE(); + case 193: + ACCEPT_TOKEN(sym_string_content); + if (lookahead == '\\') ADVANCE(86); + if (lookahead == '\t' || + lookahead == ' ') 
ADVANCE(193); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + lookahead != '{' && + lookahead != '}') ADVANCE(194); + END_STATE(); + case 194: + ACCEPT_TOKEN(sym_string_content); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + lookahead != '\\' && + lookahead != '{' && + lookahead != '}') ADVANCE(194); + END_STATE(); + case 195: + ACCEPT_TOKEN(anon_sym_DQUOTE_DQUOTE_DQUOTE); + END_STATE(); + case 196: + ACCEPT_TOKEN(anon_sym_DQUOTE_DQUOTE_DQUOTE); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '{' && + lookahead != '}') ADVANCE(202); + END_STATE(); + case 197: + ACCEPT_TOKEN(sym_body_text); + END_STATE(); + case 198: + ACCEPT_TOKEN(sym_body_text); + if (lookahead == '\r') ADVANCE(2); + if (lookahead == '"') ADVANCE(200); + if (lookahead == '{') ADVANCE(201); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(198); + if (lookahead == '\n' || + lookahead == '}') ADVANCE(197); + if (lookahead != 0) ADVANCE(202); + END_STATE(); + case 199: + ACCEPT_TOKEN(sym_body_text); + if (lookahead == '"') ADVANCE(196); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '{' && + lookahead != '}') ADVANCE(202); + END_STATE(); + case 200: + ACCEPT_TOKEN(sym_body_text); + if (lookahead == '"') ADVANCE(199); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '{' && + lookahead != '}') ADVANCE(202); + END_STATE(); + case 201: + ACCEPT_TOKEN(sym_body_text); + if (lookahead == '{') ADVANCE(204); + END_STATE(); + case 202: + ACCEPT_TOKEN(sym_body_text); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '{' && + lookahead != '}') ADVANCE(202); + END_STATE(); + case 203: + ACCEPT_TOKEN(anon_sym_COMMA); + END_STATE(); + case 204: + ACCEPT_TOKEN(anon_sym_LBRACE_LBRACE); + END_STATE(); + case 205: + ACCEPT_TOKEN(anon_sym_LBRACE_LBRACE); + 
if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(118); + END_STATE(); + case 206: + ACCEPT_TOKEN(aux_sym_variable_ref_token1); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(206); + END_STATE(); + case 207: + ACCEPT_TOKEN(anon_sym_RBRACE_RBRACE); + END_STATE(); + default: + return false; + } +} + +static const TSLexerMode ts_lex_modes[STATE_COUNT] = { + [0] = {.lex_state = 0}, + [1] = {.lex_state = 88}, + [2] = {.lex_state = 88}, + [3] = {.lex_state = 88}, + [4] = {.lex_state = 87}, + [5] = {.lex_state = 87}, + [6] = {.lex_state = 87}, + [7] = {.lex_state = 87}, + [8] = {.lex_state = 87}, + [9] = {.lex_state = 88}, + [10] = {.lex_state = 88}, + [11] = {.lex_state = 88}, + [12] = {.lex_state = 88}, + [13] = {.lex_state = 88}, + [14] = {.lex_state = 88}, + [15] = {.lex_state = 88}, + [16] = {.lex_state = 88}, + [17] = {.lex_state = 88}, + [18] = {.lex_state = 88}, + [19] = {.lex_state = 88}, + [20] = {.lex_state = 88}, + [21] = {.lex_state = 88}, + [22] = {.lex_state = 88}, + [23] = {.lex_state = 88}, + [24] = {.lex_state = 88}, + [25] = {.lex_state = 88}, + [26] = {.lex_state = 88}, + [27] = {.lex_state = 88}, + [28] = {.lex_state = 88}, + [29] = {.lex_state = 6}, + [30] = {.lex_state = 9}, + [31] = {.lex_state = 10}, + [32] = {.lex_state = 10}, + [33] = {.lex_state = 10}, + [34] = {.lex_state = 10}, + [35] = {.lex_state = 3}, + [36] = {.lex_state = 0}, + [37] = {.lex_state = 0}, + [38] = {.lex_state = 7}, + [39] = {.lex_state = 7}, + [40] = {.lex_state = 7}, + [41] = {.lex_state = 3}, + [42] = {.lex_state = 3}, + [43] = {.lex_state = 7}, + [44] = {.lex_state = 7}, + [45] = {.lex_state = 8}, + [46] = {.lex_state = 0}, + [47] = {.lex_state = 9}, + [48] = {.lex_state = 0}, + [49] = {.lex_state = 7}, + [50] = {.lex_state = 0}, + [51] = {.lex_state = 3}, + [52] = {.lex_state = 0}, + [53] = {.lex_state = 0}, + 
[54] = {.lex_state = 6}, + [55] = {.lex_state = 0}, + [56] = {.lex_state = 0}, + [57] = {.lex_state = 0}, + [58] = {.lex_state = 0}, + [59] = {.lex_state = 0}, + [60] = {.lex_state = 8}, + [61] = {.lex_state = 93}, + [62] = {.lex_state = 0}, + [63] = {.lex_state = 0}, + [64] = {.lex_state = 0}, + [65] = {.lex_state = 0}, + [66] = {.lex_state = 0}, + [67] = {.lex_state = 8}, + [68] = {.lex_state = 8}, + [69] = {.lex_state = 8}, +}; + +static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = { + [STATE(0)] = { + [ts_builtin_sym_end] = ACTIONS(1), + [aux_sym_newline_token1] = ACTIONS(1), + [anon_sym_POUND] = ACTIONS(1), + [anon_sym_LBRACK] = ACTIONS(1), + [anon_sym_meta] = ACTIONS(1), + [anon_sym_RBRACK] = ACTIONS(1), + [anon_sym_vars] = ACTIONS(1), + [anon_sym_request] = ACTIONS(1), + [anon_sym_DOT] = ACTIONS(1), + [anon_sym_headers] = ACTIONS(1), + [anon_sym_body] = ACTIONS(1), + [anon_sym_assert] = ACTIONS(1), + [anon_sym_script] = ACTIONS(1), + [anon_sym_EQ] = ACTIONS(1), + [anon_sym_exists] = ACTIONS(1), + [anon_sym_contains] = ACTIONS(1), + [anon_sym_matches] = ACTIONS(1), + [anon_sym_LT] = ACTIONS(1), + [anon_sym_GT] = ACTIONS(1), + [sym_duration_value] = ACTIONS(1), + [anon_sym_GET] = ACTIONS(1), + [anon_sym_POST] = ACTIONS(1), + [anon_sym_PUT] = ACTIONS(1), + [anon_sym_PATCH] = ACTIONS(1), + [anon_sym_DELETE] = ACTIONS(1), + [anon_sym_HEAD] = ACTIONS(1), + [anon_sym_OPTIONS] = ACTIONS(1), + [anon_sym_DQUOTE] = ACTIONS(1), + [anon_sym_DQUOTE_DQUOTE_DQUOTE] = ACTIONS(1), + [anon_sym_COMMA] = ACTIONS(1), + [anon_sym_LBRACE_LBRACE] = ACTIONS(1), + [anon_sym_RBRACE_RBRACE] = ACTIONS(1), + }, + [STATE(1)] = { + [sym_source_file] = STATE(58), + [sym_newline] = STATE(3), + [sym_comment] = STATE(3), + [sym_shorthand_request] = STATE(3), + [sym_section_header] = STATE(3), + [sym_pair] = STATE(3), + [sym_assertion_exists] = STATE(3), + [sym_assertion_contains] = STATE(3), + [sym_assertion_matches] = STATE(3), + [sym_assertion_lt] = STATE(3), + 
[sym_assertion_gt] = STATE(3), + [sym_http_method] = STATE(30), + [sym_triple_quoted_string] = STATE(3), + [aux_sym_source_file_repeat1] = STATE(3), + [ts_builtin_sym_end] = ACTIONS(3), + [aux_sym_newline_token1] = ACTIONS(5), + [anon_sym_POUND] = ACTIONS(7), + [anon_sym_LBRACK] = ACTIONS(9), + [anon_sym_GET] = ACTIONS(11), + [anon_sym_POST] = ACTIONS(11), + [anon_sym_PUT] = ACTIONS(11), + [anon_sym_PATCH] = ACTIONS(11), + [anon_sym_DELETE] = ACTIONS(11), + [anon_sym_HEAD] = ACTIONS(11), + [anon_sym_OPTIONS] = ACTIONS(11), + [sym_key] = ACTIONS(13), + [anon_sym_DQUOTE_DQUOTE_DQUOTE] = ACTIONS(15), + }, +}; + +static const uint16_t ts_small_parse_table[] = { + [0] = 9, + ACTIONS(17), 1, + ts_builtin_sym_end, + ACTIONS(19), 1, + aux_sym_newline_token1, + ACTIONS(22), 1, + anon_sym_POUND, + ACTIONS(25), 1, + anon_sym_LBRACK, + ACTIONS(31), 1, + sym_key, + ACTIONS(34), 1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + STATE(30), 1, + sym_http_method, + ACTIONS(28), 7, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + STATE(2), 12, + sym_newline, + sym_comment, + sym_shorthand_request, + sym_section_header, + sym_pair, + sym_assertion_exists, + sym_assertion_contains, + sym_assertion_matches, + sym_assertion_lt, + sym_assertion_gt, + sym_triple_quoted_string, + aux_sym_source_file_repeat1, + [45] = 9, + ACTIONS(5), 1, + aux_sym_newline_token1, + ACTIONS(7), 1, + anon_sym_POUND, + ACTIONS(9), 1, + anon_sym_LBRACK, + ACTIONS(13), 1, + sym_key, + ACTIONS(15), 1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(37), 1, + ts_builtin_sym_end, + STATE(30), 1, + sym_http_method, + ACTIONS(11), 7, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + STATE(2), 12, + sym_newline, + sym_comment, + sym_shorthand_request, + sym_section_header, + sym_pair, + sym_assertion_exists, + sym_assertion_contains, + sym_assertion_matches, + sym_assertion_lt, + 
sym_assertion_gt, + sym_triple_quoted_string, + aux_sym_source_file_repeat1, + [90] = 6, + ACTIONS(43), 1, + sym_text_fragment, + ACTIONS(45), 1, + anon_sym_DQUOTE, + ACTIONS(47), 1, + anon_sym_LBRACE_LBRACE, + ACTIONS(39), 3, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + STATE(5), 3, + sym_quoted_string, + sym_variable_ref, + aux_sym_value_repeat1, + ACTIONS(41), 10, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [122] = 6, + ACTIONS(53), 1, + sym_text_fragment, + ACTIONS(56), 1, + anon_sym_DQUOTE, + ACTIONS(59), 1, + anon_sym_LBRACE_LBRACE, + ACTIONS(49), 3, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + STATE(5), 3, + sym_quoted_string, + sym_variable_ref, + aux_sym_value_repeat1, + ACTIONS(51), 10, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [154] = 2, + ACTIONS(62), 4, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + anon_sym_LBRACE_LBRACE, + ACTIONS(64), 12, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + sym_text_fragment, + anon_sym_DQUOTE, + [175] = 2, + ACTIONS(66), 4, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + anon_sym_LBRACE_LBRACE, + ACTIONS(68), 12, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + sym_text_fragment, + anon_sym_DQUOTE, + [196] = 2, + ACTIONS(70), 4, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + anon_sym_LBRACE_LBRACE, + ACTIONS(72), 12, + anon_sym_POUND, + 
anon_sym_LBRACK, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + sym_text_fragment, + anon_sym_DQUOTE, + [217] = 2, + ACTIONS(66), 7, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_RBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + anon_sym_COMMA, + ACTIONS(68), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [237] = 2, + ACTIONS(62), 7, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_RBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + anon_sym_COMMA, + ACTIONS(64), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [257] = 2, + ACTIONS(74), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(76), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [275] = 2, + ACTIONS(78), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(80), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [293] = 2, + ACTIONS(82), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(84), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [311] = 2, + ACTIONS(86), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(88), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + 
anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [329] = 2, + ACTIONS(90), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(92), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [347] = 2, + ACTIONS(94), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(96), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [365] = 2, + ACTIONS(98), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(100), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [383] = 2, + ACTIONS(102), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(104), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [401] = 2, + ACTIONS(106), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(108), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [419] = 2, + ACTIONS(110), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(112), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [437] = 2, + ACTIONS(114), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + 
anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(116), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [455] = 2, + ACTIONS(118), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(120), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [473] = 2, + ACTIONS(122), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(124), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [491] = 2, + ACTIONS(126), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(128), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [509] = 2, + ACTIONS(130), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(132), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [527] = 2, + ACTIONS(134), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(136), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [545] = 2, + ACTIONS(70), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(72), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + 
anon_sym_OPTIONS, + sym_key, + [563] = 2, + ACTIONS(138), 5, + ts_builtin_sym_end, + aux_sym_newline_token1, + anon_sym_POUND, + anon_sym_LBRACK, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(140), 8, + anon_sym_GET, + anon_sym_POST, + anon_sym_PUT, + anon_sym_PATCH, + anon_sym_DELETE, + anon_sym_HEAD, + anon_sym_OPTIONS, + sym_key, + [581] = 6, + ACTIONS(47), 1, + anon_sym_LBRACE_LBRACE, + ACTIONS(142), 1, + anon_sym_LBRACK, + ACTIONS(144), 1, + sym_text_fragment, + ACTIONS(146), 1, + anon_sym_DQUOTE, + STATE(15), 2, + sym_value, + sym_array_value, + STATE(4), 3, + sym_quoted_string, + sym_variable_ref, + aux_sym_value_repeat1, + [603] = 5, + ACTIONS(47), 1, + anon_sym_LBRACE_LBRACE, + ACTIONS(146), 1, + anon_sym_DQUOTE, + ACTIONS(148), 1, + sym_text_fragment, + STATE(14), 1, + sym_value, + STATE(4), 3, + sym_quoted_string, + sym_variable_ref, + aux_sym_value_repeat1, + [621] = 5, + ACTIONS(152), 1, + anon_sym_DQUOTE, + ACTIONS(154), 1, + anon_sym_LBRACE_LBRACE, + STATE(16), 1, + sym_assertion_value, + ACTIONS(150), 2, + sym_duration_value, + sym_raw_value, + STATE(11), 2, + sym_quoted_string, + sym_variable_ref, + [639] = 5, + ACTIONS(152), 1, + anon_sym_DQUOTE, + ACTIONS(154), 1, + anon_sym_LBRACE_LBRACE, + STATE(19), 1, + sym_assertion_value, + ACTIONS(150), 2, + sym_duration_value, + sym_raw_value, + STATE(11), 2, + sym_quoted_string, + sym_variable_ref, + [657] = 5, + ACTIONS(152), 1, + anon_sym_DQUOTE, + ACTIONS(154), 1, + anon_sym_LBRACE_LBRACE, + STATE(18), 1, + sym_assertion_value, + ACTIONS(150), 2, + sym_duration_value, + sym_raw_value, + STATE(11), 2, + sym_quoted_string, + sym_variable_ref, + [675] = 5, + ACTIONS(152), 1, + anon_sym_DQUOTE, + ACTIONS(154), 1, + anon_sym_LBRACE_LBRACE, + STATE(20), 1, + sym_assertion_value, + ACTIONS(150), 2, + sym_duration_value, + sym_raw_value, + STATE(11), 2, + sym_quoted_string, + sym_variable_ref, + [693] = 5, + ACTIONS(156), 1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(158), 1, + sym_body_text, + ACTIONS(160), 1, 
+ anon_sym_LBRACE_LBRACE, + STATE(62), 1, + sym_body_content, + STATE(42), 2, + sym_variable_ref, + aux_sym_body_content_repeat1, + [710] = 6, + ACTIONS(162), 1, + anon_sym_EQ, + ACTIONS(164), 1, + anon_sym_exists, + ACTIONS(166), 1, + anon_sym_contains, + ACTIONS(168), 1, + anon_sym_matches, + ACTIONS(170), 1, + anon_sym_LT, + ACTIONS(172), 1, + anon_sym_GT, + [729] = 2, + ACTIONS(176), 1, + anon_sym_request, + ACTIONS(174), 4, + anon_sym_meta, + anon_sym_vars, + anon_sym_assert, + anon_sym_script, + [739] = 4, + ACTIONS(178), 1, + anon_sym_DQUOTE, + ACTIONS(180), 1, + sym_string_content, + ACTIONS(182), 1, + anon_sym_LBRACE_LBRACE, + STATE(39), 2, + sym_variable_ref, + aux_sym_quoted_string_repeat1, + [753] = 4, + ACTIONS(182), 1, + anon_sym_LBRACE_LBRACE, + ACTIONS(184), 1, + anon_sym_DQUOTE, + ACTIONS(186), 1, + sym_string_content, + STATE(40), 2, + sym_variable_ref, + aux_sym_quoted_string_repeat1, + [767] = 4, + ACTIONS(188), 1, + anon_sym_DQUOTE, + ACTIONS(190), 1, + sym_string_content, + ACTIONS(193), 1, + anon_sym_LBRACE_LBRACE, + STATE(40), 2, + sym_variable_ref, + aux_sym_quoted_string_repeat1, + [781] = 4, + ACTIONS(196), 1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(198), 1, + sym_body_text, + ACTIONS(201), 1, + anon_sym_LBRACE_LBRACE, + STATE(41), 2, + sym_variable_ref, + aux_sym_body_content_repeat1, + [795] = 4, + ACTIONS(160), 1, + anon_sym_LBRACE_LBRACE, + ACTIONS(204), 1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + ACTIONS(206), 1, + sym_body_text, + STATE(41), 2, + sym_variable_ref, + aux_sym_body_content_repeat1, + [809] = 4, + ACTIONS(182), 1, + anon_sym_LBRACE_LBRACE, + ACTIONS(208), 1, + anon_sym_DQUOTE, + ACTIONS(210), 1, + sym_string_content, + STATE(44), 2, + sym_variable_ref, + aux_sym_quoted_string_repeat1, + [823] = 4, + ACTIONS(182), 1, + anon_sym_LBRACE_LBRACE, + ACTIONS(186), 1, + sym_string_content, + ACTIONS(212), 1, + anon_sym_DQUOTE, + STATE(40), 2, + sym_variable_ref, + aux_sym_quoted_string_repeat1, + [837] = 3, + ACTIONS(214), 1, + 
anon_sym_RBRACK, + ACTIONS(216), 1, + anon_sym_DQUOTE, + STATE(46), 1, + sym_quoted_string, + [847] = 3, + ACTIONS(218), 1, + anon_sym_RBRACK, + ACTIONS(220), 1, + anon_sym_COMMA, + STATE(48), 1, + aux_sym_array_value_repeat1, + [857] = 1, + ACTIONS(222), 3, + sym_text_fragment, + anon_sym_DQUOTE, + anon_sym_LBRACE_LBRACE, + [863] = 3, + ACTIONS(220), 1, + anon_sym_COMMA, + ACTIONS(224), 1, + anon_sym_RBRACK, + STATE(50), 1, + aux_sym_array_value_repeat1, + [873] = 2, + ACTIONS(70), 1, + sym_string_content, + ACTIONS(72), 2, + anon_sym_DQUOTE, + anon_sym_LBRACE_LBRACE, + [881] = 3, + ACTIONS(226), 1, + anon_sym_RBRACK, + ACTIONS(228), 1, + anon_sym_COMMA, + STATE(50), 1, + aux_sym_array_value_repeat1, + [891] = 1, + ACTIONS(72), 3, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + sym_body_text, + anon_sym_LBRACE_LBRACE, + [897] = 1, + ACTIONS(231), 2, + anon_sym_headers, + anon_sym_body, + [902] = 2, + ACTIONS(233), 1, + anon_sym_RBRACK, + ACTIONS(235), 1, + anon_sym_DOT, + [909] = 2, + ACTIONS(216), 1, + anon_sym_DQUOTE, + STATE(55), 1, + sym_quoted_string, + [916] = 1, + ACTIONS(226), 2, + anon_sym_RBRACK, + anon_sym_COMMA, + [921] = 1, + ACTIONS(233), 1, + anon_sym_RBRACK, + [925] = 1, + ACTIONS(237), 1, + anon_sym_RBRACE_RBRACE, + [929] = 1, + ACTIONS(239), 1, + ts_builtin_sym_end, + [933] = 1, + ACTIONS(241), 1, + anon_sym_RBRACK, + [937] = 1, + ACTIONS(243), 1, + aux_sym_variable_ref_token1, + [941] = 1, + ACTIONS(245), 1, + aux_sym_comment_token1, + [945] = 1, + ACTIONS(247), 1, + anon_sym_DQUOTE_DQUOTE_DQUOTE, + [949] = 1, + ACTIONS(249), 1, + aux_sym_newline_token1, + [953] = 1, + ACTIONS(251), 1, + anon_sym_RBRACE_RBRACE, + [957] = 1, + ACTIONS(253), 1, + anon_sym_RBRACE_RBRACE, + [961] = 1, + ACTIONS(255), 1, + anon_sym_RBRACE_RBRACE, + [965] = 1, + ACTIONS(257), 1, + aux_sym_variable_ref_token1, + [969] = 1, + ACTIONS(259), 1, + aux_sym_variable_ref_token1, + [973] = 1, + ACTIONS(261), 1, + aux_sym_variable_ref_token1, +}; + +static const uint32_t 
ts_small_parse_table_map[] = { + [SMALL_STATE(2)] = 0, + [SMALL_STATE(3)] = 45, + [SMALL_STATE(4)] = 90, + [SMALL_STATE(5)] = 122, + [SMALL_STATE(6)] = 154, + [SMALL_STATE(7)] = 175, + [SMALL_STATE(8)] = 196, + [SMALL_STATE(9)] = 217, + [SMALL_STATE(10)] = 237, + [SMALL_STATE(11)] = 257, + [SMALL_STATE(12)] = 275, + [SMALL_STATE(13)] = 293, + [SMALL_STATE(14)] = 311, + [SMALL_STATE(15)] = 329, + [SMALL_STATE(16)] = 347, + [SMALL_STATE(17)] = 365, + [SMALL_STATE(18)] = 383, + [SMALL_STATE(19)] = 401, + [SMALL_STATE(20)] = 419, + [SMALL_STATE(21)] = 437, + [SMALL_STATE(22)] = 455, + [SMALL_STATE(23)] = 473, + [SMALL_STATE(24)] = 491, + [SMALL_STATE(25)] = 509, + [SMALL_STATE(26)] = 527, + [SMALL_STATE(27)] = 545, + [SMALL_STATE(28)] = 563, + [SMALL_STATE(29)] = 581, + [SMALL_STATE(30)] = 603, + [SMALL_STATE(31)] = 621, + [SMALL_STATE(32)] = 639, + [SMALL_STATE(33)] = 657, + [SMALL_STATE(34)] = 675, + [SMALL_STATE(35)] = 693, + [SMALL_STATE(36)] = 710, + [SMALL_STATE(37)] = 729, + [SMALL_STATE(38)] = 739, + [SMALL_STATE(39)] = 753, + [SMALL_STATE(40)] = 767, + [SMALL_STATE(41)] = 781, + [SMALL_STATE(42)] = 795, + [SMALL_STATE(43)] = 809, + [SMALL_STATE(44)] = 823, + [SMALL_STATE(45)] = 837, + [SMALL_STATE(46)] = 847, + [SMALL_STATE(47)] = 857, + [SMALL_STATE(48)] = 863, + [SMALL_STATE(49)] = 873, + [SMALL_STATE(50)] = 881, + [SMALL_STATE(51)] = 891, + [SMALL_STATE(52)] = 897, + [SMALL_STATE(53)] = 902, + [SMALL_STATE(54)] = 909, + [SMALL_STATE(55)] = 916, + [SMALL_STATE(56)] = 921, + [SMALL_STATE(57)] = 925, + [SMALL_STATE(58)] = 929, + [SMALL_STATE(59)] = 933, + [SMALL_STATE(60)] = 937, + [SMALL_STATE(61)] = 941, + [SMALL_STATE(62)] = 945, + [SMALL_STATE(63)] = 949, + [SMALL_STATE(64)] = 953, + [SMALL_STATE(65)] = 957, + [SMALL_STATE(66)] = 961, + [SMALL_STATE(67)] = 965, + [SMALL_STATE(68)] = 969, + [SMALL_STATE(69)] = 973, +}; + +static const TSParseActionEntry ts_parse_actions[] = { + [0] = {.entry = {.count = 0, .reusable = false}}, + [1] = {.entry = {.count = 1, 
.reusable = false}}, RECOVER(), + [3] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 0, 0, 0), + [5] = {.entry = {.count = 1, .reusable = true}}, SHIFT(13), + [7] = {.entry = {.count = 1, .reusable = true}}, SHIFT(61), + [9] = {.entry = {.count = 1, .reusable = true}}, SHIFT(37), + [11] = {.entry = {.count = 1, .reusable = false}}, SHIFT(47), + [13] = {.entry = {.count = 1, .reusable = false}}, SHIFT(36), + [15] = {.entry = {.count = 1, .reusable = true}}, SHIFT(63), + [17] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), + [19] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(13), + [22] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(61), + [25] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(37), + [28] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(47), + [31] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(36), + [34] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(63), + [37] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 1, 0, 0), + [39] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_value, 1, 0, 0), + [41] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_value, 1, 0, 0), + [43] = {.entry = {.count = 1, .reusable = false}}, SHIFT(5), + [45] = {.entry = {.count = 1, .reusable = false}}, SHIFT(38), + [47] = {.entry = {.count = 1, .reusable = true}}, SHIFT(60), + [49] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_value_repeat1, 2, 0, 0), + [51] = {.entry = {.count = 1, .reusable = false}}, REDUCE(aux_sym_value_repeat1, 2, 0, 0), + [53] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_value_repeat1, 2, 
0, 0), SHIFT_REPEAT(5), + [56] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_value_repeat1, 2, 0, 0), SHIFT_REPEAT(38), + [59] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_value_repeat1, 2, 0, 0), SHIFT_REPEAT(60), + [62] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_quoted_string, 2, 0, 0), + [64] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_quoted_string, 2, 0, 0), + [66] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_quoted_string, 3, 0, 0), + [68] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_quoted_string, 3, 0, 0), + [70] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_variable_ref, 3, 0, 0), + [72] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_variable_ref, 3, 0, 0), + [74] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_assertion_value, 1, 0, 0), + [76] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_assertion_value, 1, 0, 0), + [78] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_comment, 2, 0, 0), + [80] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_comment, 2, 0, 0), + [82] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_newline, 1, 0, 0), + [84] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_newline, 1, 0, 0), + [86] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_shorthand_request, 2, 0, 0), + [88] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_shorthand_request, 2, 0, 0), + [90] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_pair, 3, 0, 0), + [92] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_pair, 3, 0, 0), + [94] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_assertion_contains, 3, 0, 0), + [96] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_assertion_contains, 3, 0, 0), + [98] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_section_header, 3, 0, 0), + [100] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_section_header, 3, 0, 0), + 
[102] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_assertion_matches, 3, 0, 0), + [104] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_assertion_matches, 3, 0, 0), + [106] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_assertion_lt, 3, 0, 0), + [108] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_assertion_lt, 3, 0, 0), + [110] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_assertion_gt, 3, 0, 0), + [112] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_assertion_gt, 3, 0, 0), + [114] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_triple_quoted_string, 3, 0, 0), + [116] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_triple_quoted_string, 3, 0, 0), + [118] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_array_value, 3, 0, 0), + [120] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_array_value, 3, 0, 0), + [122] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_assertion_exists, 2, 0, 0), + [124] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_assertion_exists, 2, 0, 0), + [126] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_array_value, 2, 0, 0), + [128] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_array_value, 2, 0, 0), + [130] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_triple_quoted_string, 4, 0, 0), + [132] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_triple_quoted_string, 4, 0, 0), + [134] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_array_value, 4, 0, 0), + [136] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_array_value, 4, 0, 0), + [138] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_section_header, 5, 0, 0), + [140] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_section_header, 5, 0, 0), + [142] = {.entry = {.count = 1, .reusable = false}}, SHIFT(45), + [144] = {.entry = {.count = 1, .reusable = false}}, SHIFT(4), + [146] = {.entry = {.count = 1, .reusable = true}}, 
SHIFT(38), + [148] = {.entry = {.count = 1, .reusable = true}}, SHIFT(4), + [150] = {.entry = {.count = 1, .reusable = false}}, SHIFT(11), + [152] = {.entry = {.count = 1, .reusable = false}}, SHIFT(43), + [154] = {.entry = {.count = 1, .reusable = false}}, SHIFT(67), + [156] = {.entry = {.count = 1, .reusable = false}}, SHIFT(21), + [158] = {.entry = {.count = 1, .reusable = false}}, SHIFT(42), + [160] = {.entry = {.count = 1, .reusable = false}}, SHIFT(68), + [162] = {.entry = {.count = 1, .reusable = true}}, SHIFT(29), + [164] = {.entry = {.count = 1, .reusable = true}}, SHIFT(23), + [166] = {.entry = {.count = 1, .reusable = true}}, SHIFT(31), + [168] = {.entry = {.count = 1, .reusable = true}}, SHIFT(33), + [170] = {.entry = {.count = 1, .reusable = true}}, SHIFT(32), + [172] = {.entry = {.count = 1, .reusable = true}}, SHIFT(34), + [174] = {.entry = {.count = 1, .reusable = true}}, SHIFT(56), + [176] = {.entry = {.count = 1, .reusable = true}}, SHIFT(53), + [178] = {.entry = {.count = 1, .reusable = false}}, SHIFT(6), + [180] = {.entry = {.count = 1, .reusable = true}}, SHIFT(39), + [182] = {.entry = {.count = 1, .reusable = false}}, SHIFT(69), + [184] = {.entry = {.count = 1, .reusable = false}}, SHIFT(7), + [186] = {.entry = {.count = 1, .reusable = true}}, SHIFT(40), + [188] = {.entry = {.count = 1, .reusable = false}}, REDUCE(aux_sym_quoted_string_repeat1, 2, 0, 0), + [190] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_quoted_string_repeat1, 2, 0, 0), SHIFT_REPEAT(40), + [193] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_quoted_string_repeat1, 2, 0, 0), SHIFT_REPEAT(69), + [196] = {.entry = {.count = 1, .reusable = false}}, REDUCE(aux_sym_body_content_repeat1, 2, 0, 0), + [198] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_body_content_repeat1, 2, 0, 0), SHIFT_REPEAT(41), + [201] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_body_content_repeat1, 2, 0, 0), SHIFT_REPEAT(68), + [204] = {.entry = 
{.count = 1, .reusable = false}}, REDUCE(sym_body_content, 1, 0, 0), + [206] = {.entry = {.count = 1, .reusable = false}}, SHIFT(41), + [208] = {.entry = {.count = 1, .reusable = false}}, SHIFT(10), + [210] = {.entry = {.count = 1, .reusable = true}}, SHIFT(44), + [212] = {.entry = {.count = 1, .reusable = false}}, SHIFT(9), + [214] = {.entry = {.count = 1, .reusable = true}}, SHIFT(24), + [216] = {.entry = {.count = 1, .reusable = true}}, SHIFT(43), + [218] = {.entry = {.count = 1, .reusable = true}}, SHIFT(22), + [220] = {.entry = {.count = 1, .reusable = true}}, SHIFT(54), + [222] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_http_method, 1, 0, 0), + [224] = {.entry = {.count = 1, .reusable = true}}, SHIFT(26), + [226] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_array_value_repeat1, 2, 0, 0), + [228] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_array_value_repeat1, 2, 0, 0), SHIFT_REPEAT(54), + [231] = {.entry = {.count = 1, .reusable = true}}, SHIFT(59), + [233] = {.entry = {.count = 1, .reusable = true}}, SHIFT(17), + [235] = {.entry = {.count = 1, .reusable = true}}, SHIFT(52), + [237] = {.entry = {.count = 1, .reusable = true}}, SHIFT(8), + [239] = {.entry = {.count = 1, .reusable = true}}, ACCEPT_INPUT(), + [241] = {.entry = {.count = 1, .reusable = true}}, SHIFT(28), + [243] = {.entry = {.count = 1, .reusable = true}}, SHIFT(57), + [245] = {.entry = {.count = 1, .reusable = true}}, SHIFT(12), + [247] = {.entry = {.count = 1, .reusable = true}}, SHIFT(25), + [249] = {.entry = {.count = 1, .reusable = true}}, SHIFT(35), + [251] = {.entry = {.count = 1, .reusable = true}}, SHIFT(27), + [253] = {.entry = {.count = 1, .reusable = true}}, SHIFT(51), + [255] = {.entry = {.count = 1, .reusable = true}}, SHIFT(49), + [257] = {.entry = {.count = 1, .reusable = true}}, SHIFT(64), + [259] = {.entry = {.count = 1, .reusable = true}}, SHIFT(65), + [261] = {.entry = {.count = 1, .reusable = true}}, SHIFT(66), +}; + +#ifdef 
__cplusplus +extern "C" { +#endif +#ifdef TREE_SITTER_HIDE_SYMBOLS +#define TS_PUBLIC +#elif defined(_WIN32) +#define TS_PUBLIC __declspec(dllexport) +#else +#define TS_PUBLIC __attribute__((visibility("default"))) +#endif + +TS_PUBLIC const TSLanguage *tree_sitter_nap(void) { + static const TSLanguage language = { + .abi_version = LANGUAGE_VERSION, + .symbol_count = SYMBOL_COUNT, + .alias_count = ALIAS_COUNT, + .token_count = TOKEN_COUNT, + .external_token_count = EXTERNAL_TOKEN_COUNT, + .state_count = STATE_COUNT, + .large_state_count = LARGE_STATE_COUNT, + .production_id_count = PRODUCTION_ID_COUNT, + .supertype_count = SUPERTYPE_COUNT, + .field_count = FIELD_COUNT, + .max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH, + .parse_table = &ts_parse_table[0][0], + .small_parse_table = ts_small_parse_table, + .small_parse_table_map = ts_small_parse_table_map, + .parse_actions = ts_parse_actions, + .symbol_names = ts_symbol_names, + .symbol_metadata = ts_symbol_metadata, + .public_symbol_map = ts_symbol_map, + .alias_map = ts_non_terminal_alias_map, + .alias_sequences = &ts_alias_sequences[0][0], + .lex_modes = (const void*)ts_lex_modes, + .lex_fn = ts_lex, + .primary_state_ids = ts_primary_state_ids, + .name = "nap", + .max_reserved_word_set_size = 0, + .metadata = { + .major_version = 0, + .minor_version = 1, + .patch_version = 0, + }, + }; + return &language; +} +#ifdef __cplusplus +} +#endif diff --git a/src/Napper.Zed/grammars/tree-sitter-nap/src/tree_sitter/alloc.h b/src/Napper.Zed/grammars/tree-sitter-nap/src/tree_sitter/alloc.h new file mode 100644 index 0000000..1abdd12 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-nap/src/tree_sitter/alloc.h @@ -0,0 +1,54 @@ +#ifndef TREE_SITTER_ALLOC_H_ +#define TREE_SITTER_ALLOC_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include <stdbool.h> +#include <stdio.h> +#include <stdlib.h> + +// Allow clients to override allocation functions +#ifdef TREE_SITTER_REUSE_ALLOCATOR + +extern void 
*(*ts_current_malloc)(size_t size); +extern void *(*ts_current_calloc)(size_t count, size_t size); +extern void *(*ts_current_realloc)(void *ptr, size_t size); +extern void (*ts_current_free)(void *ptr); + +#ifndef ts_malloc +#define ts_malloc ts_current_malloc +#endif +#ifndef ts_calloc +#define ts_calloc ts_current_calloc +#endif +#ifndef ts_realloc +#define ts_realloc ts_current_realloc +#endif +#ifndef ts_free +#define ts_free ts_current_free +#endif + +#else + +#ifndef ts_malloc +#define ts_malloc malloc +#endif +#ifndef ts_calloc +#define ts_calloc calloc +#endif +#ifndef ts_realloc +#define ts_realloc realloc +#endif +#ifndef ts_free +#define ts_free free +#endif + +#endif + +#ifdef __cplusplus +} +#endif + +#endif // TREE_SITTER_ALLOC_H_ diff --git a/src/Napper.Zed/grammars/tree-sitter-nap/src/tree_sitter/array.h b/src/Napper.Zed/grammars/tree-sitter-nap/src/tree_sitter/array.h new file mode 100644 index 0000000..56fc8cd --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-nap/src/tree_sitter/array.h @@ -0,0 +1,330 @@ +#ifndef TREE_SITTER_ARRAY_H_ +#define TREE_SITTER_ARRAY_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include "./alloc.h" + +#include <assert.h> +#include <stdbool.h> +#include <stdint.h> +#include <stdlib.h> +#include <string.h> + +#ifdef _MSC_VER +#pragma warning(push) +#pragma warning(disable : 4101) +#elif defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wunused-variable" +#endif + +#define Array(T) \ + struct { \ + T *contents; \ + uint32_t size; \ + uint32_t capacity; \ + } + +/// Initialize an array. +#define array_init(self) \ + ((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL) + +/// Create an empty array. +#define array_new() \ + { NULL, 0, 0 } + +/// Get a pointer to the element at a given `index` in the array. 
+#define array_get(self, _index) \ + (assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index]) + +/// Get a pointer to the first element in the array. +#define array_front(self) array_get(self, 0) + +/// Get a pointer to the last element in the array. +#define array_back(self) array_get(self, (self)->size - 1) + +/// Clear the array, setting its size to zero. Note that this does not free any +/// memory allocated for the array's contents. +#define array_clear(self) ((self)->size = 0) + +/// Reserve `new_capacity` elements of space in the array. If `new_capacity` is +/// less than the array's current capacity, this function has no effect. +#define array_reserve(self, new_capacity) \ + ((self)->contents = _array__reserve( \ + (void *)(self)->contents, &(self)->capacity, \ + array_elem_size(self), new_capacity) \ + ) + +/// Free any memory allocated for this array. Note that this does not free any +/// memory allocated for the array's contents. +#define array_delete(self) \ + do { \ + if ((self)->contents) ts_free((self)->contents); \ + (self)->contents = NULL; \ + (self)->size = 0; \ + (self)->capacity = 0; \ + } while (0) + +/// Push a new `element` onto the end of the array. +#define array_push(self, element) \ + do { \ + (self)->contents = _array__grow( \ + (void *)(self)->contents, (self)->size, &(self)->capacity, \ + 1, array_elem_size(self) \ + ); \ + (self)->contents[(self)->size++] = (element); \ + } while(0) + +/// Increase the array's size by `count` elements. +/// New elements are zero-initialized. +#define array_grow_by(self, count) \ + do { \ + if ((count) == 0) break; \ + (self)->contents = _array__grow( \ + (self)->contents, (self)->size, &(self)->capacity, \ + count, array_elem_size(self) \ + ); \ + memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)); \ + (self)->size += (count); \ + } while (0) + +/// Append all elements from one array to the end of another. 
+#define array_push_all(self, other) \ + array_extend((self), (other)->size, (other)->contents) + +/// Append `count` elements to the end of the array, reading their values from the +/// `contents` pointer. +#define array_extend(self, count, other_contents) \ + (self)->contents = _array__splice( \ + (void*)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), (self)->size, 0, count, other_contents \ + ) + +/// Remove `old_count` elements from the array starting at the given `index`. At +/// the same index, insert `new_count` new elements, reading their values from the +/// `new_contents` pointer. +#define array_splice(self, _index, old_count, new_count, new_contents) \ + (self)->contents = _array__splice( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), _index, old_count, new_count, new_contents \ + ) + +/// Insert one `element` into the array at the given `index`. +#define array_insert(self, _index, element) \ + (self)->contents = _array__splice( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), _index, 0, 1, &(element) \ + ) + +/// Remove one element from the array at the given `index`. +#define array_erase(self, _index) \ + _array__erase((void *)(self)->contents, &(self)->size, array_elem_size(self), _index) + +/// Pop the last element off the array, returning the element by value. +#define array_pop(self) ((self)->contents[--(self)->size]) + +/// Assign the contents of one array to another, reallocating if necessary. 
+#define array_assign(self, other) \ + (self)->contents = _array__assign( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + (const void *)(other)->contents, (other)->size, array_elem_size(self) \ + ) + +/// Swap one array with another +#define array_swap(self, other) \ + do { \ + void *_array_swap_tmp = (void *)(self)->contents; \ + (self)->contents = (other)->contents; \ + (other)->contents = _array_swap_tmp; \ + _array__swap(&(self)->size, &(self)->capacity, \ + &(other)->size, &(other)->capacity); \ + } while (0) + +/// Get the size of the array contents +#define array_elem_size(self) (sizeof *(self)->contents) + +/// Search a sorted array for a given `needle` value, using the given `compare` +/// callback to determine the order. +/// +/// If an existing element is found to be equal to `needle`, then the `index` +/// out-parameter is set to the existing value's index, and the `exists` +/// out-parameter is set to true. Otherwise, `index` is set to an index where +/// `needle` should be inserted in order to preserve the sorting, and `exists` +/// is set to false. +#define array_search_sorted_with(self, compare, needle, _index, _exists) \ + _array__search_sorted(self, 0, compare, , needle, _index, _exists) + +/// Search a sorted array for a given `needle` value, using integer comparisons +/// of a given struct field (specified with a leading dot) to determine the order. +/// +/// See also `array_search_sorted_with`. +#define array_search_sorted_by(self, field, needle, _index, _exists) \ + _array__search_sorted(self, 0, _compare_int, field, needle, _index, _exists) + +/// Insert a given `value` into a sorted array, using the given `compare` +/// callback to determine the order. 
+#define array_insert_sorted_with(self, compare, value) \ + do { \ + unsigned _index, _exists; \ + array_search_sorted_with(self, compare, &(value), &_index, &_exists); \ + if (!_exists) array_insert(self, _index, value); \ + } while (0) + +/// Insert a given `value` into a sorted array, using integer comparisons of +/// a given struct field (specified with a leading dot) to determine the order. +/// +/// See also `array_search_sorted_by`. +#define array_insert_sorted_by(self, field, value) \ + do { \ + unsigned _index, _exists; \ + array_search_sorted_by(self, field, (value) field, &_index, &_exists); \ + if (!_exists) array_insert(self, _index, value); \ + } while (0) + +// Private + +// Pointers to individual `Array` fields (rather than the entire `Array` itself) +// are passed to the various `_array__*` functions below to address strict aliasing +// violations that arises when the _entire_ `Array` struct is passed as `Array(void)*`. +// +// The `Array` type itself was not altered as a solution in order to avoid breakage +// with existing consumers (in particular, parsers with external scanners). + +/// This is not what you're looking for, see `array_erase`. +static inline void _array__erase(void* self_contents, uint32_t *size, + size_t element_size, uint32_t index) { + assert(index < *size); + char *contents = (char *)self_contents; + memmove(contents + index * element_size, contents + (index + 1) * element_size, + (*size - index - 1) * element_size); + (*size)--; +} + +/// This is not what you're looking for, see `array_reserve`. 
+static inline void *_array__reserve(void *contents, uint32_t *capacity, + size_t element_size, uint32_t new_capacity) { + void *new_contents = contents; + if (new_capacity > *capacity) { + if (contents) { + new_contents = ts_realloc(contents, new_capacity * element_size); + } else { + new_contents = ts_malloc(new_capacity * element_size); + } + *capacity = new_capacity; + } + return new_contents; +} + +/// This is not what you're looking for, see `array_assign`. +static inline void *_array__assign(void* self_contents, uint32_t *self_size, uint32_t *self_capacity, + const void *other_contents, uint32_t other_size, size_t element_size) { + void *new_contents = _array__reserve(self_contents, self_capacity, element_size, other_size); + *self_size = other_size; + memcpy(new_contents, other_contents, *self_size * element_size); + return new_contents; +} + +/// This is not what you're looking for, see `array_swap`. +static inline void _array__swap(uint32_t *self_size, uint32_t *self_capacity, + uint32_t *other_size, uint32_t *other_capacity) { + uint32_t tmp_size = *self_size; + uint32_t tmp_capacity = *self_capacity; + *self_size = *other_size; + *self_capacity = *other_capacity; + *other_size = tmp_size; + *other_capacity = tmp_capacity; +} + +/// This is not what you're looking for, see `array_push` or `array_grow_by`. +static inline void *_array__grow(void *contents, uint32_t size, uint32_t *capacity, + uint32_t count, size_t element_size) { + void *new_contents = contents; + uint32_t new_size = size + count; + if (new_size > *capacity) { + uint32_t new_capacity = *capacity * 2; + if (new_capacity < 8) new_capacity = 8; + if (new_capacity < new_size) new_capacity = new_size; + new_contents = _array__reserve(contents, capacity, element_size, new_capacity); + } + return new_contents; +} + +/// This is not what you're looking for, see `array_splice`. 
+static inline void *_array__splice(void *self_contents, uint32_t *size, uint32_t *capacity, + size_t element_size, + uint32_t index, uint32_t old_count, + uint32_t new_count, const void *elements) { + uint32_t new_size = *size + new_count - old_count; + uint32_t old_end = index + old_count; + uint32_t new_end = index + new_count; + assert(old_end <= *size); + + void *new_contents = _array__reserve(self_contents, capacity, element_size, new_size); + + char *contents = (char *)new_contents; + if (*size > old_end) { + memmove( + contents + new_end * element_size, + contents + old_end * element_size, + (*size - old_end) * element_size + ); + } + if (new_count > 0) { + if (elements) { + memcpy( + (contents + index * element_size), + elements, + new_count * element_size + ); + } else { + memset( + (contents + index * element_size), + 0, + new_count * element_size + ); + } + } + *size += new_count - old_count; + + return new_contents; +} + +/// A binary search routine, based on Rust's `std::slice::binary_search_by`. +/// This is not what you're looking for, see `array_search_sorted_with` or `array_search_sorted_by`. +#define _array__search_sorted(self, start, compare, suffix, needle, _index, _exists) \ + do { \ + *(_index) = start; \ + *(_exists) = false; \ + uint32_t size = (self)->size - *(_index); \ + if (size == 0) break; \ + int comparison; \ + while (size > 1) { \ + uint32_t half_size = size / 2; \ + uint32_t mid_index = *(_index) + half_size; \ + comparison = compare(&((self)->contents[mid_index] suffix), (needle)); \ + if (comparison <= 0) *(_index) = mid_index; \ + size -= half_size; \ + } \ + comparison = compare(&((self)->contents[*(_index)] suffix), (needle)); \ + if (comparison == 0) *(_exists) = true; \ + else if (comparison < 0) *(_index) += 1; \ + } while (0) + +/// Helper macro for the `_sorted_by` routines below. This takes the left (existing) +/// parameter by reference in order to work with the generic sorting function above. 
+#define _compare_int(a, b) ((int)*(a) - (int)(b)) + +#ifdef _MSC_VER +#pragma warning(pop) +#elif defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic pop +#endif + +#ifdef __cplusplus +} +#endif + +#endif // TREE_SITTER_ARRAY_H_ diff --git a/src/Napper.Zed/grammars/tree-sitter-nap/src/tree_sitter/parser.h b/src/Napper.Zed/grammars/tree-sitter-nap/src/tree_sitter/parser.h new file mode 100644 index 0000000..858107d --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-nap/src/tree_sitter/parser.h @@ -0,0 +1,286 @@ +#ifndef TREE_SITTER_PARSER_H_ +#define TREE_SITTER_PARSER_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include <stdbool.h> +#include <stdint.h> +#include <stdlib.h> + +#define ts_builtin_sym_error ((TSSymbol)-1) +#define ts_builtin_sym_end 0 +#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 + +#ifndef TREE_SITTER_API_H_ +typedef uint16_t TSStateId; +typedef uint16_t TSSymbol; +typedef uint16_t TSFieldId; +typedef struct TSLanguage TSLanguage; +typedef struct TSLanguageMetadata { + uint8_t major_version; + uint8_t minor_version; + uint8_t patch_version; +} TSLanguageMetadata; +#endif + +typedef struct { + TSFieldId field_id; + uint8_t child_index; + bool inherited; +} TSFieldMapEntry; + +// Used to index the field and supertype maps. 
+typedef struct { + uint16_t index; + uint16_t length; +} TSMapSlice; + +typedef struct { + bool visible; + bool named; + bool supertype; +} TSSymbolMetadata; + +typedef struct TSLexer TSLexer; + +struct TSLexer { + int32_t lookahead; + TSSymbol result_symbol; + void (*advance)(TSLexer *, bool); + void (*mark_end)(TSLexer *); + uint32_t (*get_column)(TSLexer *); + bool (*is_at_included_range_start)(const TSLexer *); + bool (*eof)(const TSLexer *); + void (*log)(const TSLexer *, const char *, ...); +}; + +typedef enum { + TSParseActionTypeShift, + TSParseActionTypeReduce, + TSParseActionTypeAccept, + TSParseActionTypeRecover, +} TSParseActionType; + +typedef union { + struct { + uint8_t type; + TSStateId state; + bool extra; + bool repetition; + } shift; + struct { + uint8_t type; + uint8_t child_count; + TSSymbol symbol; + int16_t dynamic_precedence; + uint16_t production_id; + } reduce; + uint8_t type; +} TSParseAction; + +typedef struct { + uint16_t lex_state; + uint16_t external_lex_state; +} TSLexMode; + +typedef struct { + uint16_t lex_state; + uint16_t external_lex_state; + uint16_t reserved_word_set_id; +} TSLexerMode; + +typedef union { + TSParseAction action; + struct { + uint8_t count; + bool reusable; + } entry; +} TSParseActionEntry; + +typedef struct { + int32_t start; + int32_t end; +} TSCharacterRange; + +struct TSLanguage { + uint32_t abi_version; + uint32_t symbol_count; + uint32_t alias_count; + uint32_t token_count; + uint32_t external_token_count; + uint32_t state_count; + uint32_t large_state_count; + uint32_t production_id_count; + uint32_t field_count; + uint16_t max_alias_sequence_length; + const uint16_t *parse_table; + const uint16_t *small_parse_table; + const uint32_t *small_parse_table_map; + const TSParseActionEntry *parse_actions; + const char * const *symbol_names; + const char * const *field_names; + const TSMapSlice *field_map_slices; + const TSFieldMapEntry *field_map_entries; + const TSSymbolMetadata *symbol_metadata; + const 
TSSymbol *public_symbol_map; + const uint16_t *alias_map; + const TSSymbol *alias_sequences; + const TSLexerMode *lex_modes; + bool (*lex_fn)(TSLexer *, TSStateId); + bool (*keyword_lex_fn)(TSLexer *, TSStateId); + TSSymbol keyword_capture_token; + struct { + const bool *states; + const TSSymbol *symbol_map; + void *(*create)(void); + void (*destroy)(void *); + bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); + unsigned (*serialize)(void *, char *); + void (*deserialize)(void *, const char *, unsigned); + } external_scanner; + const TSStateId *primary_state_ids; + const char *name; + const TSSymbol *reserved_words; + uint16_t max_reserved_word_set_size; + uint32_t supertype_count; + const TSSymbol *supertype_symbols; + const TSMapSlice *supertype_map_slices; + const TSSymbol *supertype_map_entries; + TSLanguageMetadata metadata; +}; + +static inline bool set_contains(const TSCharacterRange *ranges, uint32_t len, int32_t lookahead) { + uint32_t index = 0; + uint32_t size = len - index; + while (size > 1) { + uint32_t half_size = size / 2; + uint32_t mid_index = index + half_size; + const TSCharacterRange *range = &ranges[mid_index]; + if (lookahead >= range->start && lookahead <= range->end) { + return true; + } else if (lookahead > range->end) { + index = mid_index; + } + size -= half_size; + } + const TSCharacterRange *range = &ranges[index]; + return (lookahead >= range->start && lookahead <= range->end); +} + +/* + * Lexer Macros + */ + +#ifdef _MSC_VER +#define UNUSED __pragma(warning(suppress : 4101)) +#else +#define UNUSED __attribute__((unused)) +#endif + +#define START_LEXER() \ + bool result = false; \ + bool skip = false; \ + UNUSED \ + bool eof = false; \ + int32_t lookahead; \ + goto start; \ + next_state: \ + lexer->advance(lexer, skip); \ + start: \ + skip = false; \ + lookahead = lexer->lookahead; + +#define ADVANCE(state_value) \ + { \ + state = state_value; \ + goto next_state; \ + } + +#define ADVANCE_MAP(...) 
\ + { \ + static const uint16_t map[] = { __VA_ARGS__ }; \ + for (uint32_t i = 0; i < sizeof(map) / sizeof(map[0]); i += 2) { \ + if (map[i] == lookahead) { \ + state = map[i + 1]; \ + goto next_state; \ + } \ + } \ + } + +#define SKIP(state_value) \ + { \ + skip = true; \ + state = state_value; \ + goto next_state; \ + } + +#define ACCEPT_TOKEN(symbol_value) \ + result = true; \ + lexer->result_symbol = symbol_value; \ + lexer->mark_end(lexer); + +#define END_STATE() return result; + +/* + * Parse Table Macros + */ + +#define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT) + +#define STATE(id) id + +#define ACTIONS(id) id + +#define SHIFT(state_value) \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .state = (state_value) \ + } \ + }} + +#define SHIFT_REPEAT(state_value) \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .state = (state_value), \ + .repetition = true \ + } \ + }} + +#define SHIFT_EXTRA() \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .extra = true \ + } \ + }} + +#define REDUCE(symbol_name, children, precedence, prod_id) \ + {{ \ + .reduce = { \ + .type = TSParseActionTypeReduce, \ + .symbol = symbol_name, \ + .child_count = children, \ + .dynamic_precedence = precedence, \ + .production_id = prod_id \ + }, \ + }} + +#define RECOVER() \ + {{ \ + .type = TSParseActionTypeRecover \ + }} + +#define ACCEPT_INPUT() \ + {{ \ + .type = TSParseActionTypeAccept \ + }} + +#ifdef __cplusplus +} +#endif + +#endif // TREE_SITTER_PARSER_H_ diff --git a/src/Napper.Zed/grammars/tree-sitter-nap/tree-sitter.json b/src/Napper.Zed/grammars/tree-sitter-nap/tree-sitter.json new file mode 100644 index 0000000..8aaaf2d --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-nap/tree-sitter.json @@ -0,0 +1,22 @@ +{ + "metadata": { + "version": "0.1.0", + "license": "MIT", + "description": "Tree-sitter grammar for Nap API testing files", + "authors": [ + { "name": "Christian Findlay" } + ], + "links": { + "repository": 
"https://github.com/nicknap/napper" + } + }, + "grammars": [ + { + "name": "nap", + "scope": "source.nap", + "file-types": ["nap"], + "path": ".", + "highlights": "queries/highlights.scm" + } + ] +} diff --git a/src/Napper.Zed/grammars/tree-sitter-napenv/grammar.js b/src/Napper.Zed/grammars/tree-sitter-napenv/grammar.js new file mode 100644 index 0000000..8dda488 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-napenv/grammar.js @@ -0,0 +1,34 @@ +/// <reference types="tree-sitter-cli/dsl" /> + +module.exports = grammar({ + name: "napenv", + + extras: ($) => [/[ \t]/], + + rules: { + source_file: ($) => + repeat( + choice( + $.pair, + $.comment, + $.newline, + ), + ), + + newline: (_) => /\r?\n/, + + comment: (_) => seq("#", /[^\r\n]*/), + + pair: ($) => + seq($.key, "=", choice($.quoted_string, $.unquoted_value)), + + key: (_) => /[a-zA-Z_][a-zA-Z0-9_\-]*/, + + unquoted_value: (_) => /[^\r\n]+/, + + quoted_string: ($) => + seq('"', optional($.string_content), '"'), + + string_content: (_) => /[^"\\\r\n]+|\\./, + }, +}); diff --git a/src/Napper.Zed/grammars/tree-sitter-napenv/src/grammar.json b/src/Napper.Zed/grammars/tree-sitter-napenv/src/grammar.json new file mode 100644 index 0000000..61cd14a --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-napenv/src/grammar.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://tree-sitter.github.io/tree-sitter/assets/schemas/grammar.schema.json", + "name": "napenv", + "rules": { + "source_file": { + "type": "REPEAT", + "content": { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "pair" + }, + { + "type": "SYMBOL", + "name": "comment" + }, + { + "type": "SYMBOL", + "name": "newline" + } + ] + } + }, + "newline": { + "type": "PATTERN", + "value": "\\r?\\n" + }, + "comment": { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "#" + }, + { + "type": "PATTERN", + "value": "[^\\r\\n]*" + } + ] + }, + "pair": { + "type": "SEQ", + "members": [ + { + "type": "SYMBOL", + "name": "key" + }, + 
{ + "type": "STRING", + "value": "=" + }, + { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "quoted_string" + }, + { + "type": "SYMBOL", + "name": "unquoted_value" + } + ] + } + ] + }, + "key": { + "type": "PATTERN", + "value": "[a-zA-Z_][a-zA-Z0-9_\\-]*" + }, + "unquoted_value": { + "type": "PATTERN", + "value": "[^\\r\\n]+" + }, + "quoted_string": { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "\"" + }, + { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "string_content" + }, + { + "type": "BLANK" + } + ] + }, + { + "type": "STRING", + "value": "\"" + } + ] + }, + "string_content": { + "type": "PATTERN", + "value": "[^\"\\\\\\r\\n]+|\\\\." + } + }, + "extras": [ + { + "type": "PATTERN", + "value": "[ \\t]" + } + ], + "conflicts": [], + "precedences": [], + "externals": [], + "inline": [], + "supertypes": [], + "reserved": {} +} \ No newline at end of file diff --git a/src/Napper.Zed/grammars/tree-sitter-napenv/src/node-types.json b/src/Napper.Zed/grammars/tree-sitter-napenv/src/node-types.json new file mode 100644 index 0000000..e8bab41 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-napenv/src/node-types.json @@ -0,0 +1,97 @@ +[ + { + "type": "comment", + "named": true, + "fields": {} + }, + { + "type": "pair", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": true, + "types": [ + { + "type": "key", + "named": true + }, + { + "type": "quoted_string", + "named": true + }, + { + "type": "unquoted_value", + "named": true + } + ] + } + }, + { + "type": "quoted_string", + "named": true, + "fields": {}, + "children": { + "multiple": false, + "required": false, + "types": [ + { + "type": "string_content", + "named": true + } + ] + } + }, + { + "type": "source_file", + "named": true, + "root": true, + "fields": {}, + "children": { + "multiple": true, + "required": false, + "types": [ + { + "type": "comment", + "named": true + }, + { + "type": "newline", + "named": 
true + }, + { + "type": "pair", + "named": true + } + ] + } + }, + { + "type": "\"", + "named": false + }, + { + "type": "#", + "named": false + }, + { + "type": "=", + "named": false + }, + { + "type": "key", + "named": true + }, + { + "type": "newline", + "named": true + }, + { + "type": "string_content", + "named": true + }, + { + "type": "unquoted_value", + "named": true + } +] \ No newline at end of file diff --git a/src/Napper.Zed/grammars/tree-sitter-napenv/src/parser.c b/src/Napper.Zed/grammars/tree-sitter-napenv/src/parser.c new file mode 100644 index 0000000..c9ce0bd --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-napenv/src/parser.c @@ -0,0 +1,483 @@ +/* Automatically @generated by tree-sitter */ + +#include "tree_sitter/parser.h" + +#if defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic ignored "-Wmissing-field-initializers" +#endif + +#define LANGUAGE_VERSION 14 +#define STATE_COUNT 14 +#define LARGE_STATE_COUNT 2 +#define SYMBOL_COUNT 14 +#define ALIAS_COUNT 0 +#define TOKEN_COUNT 9 +#define EXTERNAL_TOKEN_COUNT 0 +#define FIELD_COUNT 0 +#define MAX_ALIAS_SEQUENCE_LENGTH 3 +#define MAX_RESERVED_WORD_SET_SIZE 0 +#define PRODUCTION_ID_COUNT 1 +#define SUPERTYPE_COUNT 0 + +enum ts_symbol_identifiers { + sym_newline = 1, + anon_sym_POUND = 2, + aux_sym_comment_token1 = 3, + anon_sym_EQ = 4, + sym_key = 5, + sym_unquoted_value = 6, + anon_sym_DQUOTE = 7, + sym_string_content = 8, + sym_source_file = 9, + sym_comment = 10, + sym_pair = 11, + sym_quoted_string = 12, + aux_sym_source_file_repeat1 = 13, +}; + +static const char * const ts_symbol_names[] = { + [ts_builtin_sym_end] = "end", + [sym_newline] = "newline", + [anon_sym_POUND] = "#", + [aux_sym_comment_token1] = "comment_token1", + [anon_sym_EQ] = "=", + [sym_key] = "key", + [sym_unquoted_value] = "unquoted_value", + [anon_sym_DQUOTE] = "\"", + [sym_string_content] = "string_content", + [sym_source_file] = "source_file", + [sym_comment] = "comment", + [sym_pair] = "pair", + 
[sym_quoted_string] = "quoted_string", + [aux_sym_source_file_repeat1] = "source_file_repeat1", +}; + +static const TSSymbol ts_symbol_map[] = { + [ts_builtin_sym_end] = ts_builtin_sym_end, + [sym_newline] = sym_newline, + [anon_sym_POUND] = anon_sym_POUND, + [aux_sym_comment_token1] = aux_sym_comment_token1, + [anon_sym_EQ] = anon_sym_EQ, + [sym_key] = sym_key, + [sym_unquoted_value] = sym_unquoted_value, + [anon_sym_DQUOTE] = anon_sym_DQUOTE, + [sym_string_content] = sym_string_content, + [sym_source_file] = sym_source_file, + [sym_comment] = sym_comment, + [sym_pair] = sym_pair, + [sym_quoted_string] = sym_quoted_string, + [aux_sym_source_file_repeat1] = aux_sym_source_file_repeat1, +}; + +static const TSSymbolMetadata ts_symbol_metadata[] = { + [ts_builtin_sym_end] = { + .visible = false, + .named = true, + }, + [sym_newline] = { + .visible = true, + .named = true, + }, + [anon_sym_POUND] = { + .visible = true, + .named = false, + }, + [aux_sym_comment_token1] = { + .visible = false, + .named = false, + }, + [anon_sym_EQ] = { + .visible = true, + .named = false, + }, + [sym_key] = { + .visible = true, + .named = true, + }, + [sym_unquoted_value] = { + .visible = true, + .named = true, + }, + [anon_sym_DQUOTE] = { + .visible = true, + .named = false, + }, + [sym_string_content] = { + .visible = true, + .named = true, + }, + [sym_source_file] = { + .visible = true, + .named = true, + }, + [sym_comment] = { + .visible = true, + .named = true, + }, + [sym_pair] = { + .visible = true, + .named = true, + }, + [sym_quoted_string] = { + .visible = true, + .named = true, + }, + [aux_sym_source_file_repeat1] = { + .visible = false, + .named = false, + }, +}; + +static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT][MAX_ALIAS_SEQUENCE_LENGTH] = { + [0] = {0}, +}; + +static const uint16_t ts_non_terminal_alias_map[] = { + 0, +}; + +static const TSStateId ts_primary_state_ids[STATE_COUNT] = { + [0] = 0, + [1] = 1, + [2] = 2, + [3] = 3, + [4] = 4, + [5] = 5, + [6] = 
6, + [7] = 7, + [8] = 8, + [9] = 9, + [10] = 10, + [11] = 11, + [12] = 12, + [13] = 13, +}; + +static bool ts_lex(TSLexer *lexer, TSStateId state) { + START_LEXER(); + eof = lexer->eof(lexer); + switch (state) { + case 0: + if (eof) ADVANCE(5); + if (lookahead == '\n') ADVANCE(6); + if (lookahead == '\r') ADVANCE(1); + if (lookahead == '"') ADVANCE(14); + if (lookahead == '#') ADVANCE(7); + if (lookahead == '=') ADVANCE(10); + if (lookahead == '\t' || + lookahead == ' ') SKIP(0); + if (('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(11); + END_STATE(); + case 1: + if (lookahead == '\n') ADVANCE(6); + END_STATE(); + case 2: + if (lookahead == '"') ADVANCE(14); + if (lookahead == '\\') ADVANCE(4); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(17); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(18); + END_STATE(); + case 3: + if (lookahead == '"') ADVANCE(15); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(12); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(13); + END_STATE(); + case 4: + if (lookahead != 0 && + lookahead != '\n') ADVANCE(16); + END_STATE(); + case 5: + ACCEPT_TOKEN(ts_builtin_sym_end); + END_STATE(); + case 6: + ACCEPT_TOKEN(sym_newline); + END_STATE(); + case 7: + ACCEPT_TOKEN(anon_sym_POUND); + END_STATE(); + case 8: + ACCEPT_TOKEN(aux_sym_comment_token1); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(8); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(9); + END_STATE(); + case 9: + ACCEPT_TOKEN(aux_sym_comment_token1); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(9); + END_STATE(); + case 10: + ACCEPT_TOKEN(anon_sym_EQ); + END_STATE(); + case 11: + ACCEPT_TOKEN(sym_key); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 
'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(11); + END_STATE(); + case 12: + ACCEPT_TOKEN(sym_unquoted_value); + if (lookahead == '"') ADVANCE(15); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(12); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(13); + END_STATE(); + case 13: + ACCEPT_TOKEN(sym_unquoted_value); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(13); + END_STATE(); + case 14: + ACCEPT_TOKEN(anon_sym_DQUOTE); + END_STATE(); + case 15: + ACCEPT_TOKEN(anon_sym_DQUOTE); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(13); + END_STATE(); + case 16: + ACCEPT_TOKEN(sym_string_content); + END_STATE(); + case 17: + ACCEPT_TOKEN(sym_string_content); + if (lookahead == '\\') ADVANCE(4); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(17); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"') ADVANCE(18); + END_STATE(); + case 18: + ACCEPT_TOKEN(sym_string_content); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + lookahead != '\\') ADVANCE(18); + END_STATE(); + default: + return false; + } +} + +static const TSLexMode ts_lex_modes[STATE_COUNT] = { + [0] = {.lex_state = 0}, + [1] = {.lex_state = 0}, + [2] = {.lex_state = 0}, + [3] = {.lex_state = 0}, + [4] = {.lex_state = 0}, + [5] = {.lex_state = 0}, + [6] = {.lex_state = 0}, + [7] = {.lex_state = 0}, + [8] = {.lex_state = 3}, + [9] = {.lex_state = 2}, + [10] = {.lex_state = 8}, + [11] = {.lex_state = 0}, + [12] = {.lex_state = 0}, + [13] = {.lex_state = 0}, +}; + +static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = { + [STATE(0)] = { + [ts_builtin_sym_end] = ACTIONS(1), + [sym_newline] = ACTIONS(1), + [anon_sym_POUND] = ACTIONS(1), + [anon_sym_EQ] = ACTIONS(1), + [sym_key] = ACTIONS(1), + [anon_sym_DQUOTE] = ACTIONS(1), + }, + 
[STATE(1)] = { + [sym_source_file] = STATE(12), + [sym_comment] = STATE(2), + [sym_pair] = STATE(2), + [aux_sym_source_file_repeat1] = STATE(2), + [ts_builtin_sym_end] = ACTIONS(3), + [sym_newline] = ACTIONS(5), + [anon_sym_POUND] = ACTIONS(7), + [sym_key] = ACTIONS(9), + }, +}; + +static const uint16_t ts_small_parse_table[] = { + [0] = 5, + ACTIONS(7), 1, + anon_sym_POUND, + ACTIONS(9), 1, + sym_key, + ACTIONS(11), 1, + ts_builtin_sym_end, + ACTIONS(13), 1, + sym_newline, + STATE(3), 3, + sym_comment, + sym_pair, + aux_sym_source_file_repeat1, + [18] = 5, + ACTIONS(15), 1, + ts_builtin_sym_end, + ACTIONS(17), 1, + sym_newline, + ACTIONS(20), 1, + anon_sym_POUND, + ACTIONS(23), 1, + sym_key, + STATE(3), 3, + sym_comment, + sym_pair, + aux_sym_source_file_repeat1, + [36] = 1, + ACTIONS(26), 4, + ts_builtin_sym_end, + sym_newline, + anon_sym_POUND, + sym_key, + [43] = 1, + ACTIONS(28), 4, + ts_builtin_sym_end, + sym_newline, + anon_sym_POUND, + sym_key, + [50] = 1, + ACTIONS(30), 4, + ts_builtin_sym_end, + sym_newline, + anon_sym_POUND, + sym_key, + [57] = 1, + ACTIONS(32), 4, + ts_builtin_sym_end, + sym_newline, + anon_sym_POUND, + sym_key, + [64] = 3, + ACTIONS(34), 1, + sym_unquoted_value, + ACTIONS(36), 1, + anon_sym_DQUOTE, + STATE(5), 1, + sym_quoted_string, + [74] = 2, + ACTIONS(38), 1, + anon_sym_DQUOTE, + ACTIONS(40), 1, + sym_string_content, + [81] = 1, + ACTIONS(42), 1, + aux_sym_comment_token1, + [85] = 1, + ACTIONS(44), 1, + anon_sym_EQ, + [89] = 1, + ACTIONS(46), 1, + ts_builtin_sym_end, + [93] = 1, + ACTIONS(48), 1, + anon_sym_DQUOTE, +}; + +static const uint32_t ts_small_parse_table_map[] = { + [SMALL_STATE(2)] = 0, + [SMALL_STATE(3)] = 18, + [SMALL_STATE(4)] = 36, + [SMALL_STATE(5)] = 43, + [SMALL_STATE(6)] = 50, + [SMALL_STATE(7)] = 57, + [SMALL_STATE(8)] = 64, + [SMALL_STATE(9)] = 74, + [SMALL_STATE(10)] = 81, + [SMALL_STATE(11)] = 85, + [SMALL_STATE(12)] = 89, + [SMALL_STATE(13)] = 93, +}; + +static const TSParseActionEntry ts_parse_actions[] = { 
+ [0] = {.entry = {.count = 0, .reusable = false}}, + [1] = {.entry = {.count = 1, .reusable = false}}, RECOVER(), + [3] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 0, 0, 0), + [5] = {.entry = {.count = 1, .reusable = true}}, SHIFT(2), + [7] = {.entry = {.count = 1, .reusable = true}}, SHIFT(10), + [9] = {.entry = {.count = 1, .reusable = true}}, SHIFT(11), + [11] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 1, 0, 0), + [13] = {.entry = {.count = 1, .reusable = true}}, SHIFT(3), + [15] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), + [17] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(3), + [20] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(10), + [23] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(11), + [26] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_comment, 2, 0, 0), + [28] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_pair, 3, 0, 0), + [30] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_quoted_string, 2, 0, 0), + [32] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_quoted_string, 3, 0, 0), + [34] = {.entry = {.count = 1, .reusable = false}}, SHIFT(5), + [36] = {.entry = {.count = 1, .reusable = false}}, SHIFT(9), + [38] = {.entry = {.count = 1, .reusable = false}}, SHIFT(6), + [40] = {.entry = {.count = 1, .reusable = true}}, SHIFT(13), + [42] = {.entry = {.count = 1, .reusable = true}}, SHIFT(4), + [44] = {.entry = {.count = 1, .reusable = true}}, SHIFT(8), + [46] = {.entry = {.count = 1, .reusable = true}}, ACCEPT_INPUT(), + [48] = {.entry = {.count = 1, .reusable = true}}, SHIFT(7), +}; + +#ifdef __cplusplus +extern "C" { +#endif +#ifdef TREE_SITTER_HIDE_SYMBOLS +#define TS_PUBLIC +#elif defined(_WIN32) +#define TS_PUBLIC __declspec(dllexport) +#else 
+#define TS_PUBLIC __attribute__((visibility("default"))) +#endif + +TS_PUBLIC const TSLanguage *tree_sitter_napenv(void) { + static const TSLanguage language = { + .abi_version = LANGUAGE_VERSION, + .symbol_count = SYMBOL_COUNT, + .alias_count = ALIAS_COUNT, + .token_count = TOKEN_COUNT, + .external_token_count = EXTERNAL_TOKEN_COUNT, + .state_count = STATE_COUNT, + .large_state_count = LARGE_STATE_COUNT, + .production_id_count = PRODUCTION_ID_COUNT, + .field_count = FIELD_COUNT, + .max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH, + .parse_table = &ts_parse_table[0][0], + .small_parse_table = ts_small_parse_table, + .small_parse_table_map = ts_small_parse_table_map, + .parse_actions = ts_parse_actions, + .symbol_names = ts_symbol_names, + .symbol_metadata = ts_symbol_metadata, + .public_symbol_map = ts_symbol_map, + .alias_map = ts_non_terminal_alias_map, + .alias_sequences = &ts_alias_sequences[0][0], + .lex_modes = (const void*)ts_lex_modes, + .lex_fn = ts_lex, + .primary_state_ids = ts_primary_state_ids, + }; + return &language; +} +#ifdef __cplusplus +} +#endif diff --git a/src/Napper.Zed/grammars/tree-sitter-napenv/src/tree_sitter/alloc.h b/src/Napper.Zed/grammars/tree-sitter-napenv/src/tree_sitter/alloc.h new file mode 100644 index 0000000..1abdd12 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-napenv/src/tree_sitter/alloc.h @@ -0,0 +1,54 @@ +#ifndef TREE_SITTER_ALLOC_H_ +#define TREE_SITTER_ALLOC_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include <stdbool.h> +#include <stdio.h> +#include <stdlib.h> + +// Allow clients to override allocation functions +#ifdef TREE_SITTER_REUSE_ALLOCATOR + +extern void *(*ts_current_malloc)(size_t size); +extern void *(*ts_current_calloc)(size_t count, size_t size); +extern void *(*ts_current_realloc)(void *ptr, size_t size); +extern void (*ts_current_free)(void *ptr); + +#ifndef ts_malloc +#define ts_malloc ts_current_malloc +#endif +#ifndef ts_calloc +#define ts_calloc ts_current_calloc +#endif 
+#ifndef ts_realloc +#define ts_realloc ts_current_realloc +#endif +#ifndef ts_free +#define ts_free ts_current_free +#endif + +#else + +#ifndef ts_malloc +#define ts_malloc malloc +#endif +#ifndef ts_calloc +#define ts_calloc calloc +#endif +#ifndef ts_realloc +#define ts_realloc realloc +#endif +#ifndef ts_free +#define ts_free free +#endif + +#endif + +#ifdef __cplusplus +} +#endif + +#endif // TREE_SITTER_ALLOC_H_ diff --git a/src/Napper.Zed/grammars/tree-sitter-napenv/src/tree_sitter/array.h b/src/Napper.Zed/grammars/tree-sitter-napenv/src/tree_sitter/array.h new file mode 100644 index 0000000..56fc8cd --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-napenv/src/tree_sitter/array.h @@ -0,0 +1,330 @@ +#ifndef TREE_SITTER_ARRAY_H_ +#define TREE_SITTER_ARRAY_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include "./alloc.h" + +#include <assert.h> +#include <stdbool.h> +#include <stdint.h> +#include <stdlib.h> +#include <string.h> + +#ifdef _MSC_VER +#pragma warning(push) +#pragma warning(disable : 4101) +#elif defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wunused-variable" +#endif + +#define Array(T) \ + struct { \ + T *contents; \ + uint32_t size; \ + uint32_t capacity; \ + } + +/// Initialize an array. +#define array_init(self) \ + ((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL) + +/// Create an empty array. +#define array_new() \ + { NULL, 0, 0 } + +/// Get a pointer to the element at a given `index` in the array. +#define array_get(self, _index) \ + (assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index]) + +/// Get a pointer to the first element in the array. +#define array_front(self) array_get(self, 0) + +/// Get a pointer to the last element in the array. +#define array_back(self) array_get(self, (self)->size - 1) + +/// Clear the array, setting its size to zero. Note that this does not free any +/// memory allocated for the array's contents. 
+#define array_clear(self) ((self)->size = 0) + +/// Reserve `new_capacity` elements of space in the array. If `new_capacity` is +/// less than the array's current capacity, this function has no effect. +#define array_reserve(self, new_capacity) \ + ((self)->contents = _array__reserve( \ + (void *)(self)->contents, &(self)->capacity, \ + array_elem_size(self), new_capacity) \ + ) + +/// Free any memory allocated for this array. Note that this does not free any +/// memory allocated for the array's contents. +#define array_delete(self) \ + do { \ + if ((self)->contents) ts_free((self)->contents); \ + (self)->contents = NULL; \ + (self)->size = 0; \ + (self)->capacity = 0; \ + } while (0) + +/// Push a new `element` onto the end of the array. +#define array_push(self, element) \ + do { \ + (self)->contents = _array__grow( \ + (void *)(self)->contents, (self)->size, &(self)->capacity, \ + 1, array_elem_size(self) \ + ); \ + (self)->contents[(self)->size++] = (element); \ + } while(0) + +/// Increase the array's size by `count` elements. +/// New elements are zero-initialized. +#define array_grow_by(self, count) \ + do { \ + if ((count) == 0) break; \ + (self)->contents = _array__grow( \ + (self)->contents, (self)->size, &(self)->capacity, \ + count, array_elem_size(self) \ + ); \ + memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)); \ + (self)->size += (count); \ + } while (0) + +/// Append all elements from one array to the end of another. +#define array_push_all(self, other) \ + array_extend((self), (other)->size, (other)->contents) + +/// Append `count` elements to the end of the array, reading their values from the +/// `contents` pointer. +#define array_extend(self, count, other_contents) \ + (self)->contents = _array__splice( \ + (void*)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), (self)->size, 0, count, other_contents \ + ) + +/// Remove `old_count` elements from the array starting at the given `index`. 
At +/// the same index, insert `new_count` new elements, reading their values from the +/// `new_contents` pointer. +#define array_splice(self, _index, old_count, new_count, new_contents) \ + (self)->contents = _array__splice( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), _index, old_count, new_count, new_contents \ + ) + +/// Insert one `element` into the array at the given `index`. +#define array_insert(self, _index, element) \ + (self)->contents = _array__splice( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), _index, 0, 1, &(element) \ + ) + +/// Remove one element from the array at the given `index`. +#define array_erase(self, _index) \ + _array__erase((void *)(self)->contents, &(self)->size, array_elem_size(self), _index) + +/// Pop the last element off the array, returning the element by value. +#define array_pop(self) ((self)->contents[--(self)->size]) + +/// Assign the contents of one array to another, reallocating if necessary. +#define array_assign(self, other) \ + (self)->contents = _array__assign( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + (const void *)(other)->contents, (other)->size, array_elem_size(self) \ + ) + +/// Swap one array with another +#define array_swap(self, other) \ + do { \ + void *_array_swap_tmp = (void *)(self)->contents; \ + (self)->contents = (other)->contents; \ + (other)->contents = _array_swap_tmp; \ + _array__swap(&(self)->size, &(self)->capacity, \ + &(other)->size, &(other)->capacity); \ + } while (0) + +/// Get the size of the array contents +#define array_elem_size(self) (sizeof *(self)->contents) + +/// Search a sorted array for a given `needle` value, using the given `compare` +/// callback to determine the order. +/// +/// If an existing element is found to be equal to `needle`, then the `index` +/// out-parameter is set to the existing value's index, and the `exists` +/// out-parameter is set to true. 
Otherwise, `index` is set to an index where +/// `needle` should be inserted in order to preserve the sorting, and `exists` +/// is set to false. +#define array_search_sorted_with(self, compare, needle, _index, _exists) \ + _array__search_sorted(self, 0, compare, , needle, _index, _exists) + +/// Search a sorted array for a given `needle` value, using integer comparisons +/// of a given struct field (specified with a leading dot) to determine the order. +/// +/// See also `array_search_sorted_with`. +#define array_search_sorted_by(self, field, needle, _index, _exists) \ + _array__search_sorted(self, 0, _compare_int, field, needle, _index, _exists) + +/// Insert a given `value` into a sorted array, using the given `compare` +/// callback to determine the order. +#define array_insert_sorted_with(self, compare, value) \ + do { \ + unsigned _index, _exists; \ + array_search_sorted_with(self, compare, &(value), &_index, &_exists); \ + if (!_exists) array_insert(self, _index, value); \ + } while (0) + +/// Insert a given `value` into a sorted array, using integer comparisons of +/// a given struct field (specified with a leading dot) to determine the order. +/// +/// See also `array_search_sorted_by`. +#define array_insert_sorted_by(self, field, value) \ + do { \ + unsigned _index, _exists; \ + array_search_sorted_by(self, field, (value) field, &_index, &_exists); \ + if (!_exists) array_insert(self, _index, value); \ + } while (0) + +// Private + +// Pointers to individual `Array` fields (rather than the entire `Array` itself) +// are passed to the various `_array__*` functions below to address strict aliasing +// violations that arises when the _entire_ `Array` struct is passed as `Array(void)*`. +// +// The `Array` type itself was not altered as a solution in order to avoid breakage +// with existing consumers (in particular, parsers with external scanners). + +/// This is not what you're looking for, see `array_erase`. 
+static inline void _array__erase(void* self_contents, uint32_t *size, + size_t element_size, uint32_t index) { + assert(index < *size); + char *contents = (char *)self_contents; + memmove(contents + index * element_size, contents + (index + 1) * element_size, + (*size - index - 1) * element_size); + (*size)--; +} + +/// This is not what you're looking for, see `array_reserve`. +static inline void *_array__reserve(void *contents, uint32_t *capacity, + size_t element_size, uint32_t new_capacity) { + void *new_contents = contents; + if (new_capacity > *capacity) { + if (contents) { + new_contents = ts_realloc(contents, new_capacity * element_size); + } else { + new_contents = ts_malloc(new_capacity * element_size); + } + *capacity = new_capacity; + } + return new_contents; +} + +/// This is not what you're looking for, see `array_assign`. +static inline void *_array__assign(void* self_contents, uint32_t *self_size, uint32_t *self_capacity, + const void *other_contents, uint32_t other_size, size_t element_size) { + void *new_contents = _array__reserve(self_contents, self_capacity, element_size, other_size); + *self_size = other_size; + memcpy(new_contents, other_contents, *self_size * element_size); + return new_contents; +} + +/// This is not what you're looking for, see `array_swap`. +static inline void _array__swap(uint32_t *self_size, uint32_t *self_capacity, + uint32_t *other_size, uint32_t *other_capacity) { + uint32_t tmp_size = *self_size; + uint32_t tmp_capacity = *self_capacity; + *self_size = *other_size; + *self_capacity = *other_capacity; + *other_size = tmp_size; + *other_capacity = tmp_capacity; +} + +/// This is not what you're looking for, see `array_push` or `array_grow_by`. 
+static inline void *_array__grow(void *contents, uint32_t size, uint32_t *capacity, + uint32_t count, size_t element_size) { + void *new_contents = contents; + uint32_t new_size = size + count; + if (new_size > *capacity) { + uint32_t new_capacity = *capacity * 2; + if (new_capacity < 8) new_capacity = 8; + if (new_capacity < new_size) new_capacity = new_size; + new_contents = _array__reserve(contents, capacity, element_size, new_capacity); + } + return new_contents; +} + +/// This is not what you're looking for, see `array_splice`. +static inline void *_array__splice(void *self_contents, uint32_t *size, uint32_t *capacity, + size_t element_size, + uint32_t index, uint32_t old_count, + uint32_t new_count, const void *elements) { + uint32_t new_size = *size + new_count - old_count; + uint32_t old_end = index + old_count; + uint32_t new_end = index + new_count; + assert(old_end <= *size); + + void *new_contents = _array__reserve(self_contents, capacity, element_size, new_size); + + char *contents = (char *)new_contents; + if (*size > old_end) { + memmove( + contents + new_end * element_size, + contents + old_end * element_size, + (*size - old_end) * element_size + ); + } + if (new_count > 0) { + if (elements) { + memcpy( + (contents + index * element_size), + elements, + new_count * element_size + ); + } else { + memset( + (contents + index * element_size), + 0, + new_count * element_size + ); + } + } + *size += new_count - old_count; + + return new_contents; +} + +/// A binary search routine, based on Rust's `std::slice::binary_search_by`. +/// This is not what you're looking for, see `array_search_sorted_with` or `array_search_sorted_by`. 
+#define _array__search_sorted(self, start, compare, suffix, needle, _index, _exists) \ + do { \ + *(_index) = start; \ + *(_exists) = false; \ + uint32_t size = (self)->size - *(_index); \ + if (size == 0) break; \ + int comparison; \ + while (size > 1) { \ + uint32_t half_size = size / 2; \ + uint32_t mid_index = *(_index) + half_size; \ + comparison = compare(&((self)->contents[mid_index] suffix), (needle)); \ + if (comparison <= 0) *(_index) = mid_index; \ + size -= half_size; \ + } \ + comparison = compare(&((self)->contents[*(_index)] suffix), (needle)); \ + if (comparison == 0) *(_exists) = true; \ + else if (comparison < 0) *(_index) += 1; \ + } while (0) + +/// Helper macro for the `_sorted_by` routines below. This takes the left (existing) +/// parameter by reference in order to work with the generic sorting function above. +#define _compare_int(a, b) ((int)*(a) - (int)(b)) + +#ifdef _MSC_VER +#pragma warning(pop) +#elif defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic pop +#endif + +#ifdef __cplusplus +} +#endif + +#endif // TREE_SITTER_ARRAY_H_ diff --git a/src/Napper.Zed/grammars/tree-sitter-napenv/src/tree_sitter/parser.h b/src/Napper.Zed/grammars/tree-sitter-napenv/src/tree_sitter/parser.h new file mode 100644 index 0000000..858107d --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-napenv/src/tree_sitter/parser.h @@ -0,0 +1,286 @@ +#ifndef TREE_SITTER_PARSER_H_ +#define TREE_SITTER_PARSER_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include <stdbool.h> +#include <stdint.h> +#include <stdlib.h> + +#define ts_builtin_sym_error ((TSSymbol)-1) +#define ts_builtin_sym_end 0 +#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 + +#ifndef TREE_SITTER_API_H_ +typedef uint16_t TSStateId; +typedef uint16_t TSSymbol; +typedef uint16_t TSFieldId; +typedef struct TSLanguage TSLanguage; +typedef struct TSLanguageMetadata { + uint8_t major_version; + uint8_t minor_version; + uint8_t patch_version; +} TSLanguageMetadata; +#endif + +typedef 
struct { + TSFieldId field_id; + uint8_t child_index; + bool inherited; +} TSFieldMapEntry; + +// Used to index the field and supertype maps. +typedef struct { + uint16_t index; + uint16_t length; +} TSMapSlice; + +typedef struct { + bool visible; + bool named; + bool supertype; +} TSSymbolMetadata; + +typedef struct TSLexer TSLexer; + +struct TSLexer { + int32_t lookahead; + TSSymbol result_symbol; + void (*advance)(TSLexer *, bool); + void (*mark_end)(TSLexer *); + uint32_t (*get_column)(TSLexer *); + bool (*is_at_included_range_start)(const TSLexer *); + bool (*eof)(const TSLexer *); + void (*log)(const TSLexer *, const char *, ...); +}; + +typedef enum { + TSParseActionTypeShift, + TSParseActionTypeReduce, + TSParseActionTypeAccept, + TSParseActionTypeRecover, +} TSParseActionType; + +typedef union { + struct { + uint8_t type; + TSStateId state; + bool extra; + bool repetition; + } shift; + struct { + uint8_t type; + uint8_t child_count; + TSSymbol symbol; + int16_t dynamic_precedence; + uint16_t production_id; + } reduce; + uint8_t type; +} TSParseAction; + +typedef struct { + uint16_t lex_state; + uint16_t external_lex_state; +} TSLexMode; + +typedef struct { + uint16_t lex_state; + uint16_t external_lex_state; + uint16_t reserved_word_set_id; +} TSLexerMode; + +typedef union { + TSParseAction action; + struct { + uint8_t count; + bool reusable; + } entry; +} TSParseActionEntry; + +typedef struct { + int32_t start; + int32_t end; +} TSCharacterRange; + +struct TSLanguage { + uint32_t abi_version; + uint32_t symbol_count; + uint32_t alias_count; + uint32_t token_count; + uint32_t external_token_count; + uint32_t state_count; + uint32_t large_state_count; + uint32_t production_id_count; + uint32_t field_count; + uint16_t max_alias_sequence_length; + const uint16_t *parse_table; + const uint16_t *small_parse_table; + const uint32_t *small_parse_table_map; + const TSParseActionEntry *parse_actions; + const char * const *symbol_names; + const char * const 
*field_names; + const TSMapSlice *field_map_slices; + const TSFieldMapEntry *field_map_entries; + const TSSymbolMetadata *symbol_metadata; + const TSSymbol *public_symbol_map; + const uint16_t *alias_map; + const TSSymbol *alias_sequences; + const TSLexerMode *lex_modes; + bool (*lex_fn)(TSLexer *, TSStateId); + bool (*keyword_lex_fn)(TSLexer *, TSStateId); + TSSymbol keyword_capture_token; + struct { + const bool *states; + const TSSymbol *symbol_map; + void *(*create)(void); + void (*destroy)(void *); + bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); + unsigned (*serialize)(void *, char *); + void (*deserialize)(void *, const char *, unsigned); + } external_scanner; + const TSStateId *primary_state_ids; + const char *name; + const TSSymbol *reserved_words; + uint16_t max_reserved_word_set_size; + uint32_t supertype_count; + const TSSymbol *supertype_symbols; + const TSMapSlice *supertype_map_slices; + const TSSymbol *supertype_map_entries; + TSLanguageMetadata metadata; +}; + +static inline bool set_contains(const TSCharacterRange *ranges, uint32_t len, int32_t lookahead) { + uint32_t index = 0; + uint32_t size = len - index; + while (size > 1) { + uint32_t half_size = size / 2; + uint32_t mid_index = index + half_size; + const TSCharacterRange *range = &ranges[mid_index]; + if (lookahead >= range->start && lookahead <= range->end) { + return true; + } else if (lookahead > range->end) { + index = mid_index; + } + size -= half_size; + } + const TSCharacterRange *range = &ranges[index]; + return (lookahead >= range->start && lookahead <= range->end); +} + +/* + * Lexer Macros + */ + +#ifdef _MSC_VER +#define UNUSED __pragma(warning(suppress : 4101)) +#else +#define UNUSED __attribute__((unused)) +#endif + +#define START_LEXER() \ + bool result = false; \ + bool skip = false; \ + UNUSED \ + bool eof = false; \ + int32_t lookahead; \ + goto start; \ + next_state: \ + lexer->advance(lexer, skip); \ + start: \ + skip = false; \ + lookahead = 
lexer->lookahead; + +#define ADVANCE(state_value) \ + { \ + state = state_value; \ + goto next_state; \ + } + +#define ADVANCE_MAP(...) \ + { \ + static const uint16_t map[] = { __VA_ARGS__ }; \ + for (uint32_t i = 0; i < sizeof(map) / sizeof(map[0]); i += 2) { \ + if (map[i] == lookahead) { \ + state = map[i + 1]; \ + goto next_state; \ + } \ + } \ + } + +#define SKIP(state_value) \ + { \ + skip = true; \ + state = state_value; \ + goto next_state; \ + } + +#define ACCEPT_TOKEN(symbol_value) \ + result = true; \ + lexer->result_symbol = symbol_value; \ + lexer->mark_end(lexer); + +#define END_STATE() return result; + +/* + * Parse Table Macros + */ + +#define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT) + +#define STATE(id) id + +#define ACTIONS(id) id + +#define SHIFT(state_value) \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .state = (state_value) \ + } \ + }} + +#define SHIFT_REPEAT(state_value) \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .state = (state_value), \ + .repetition = true \ + } \ + }} + +#define SHIFT_EXTRA() \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .extra = true \ + } \ + }} + +#define REDUCE(symbol_name, children, precedence, prod_id) \ + {{ \ + .reduce = { \ + .type = TSParseActionTypeReduce, \ + .symbol = symbol_name, \ + .child_count = children, \ + .dynamic_precedence = precedence, \ + .production_id = prod_id \ + }, \ + }} + +#define RECOVER() \ + {{ \ + .type = TSParseActionTypeRecover \ + }} + +#define ACCEPT_INPUT() \ + {{ \ + .type = TSParseActionTypeAccept \ + }} + +#ifdef __cplusplus +} +#endif + +#endif // TREE_SITTER_PARSER_H_ diff --git a/src/Napper.Zed/grammars/tree-sitter-naplist/grammar.js b/src/Napper.Zed/grammars/tree-sitter-naplist/grammar.js new file mode 100644 index 0000000..ea58c38 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-naplist/grammar.js @@ -0,0 +1,49 @@ +/// <reference types="tree-sitter-cli/dsl" /> + +module.exports = grammar({ + name: "naplist", + + 
extras: ($) => [/[ \t]/], + + rules: { + source_file: ($) => + repeat( + choice( + $.section_header, + $.pair, + $.step, + $.comment, + $.newline, + ), + ), + + newline: (_) => /\r?\n/, + + comment: (_) => seq("#", /[^\r\n]*/), + + // --- Section headers (flat) --- + section_header: (_) => + choice( + seq("[", "meta", "]"), + seq("[", "vars", "]"), + seq("[", "steps", "]"), + ), + + // --- Key-value pairs --- + pair: ($) => + seq($.key, "=", choice($.quoted_string, $.unquoted_value)), + + // --- Steps (file paths) --- + step: (_) => /[.\/][^\s#\r\n][^\r\n]*/, + + // --- Tokens --- + key: (_) => /[a-zA-Z_][a-zA-Z0-9_\-]*/, + + unquoted_value: (_) => /[^\r\n]+/, + + quoted_string: ($) => + seq('"', optional($.string_content), '"'), + + string_content: (_) => /[^"\\\r\n]+|\\./, + }, +}); diff --git a/src/Napper.Zed/grammars/tree-sitter-naplist/src/grammar.json b/src/Napper.Zed/grammars/tree-sitter-naplist/src/grammar.json new file mode 100644 index 0000000..2a87372 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-naplist/src/grammar.json @@ -0,0 +1,186 @@ +{ + "$schema": "https://tree-sitter.github.io/tree-sitter/assets/schemas/grammar.schema.json", + "name": "naplist", + "rules": { + "source_file": { + "type": "REPEAT", + "content": { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "section_header" + }, + { + "type": "SYMBOL", + "name": "pair" + }, + { + "type": "SYMBOL", + "name": "step" + }, + { + "type": "SYMBOL", + "name": "comment" + }, + { + "type": "SYMBOL", + "name": "newline" + } + ] + } + }, + "newline": { + "type": "PATTERN", + "value": "\\r?\\n" + }, + "comment": { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "#" + }, + { + "type": "PATTERN", + "value": "[^\\r\\n]*" + } + ] + }, + "section_header": { + "type": "CHOICE", + "members": [ + { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "STRING", + "value": "meta" + }, + { + "type": "STRING", + "value": "]" + } + ] 
+ }, + { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "STRING", + "value": "vars" + }, + { + "type": "STRING", + "value": "]" + } + ] + }, + { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "[" + }, + { + "type": "STRING", + "value": "steps" + }, + { + "type": "STRING", + "value": "]" + } + ] + } + ] + }, + "pair": { + "type": "SEQ", + "members": [ + { + "type": "SYMBOL", + "name": "key" + }, + { + "type": "STRING", + "value": "=" + }, + { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "quoted_string" + }, + { + "type": "SYMBOL", + "name": "unquoted_value" + } + ] + } + ] + }, + "step": { + "type": "PATTERN", + "value": "[.\\/][^\\s#\\r\\n][^\\r\\n]*" + }, + "key": { + "type": "PATTERN", + "value": "[a-zA-Z_][a-zA-Z0-9_\\-]*" + }, + "unquoted_value": { + "type": "PATTERN", + "value": "[^\\r\\n]+" + }, + "quoted_string": { + "type": "SEQ", + "members": [ + { + "type": "STRING", + "value": "\"" + }, + { + "type": "CHOICE", + "members": [ + { + "type": "SYMBOL", + "name": "string_content" + }, + { + "type": "BLANK" + } + ] + }, + { + "type": "STRING", + "value": "\"" + } + ] + }, + "string_content": { + "type": "PATTERN", + "value": "[^\"\\\\\\r\\n]+|\\\\." 
+ } + }, + "extras": [ + { + "type": "PATTERN", + "value": "[ \\t]" + } + ], + "conflicts": [], + "precedences": [], + "externals": [], + "inline": [], + "supertypes": [], + "reserved": {} +} \ No newline at end of file diff --git a/src/Napper.Zed/grammars/tree-sitter-naplist/src/node-types.json b/src/Napper.Zed/grammars/tree-sitter-naplist/src/node-types.json new file mode 100644 index 0000000..d3decf1 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-naplist/src/node-types.json @@ -0,0 +1,134 @@ +[ + { + "type": "comment", + "named": true, + "fields": {} + }, + { + "type": "pair", + "named": true, + "fields": {}, + "children": { + "multiple": true, + "required": true, + "types": [ + { + "type": "key", + "named": true + }, + { + "type": "quoted_string", + "named": true + }, + { + "type": "unquoted_value", + "named": true + } + ] + } + }, + { + "type": "quoted_string", + "named": true, + "fields": {}, + "children": { + "multiple": false, + "required": false, + "types": [ + { + "type": "string_content", + "named": true + } + ] + } + }, + { + "type": "section_header", + "named": true, + "fields": {} + }, + { + "type": "source_file", + "named": true, + "root": true, + "fields": {}, + "children": { + "multiple": true, + "required": false, + "types": [ + { + "type": "comment", + "named": true + }, + { + "type": "newline", + "named": true + }, + { + "type": "pair", + "named": true + }, + { + "type": "section_header", + "named": true + }, + { + "type": "step", + "named": true + } + ] + } + }, + { + "type": "\"", + "named": false + }, + { + "type": "#", + "named": false + }, + { + "type": "=", + "named": false + }, + { + "type": "[", + "named": false + }, + { + "type": "]", + "named": false + }, + { + "type": "key", + "named": true + }, + { + "type": "meta", + "named": false + }, + { + "type": "newline", + "named": true + }, + { + "type": "step", + "named": true + }, + { + "type": "steps", + "named": false + }, + { + "type": "string_content", + "named": true + }, + { 
+ "type": "unquoted_value", + "named": true + }, + { + "type": "vars", + "named": false + } +] \ No newline at end of file diff --git a/src/Napper.Zed/grammars/tree-sitter-naplist/src/parser.c b/src/Napper.Zed/grammars/tree-sitter-naplist/src/parser.c new file mode 100644 index 0000000..d1c90f5 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-naplist/src/parser.c @@ -0,0 +1,788 @@ +/* Automatically @generated by tree-sitter */ + +#include "tree_sitter/parser.h" + +#if defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic ignored "-Wmissing-field-initializers" +#endif + +#define LANGUAGE_VERSION 14 +#define STATE_COUNT 17 +#define LARGE_STATE_COUNT 2 +#define SYMBOL_COUNT 21 +#define ALIAS_COUNT 0 +#define TOKEN_COUNT 15 +#define EXTERNAL_TOKEN_COUNT 0 +#define FIELD_COUNT 0 +#define MAX_ALIAS_SEQUENCE_LENGTH 3 +#define MAX_RESERVED_WORD_SET_SIZE 0 +#define PRODUCTION_ID_COUNT 1 +#define SUPERTYPE_COUNT 0 + +enum ts_symbol_identifiers { + sym_newline = 1, + anon_sym_POUND = 2, + aux_sym_comment_token1 = 3, + anon_sym_LBRACK = 4, + anon_sym_meta = 5, + anon_sym_RBRACK = 6, + anon_sym_vars = 7, + anon_sym_steps = 8, + anon_sym_EQ = 9, + sym_step = 10, + sym_key = 11, + sym_unquoted_value = 12, + anon_sym_DQUOTE = 13, + sym_string_content = 14, + sym_source_file = 15, + sym_comment = 16, + sym_section_header = 17, + sym_pair = 18, + sym_quoted_string = 19, + aux_sym_source_file_repeat1 = 20, +}; + +static const char * const ts_symbol_names[] = { + [ts_builtin_sym_end] = "end", + [sym_newline] = "newline", + [anon_sym_POUND] = "#", + [aux_sym_comment_token1] = "comment_token1", + [anon_sym_LBRACK] = "[", + [anon_sym_meta] = "meta", + [anon_sym_RBRACK] = "]", + [anon_sym_vars] = "vars", + [anon_sym_steps] = "steps", + [anon_sym_EQ] = "=", + [sym_step] = "step", + [sym_key] = "key", + [sym_unquoted_value] = "unquoted_value", + [anon_sym_DQUOTE] = "\"", + [sym_string_content] = "string_content", + [sym_source_file] = "source_file", + [sym_comment] = 
"comment", + [sym_section_header] = "section_header", + [sym_pair] = "pair", + [sym_quoted_string] = "quoted_string", + [aux_sym_source_file_repeat1] = "source_file_repeat1", +}; + +static const TSSymbol ts_symbol_map[] = { + [ts_builtin_sym_end] = ts_builtin_sym_end, + [sym_newline] = sym_newline, + [anon_sym_POUND] = anon_sym_POUND, + [aux_sym_comment_token1] = aux_sym_comment_token1, + [anon_sym_LBRACK] = anon_sym_LBRACK, + [anon_sym_meta] = anon_sym_meta, + [anon_sym_RBRACK] = anon_sym_RBRACK, + [anon_sym_vars] = anon_sym_vars, + [anon_sym_steps] = anon_sym_steps, + [anon_sym_EQ] = anon_sym_EQ, + [sym_step] = sym_step, + [sym_key] = sym_key, + [sym_unquoted_value] = sym_unquoted_value, + [anon_sym_DQUOTE] = anon_sym_DQUOTE, + [sym_string_content] = sym_string_content, + [sym_source_file] = sym_source_file, + [sym_comment] = sym_comment, + [sym_section_header] = sym_section_header, + [sym_pair] = sym_pair, + [sym_quoted_string] = sym_quoted_string, + [aux_sym_source_file_repeat1] = aux_sym_source_file_repeat1, +}; + +static const TSSymbolMetadata ts_symbol_metadata[] = { + [ts_builtin_sym_end] = { + .visible = false, + .named = true, + }, + [sym_newline] = { + .visible = true, + .named = true, + }, + [anon_sym_POUND] = { + .visible = true, + .named = false, + }, + [aux_sym_comment_token1] = { + .visible = false, + .named = false, + }, + [anon_sym_LBRACK] = { + .visible = true, + .named = false, + }, + [anon_sym_meta] = { + .visible = true, + .named = false, + }, + [anon_sym_RBRACK] = { + .visible = true, + .named = false, + }, + [anon_sym_vars] = { + .visible = true, + .named = false, + }, + [anon_sym_steps] = { + .visible = true, + .named = false, + }, + [anon_sym_EQ] = { + .visible = true, + .named = false, + }, + [sym_step] = { + .visible = true, + .named = true, + }, + [sym_key] = { + .visible = true, + .named = true, + }, + [sym_unquoted_value] = { + .visible = true, + .named = true, + }, + [anon_sym_DQUOTE] = { + .visible = true, + .named = false, + }, + 
[sym_string_content] = { + .visible = true, + .named = true, + }, + [sym_source_file] = { + .visible = true, + .named = true, + }, + [sym_comment] = { + .visible = true, + .named = true, + }, + [sym_section_header] = { + .visible = true, + .named = true, + }, + [sym_pair] = { + .visible = true, + .named = true, + }, + [sym_quoted_string] = { + .visible = true, + .named = true, + }, + [aux_sym_source_file_repeat1] = { + .visible = false, + .named = false, + }, +}; + +static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT][MAX_ALIAS_SEQUENCE_LENGTH] = { + [0] = {0}, +}; + +static const uint16_t ts_non_terminal_alias_map[] = { + 0, +}; + +static const TSStateId ts_primary_state_ids[STATE_COUNT] = { + [0] = 0, + [1] = 1, + [2] = 2, + [3] = 3, + [4] = 4, + [5] = 5, + [6] = 6, + [7] = 7, + [8] = 8, + [9] = 9, + [10] = 10, + [11] = 11, + [12] = 12, + [13] = 13, + [14] = 14, + [15] = 15, + [16] = 16, +}; + +static bool ts_lex(TSLexer *lexer, TSStateId state) { + START_LEXER(); + eof = lexer->eof(lexer); + switch (state) { + case 0: + if (eof) ADVANCE(18); + ADVANCE_MAP( + '\n', 19, + '\r', 1, + '"', 46, + '#', 20, + '=', 31, + '[', 23, + ']', 26, + 'm', 36, + 's', 41, + 'v', 33, + ); + if (lookahead == '\t' || + lookahead == ' ') SKIP(0); + if (lookahead == '.' 
|| + lookahead == '/') ADVANCE(15); + if (('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 1: + if (lookahead == '\n') ADVANCE(19); + END_STATE(); + case 2: + if (lookahead == '"') ADVANCE(46); + if (lookahead == '\\') ADVANCE(16); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(49); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(50); + END_STATE(); + case 3: + if (lookahead == '"') ADVANCE(47); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(44); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(45); + END_STATE(); + case 4: + if (lookahead == 'a') ADVANCE(10); + END_STATE(); + case 5: + if (lookahead == 'a') ADVANCE(24); + END_STATE(); + case 6: + if (lookahead == 'e') ADVANCE(9); + END_STATE(); + case 7: + if (lookahead == 'e') ADVANCE(14); + END_STATE(); + case 8: + if (lookahead == 'm') ADVANCE(7); + if (lookahead == 's') ADVANCE(13); + if (lookahead == 'v') ADVANCE(4); + if (lookahead == '\t' || + lookahead == ' ') SKIP(8); + END_STATE(); + case 9: + if (lookahead == 'p') ADVANCE(12); + END_STATE(); + case 10: + if (lookahead == 'r') ADVANCE(11); + END_STATE(); + case 11: + if (lookahead == 's') ADVANCE(27); + END_STATE(); + case 12: + if (lookahead == 's') ADVANCE(29); + END_STATE(); + case 13: + if (lookahead == 't') ADVANCE(6); + END_STATE(); + case 14: + if (lookahead == 't') ADVANCE(5); + END_STATE(); + case 15: + if (lookahead != 0 && + (lookahead < '\t' || '\r' < lookahead) && + lookahead != ' ' && + lookahead != '#') ADVANCE(32); + END_STATE(); + case 16: + if (lookahead != 0 && + lookahead != '\n') ADVANCE(48); + END_STATE(); + case 17: + if (eof) ADVANCE(18); + if (lookahead == '\n') ADVANCE(19); + if (lookahead == '\r') ADVANCE(1); + if (lookahead == '#') ADVANCE(20); + if (lookahead == '[') ADVANCE(23); + if (lookahead == '\t' || + lookahead == 
' ') SKIP(17); + if (lookahead == '.' || + lookahead == '/') ADVANCE(15); + if (('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 18: + ACCEPT_TOKEN(ts_builtin_sym_end); + END_STATE(); + case 19: + ACCEPT_TOKEN(sym_newline); + END_STATE(); + case 20: + ACCEPT_TOKEN(anon_sym_POUND); + END_STATE(); + case 21: + ACCEPT_TOKEN(aux_sym_comment_token1); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(21); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(22); + END_STATE(); + case 22: + ACCEPT_TOKEN(aux_sym_comment_token1); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(22); + END_STATE(); + case 23: + ACCEPT_TOKEN(anon_sym_LBRACK); + END_STATE(); + case 24: + ACCEPT_TOKEN(anon_sym_meta); + END_STATE(); + case 25: + ACCEPT_TOKEN(anon_sym_meta); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 26: + ACCEPT_TOKEN(anon_sym_RBRACK); + END_STATE(); + case 27: + ACCEPT_TOKEN(anon_sym_vars); + END_STATE(); + case 28: + ACCEPT_TOKEN(anon_sym_vars); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 29: + ACCEPT_TOKEN(anon_sym_steps); + END_STATE(); + case 30: + ACCEPT_TOKEN(anon_sym_steps); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 31: + ACCEPT_TOKEN(anon_sym_EQ); + END_STATE(); + case 32: + ACCEPT_TOKEN(sym_step); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(32); + END_STATE(); + case 33: 
+ ACCEPT_TOKEN(sym_key); + if (lookahead == 'a') ADVANCE(38); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('b' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 34: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'a') ADVANCE(25); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('b' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 35: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'e') ADVANCE(37); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 36: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'e') ADVANCE(42); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 37: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'p') ADVANCE(40); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 38: + ACCEPT_TOKEN(sym_key); + if (lookahead == 'r') ADVANCE(39); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 39: + ACCEPT_TOKEN(sym_key); + if (lookahead == 's') ADVANCE(28); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 40: + ACCEPT_TOKEN(sym_key); + if (lookahead == 
's') ADVANCE(30); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 41: + ACCEPT_TOKEN(sym_key); + if (lookahead == 't') ADVANCE(35); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 42: + ACCEPT_TOKEN(sym_key); + if (lookahead == 't') ADVANCE(34); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 43: + ACCEPT_TOKEN(sym_key); + if (lookahead == '-' || + ('0' <= lookahead && lookahead <= '9') || + ('A' <= lookahead && lookahead <= 'Z') || + lookahead == '_' || + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(43); + END_STATE(); + case 44: + ACCEPT_TOKEN(sym_unquoted_value); + if (lookahead == '"') ADVANCE(47); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(44); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r') ADVANCE(45); + END_STATE(); + case 45: + ACCEPT_TOKEN(sym_unquoted_value); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(45); + END_STATE(); + case 46: + ACCEPT_TOKEN(anon_sym_DQUOTE); + END_STATE(); + case 47: + ACCEPT_TOKEN(anon_sym_DQUOTE); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r') ADVANCE(45); + END_STATE(); + case 48: + ACCEPT_TOKEN(sym_string_content); + END_STATE(); + case 49: + ACCEPT_TOKEN(sym_string_content); + if (lookahead == '\\') ADVANCE(16); + if (lookahead == '\t' || + lookahead == ' ') ADVANCE(49); + if (lookahead != 0 && + lookahead != '\t' && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"') ADVANCE(50); + END_STATE(); + case 50: + 
ACCEPT_TOKEN(sym_string_content); + if (lookahead != 0 && + lookahead != '\n' && + lookahead != '\r' && + lookahead != '"' && + lookahead != '\\') ADVANCE(50); + END_STATE(); + default: + return false; + } +} + +static const TSLexMode ts_lex_modes[STATE_COUNT] = { + [0] = {.lex_state = 0}, + [1] = {.lex_state = 17}, + [2] = {.lex_state = 17}, + [3] = {.lex_state = 17}, + [4] = {.lex_state = 17}, + [5] = {.lex_state = 17}, + [6] = {.lex_state = 17}, + [7] = {.lex_state = 17}, + [8] = {.lex_state = 17}, + [9] = {.lex_state = 8}, + [10] = {.lex_state = 3}, + [11] = {.lex_state = 2}, + [12] = {.lex_state = 21}, + [13] = {.lex_state = 0}, + [14] = {.lex_state = 0}, + [15] = {.lex_state = 0}, + [16] = {.lex_state = 0}, +}; + +static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = { + [STATE(0)] = { + [ts_builtin_sym_end] = ACTIONS(1), + [sym_newline] = ACTIONS(1), + [anon_sym_POUND] = ACTIONS(1), + [anon_sym_LBRACK] = ACTIONS(1), + [anon_sym_meta] = ACTIONS(1), + [anon_sym_RBRACK] = ACTIONS(1), + [anon_sym_vars] = ACTIONS(1), + [anon_sym_steps] = ACTIONS(1), + [anon_sym_EQ] = ACTIONS(1), + [sym_step] = ACTIONS(1), + [sym_key] = ACTIONS(1), + [anon_sym_DQUOTE] = ACTIONS(1), + }, + [STATE(1)] = { + [sym_source_file] = STATE(14), + [sym_comment] = STATE(2), + [sym_section_header] = STATE(2), + [sym_pair] = STATE(2), + [aux_sym_source_file_repeat1] = STATE(2), + [ts_builtin_sym_end] = ACTIONS(3), + [sym_newline] = ACTIONS(5), + [anon_sym_POUND] = ACTIONS(7), + [anon_sym_LBRACK] = ACTIONS(9), + [sym_step] = ACTIONS(5), + [sym_key] = ACTIONS(11), + }, +}; + +static const uint16_t ts_small_parse_table[] = { + [0] = 6, + ACTIONS(7), 1, + anon_sym_POUND, + ACTIONS(9), 1, + anon_sym_LBRACK, + ACTIONS(11), 1, + sym_key, + ACTIONS(13), 1, + ts_builtin_sym_end, + ACTIONS(15), 2, + sym_newline, + sym_step, + STATE(3), 4, + sym_comment, + sym_section_header, + sym_pair, + aux_sym_source_file_repeat1, + [23] = 6, + ACTIONS(17), 1, + ts_builtin_sym_end, + ACTIONS(22), 1, 
+ anon_sym_POUND, + ACTIONS(25), 1, + anon_sym_LBRACK, + ACTIONS(28), 1, + sym_key, + ACTIONS(19), 2, + sym_newline, + sym_step, + STATE(3), 4, + sym_comment, + sym_section_header, + sym_pair, + aux_sym_source_file_repeat1, + [46] = 1, + ACTIONS(31), 6, + ts_builtin_sym_end, + sym_newline, + anon_sym_POUND, + anon_sym_LBRACK, + sym_step, + sym_key, + [55] = 1, + ACTIONS(33), 6, + ts_builtin_sym_end, + sym_newline, + anon_sym_POUND, + anon_sym_LBRACK, + sym_step, + sym_key, + [64] = 1, + ACTIONS(35), 6, + ts_builtin_sym_end, + sym_newline, + anon_sym_POUND, + anon_sym_LBRACK, + sym_step, + sym_key, + [73] = 1, + ACTIONS(37), 6, + ts_builtin_sym_end, + sym_newline, + anon_sym_POUND, + anon_sym_LBRACK, + sym_step, + sym_key, + [82] = 1, + ACTIONS(39), 6, + ts_builtin_sym_end, + sym_newline, + anon_sym_POUND, + anon_sym_LBRACK, + sym_step, + sym_key, + [91] = 1, + ACTIONS(41), 3, + anon_sym_meta, + anon_sym_vars, + anon_sym_steps, + [97] = 3, + ACTIONS(43), 1, + sym_unquoted_value, + ACTIONS(45), 1, + anon_sym_DQUOTE, + STATE(6), 1, + sym_quoted_string, + [107] = 2, + ACTIONS(47), 1, + anon_sym_DQUOTE, + ACTIONS(49), 1, + sym_string_content, + [114] = 1, + ACTIONS(51), 1, + aux_sym_comment_token1, + [118] = 1, + ACTIONS(53), 1, + anon_sym_EQ, + [122] = 1, + ACTIONS(55), 1, + ts_builtin_sym_end, + [126] = 1, + ACTIONS(57), 1, + anon_sym_RBRACK, + [130] = 1, + ACTIONS(59), 1, + anon_sym_DQUOTE, +}; + +static const uint32_t ts_small_parse_table_map[] = { + [SMALL_STATE(2)] = 0, + [SMALL_STATE(3)] = 23, + [SMALL_STATE(4)] = 46, + [SMALL_STATE(5)] = 55, + [SMALL_STATE(6)] = 64, + [SMALL_STATE(7)] = 73, + [SMALL_STATE(8)] = 82, + [SMALL_STATE(9)] = 91, + [SMALL_STATE(10)] = 97, + [SMALL_STATE(11)] = 107, + [SMALL_STATE(12)] = 114, + [SMALL_STATE(13)] = 118, + [SMALL_STATE(14)] = 122, + [SMALL_STATE(15)] = 126, + [SMALL_STATE(16)] = 130, +}; + +static const TSParseActionEntry ts_parse_actions[] = { + [0] = {.entry = {.count = 0, .reusable = false}}, + [1] = {.entry = {.count 
= 1, .reusable = false}}, RECOVER(), + [3] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 0, 0, 0), + [5] = {.entry = {.count = 1, .reusable = true}}, SHIFT(2), + [7] = {.entry = {.count = 1, .reusable = true}}, SHIFT(12), + [9] = {.entry = {.count = 1, .reusable = true}}, SHIFT(9), + [11] = {.entry = {.count = 1, .reusable = true}}, SHIFT(13), + [13] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 1, 0, 0), + [15] = {.entry = {.count = 1, .reusable = true}}, SHIFT(3), + [17] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), + [19] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(3), + [22] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(12), + [25] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(9), + [28] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(13), + [31] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_comment, 2, 0, 0), + [33] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_section_header, 3, 0, 0), + [35] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_pair, 3, 0, 0), + [37] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_quoted_string, 2, 0, 0), + [39] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_quoted_string, 3, 0, 0), + [41] = {.entry = {.count = 1, .reusable = true}}, SHIFT(15), + [43] = {.entry = {.count = 1, .reusable = false}}, SHIFT(6), + [45] = {.entry = {.count = 1, .reusable = false}}, SHIFT(11), + [47] = {.entry = {.count = 1, .reusable = false}}, SHIFT(7), + [49] = {.entry = {.count = 1, .reusable = true}}, SHIFT(16), + [51] = {.entry = {.count = 1, .reusable = true}}, SHIFT(4), + [53] = {.entry = {.count = 1, .reusable = true}}, SHIFT(10), + [55] = {.entry = {.count = 1, .reusable = true}}, 
ACCEPT_INPUT(), + [57] = {.entry = {.count = 1, .reusable = true}}, SHIFT(5), + [59] = {.entry = {.count = 1, .reusable = true}}, SHIFT(8), +}; + +#ifdef __cplusplus +extern "C" { +#endif +#ifdef TREE_SITTER_HIDE_SYMBOLS +#define TS_PUBLIC +#elif defined(_WIN32) +#define TS_PUBLIC __declspec(dllexport) +#else +#define TS_PUBLIC __attribute__((visibility("default"))) +#endif + +TS_PUBLIC const TSLanguage *tree_sitter_naplist(void) { + static const TSLanguage language = { + .abi_version = LANGUAGE_VERSION, + .symbol_count = SYMBOL_COUNT, + .alias_count = ALIAS_COUNT, + .token_count = TOKEN_COUNT, + .external_token_count = EXTERNAL_TOKEN_COUNT, + .state_count = STATE_COUNT, + .large_state_count = LARGE_STATE_COUNT, + .production_id_count = PRODUCTION_ID_COUNT, + .field_count = FIELD_COUNT, + .max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH, + .parse_table = &ts_parse_table[0][0], + .small_parse_table = ts_small_parse_table, + .small_parse_table_map = ts_small_parse_table_map, + .parse_actions = ts_parse_actions, + .symbol_names = ts_symbol_names, + .symbol_metadata = ts_symbol_metadata, + .public_symbol_map = ts_symbol_map, + .alias_map = ts_non_terminal_alias_map, + .alias_sequences = &ts_alias_sequences[0][0], + .lex_modes = (const void*)ts_lex_modes, + .lex_fn = ts_lex, + .primary_state_ids = ts_primary_state_ids, + }; + return &language; +} +#ifdef __cplusplus +} +#endif diff --git a/src/Napper.Zed/grammars/tree-sitter-naplist/src/tree_sitter/alloc.h b/src/Napper.Zed/grammars/tree-sitter-naplist/src/tree_sitter/alloc.h new file mode 100644 index 0000000..1abdd12 --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-naplist/src/tree_sitter/alloc.h @@ -0,0 +1,54 @@ +#ifndef TREE_SITTER_ALLOC_H_ +#define TREE_SITTER_ALLOC_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include <stdbool.h> +#include <stdio.h> +#include <stdlib.h> + +// Allow clients to override allocation functions +#ifdef TREE_SITTER_REUSE_ALLOCATOR + +extern void 
*(*ts_current_malloc)(size_t size); +extern void *(*ts_current_calloc)(size_t count, size_t size); +extern void *(*ts_current_realloc)(void *ptr, size_t size); +extern void (*ts_current_free)(void *ptr); + +#ifndef ts_malloc +#define ts_malloc ts_current_malloc +#endif +#ifndef ts_calloc +#define ts_calloc ts_current_calloc +#endif +#ifndef ts_realloc +#define ts_realloc ts_current_realloc +#endif +#ifndef ts_free +#define ts_free ts_current_free +#endif + +#else + +#ifndef ts_malloc +#define ts_malloc malloc +#endif +#ifndef ts_calloc +#define ts_calloc calloc +#endif +#ifndef ts_realloc +#define ts_realloc realloc +#endif +#ifndef ts_free +#define ts_free free +#endif + +#endif + +#ifdef __cplusplus +} +#endif + +#endif // TREE_SITTER_ALLOC_H_ diff --git a/src/Napper.Zed/grammars/tree-sitter-naplist/src/tree_sitter/array.h b/src/Napper.Zed/grammars/tree-sitter-naplist/src/tree_sitter/array.h new file mode 100644 index 0000000..56fc8cd --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-naplist/src/tree_sitter/array.h @@ -0,0 +1,330 @@ +#ifndef TREE_SITTER_ARRAY_H_ +#define TREE_SITTER_ARRAY_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include "./alloc.h" + +#include <assert.h> +#include <stdbool.h> +#include <stdint.h> +#include <stdlib.h> +#include <string.h> + +#ifdef _MSC_VER +#pragma warning(push) +#pragma warning(disable : 4101) +#elif defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wunused-variable" +#endif + +#define Array(T) \ + struct { \ + T *contents; \ + uint32_t size; \ + uint32_t capacity; \ + } + +/// Initialize an array. +#define array_init(self) \ + ((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL) + +/// Create an empty array. +#define array_new() \ + { NULL, 0, 0 } + +/// Get a pointer to the element at a given `index` in the array. 
+#define array_get(self, _index) \ + (assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index]) + +/// Get a pointer to the first element in the array. +#define array_front(self) array_get(self, 0) + +/// Get a pointer to the last element in the array. +#define array_back(self) array_get(self, (self)->size - 1) + +/// Clear the array, setting its size to zero. Note that this does not free any +/// memory allocated for the array's contents. +#define array_clear(self) ((self)->size = 0) + +/// Reserve `new_capacity` elements of space in the array. If `new_capacity` is +/// less than the array's current capacity, this function has no effect. +#define array_reserve(self, new_capacity) \ + ((self)->contents = _array__reserve( \ + (void *)(self)->contents, &(self)->capacity, \ + array_elem_size(self), new_capacity) \ + ) + +/// Free any memory allocated for this array. Note that this does not free any +/// memory allocated for the array's contents. +#define array_delete(self) \ + do { \ + if ((self)->contents) ts_free((self)->contents); \ + (self)->contents = NULL; \ + (self)->size = 0; \ + (self)->capacity = 0; \ + } while (0) + +/// Push a new `element` onto the end of the array. +#define array_push(self, element) \ + do { \ + (self)->contents = _array__grow( \ + (void *)(self)->contents, (self)->size, &(self)->capacity, \ + 1, array_elem_size(self) \ + ); \ + (self)->contents[(self)->size++] = (element); \ + } while(0) + +/// Increase the array's size by `count` elements. +/// New elements are zero-initialized. +#define array_grow_by(self, count) \ + do { \ + if ((count) == 0) break; \ + (self)->contents = _array__grow( \ + (self)->contents, (self)->size, &(self)->capacity, \ + count, array_elem_size(self) \ + ); \ + memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)); \ + (self)->size += (count); \ + } while (0) + +/// Append all elements from one array to the end of another. 
+#define array_push_all(self, other) \ + array_extend((self), (other)->size, (other)->contents) + +/// Append `count` elements to the end of the array, reading their values from the +/// `contents` pointer. +#define array_extend(self, count, other_contents) \ + (self)->contents = _array__splice( \ + (void*)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), (self)->size, 0, count, other_contents \ + ) + +/// Remove `old_count` elements from the array starting at the given `index`. At +/// the same index, insert `new_count` new elements, reading their values from the +/// `new_contents` pointer. +#define array_splice(self, _index, old_count, new_count, new_contents) \ + (self)->contents = _array__splice( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), _index, old_count, new_count, new_contents \ + ) + +/// Insert one `element` into the array at the given `index`. +#define array_insert(self, _index, element) \ + (self)->contents = _array__splice( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), _index, 0, 1, &(element) \ + ) + +/// Remove one element from the array at the given `index`. +#define array_erase(self, _index) \ + _array__erase((void *)(self)->contents, &(self)->size, array_elem_size(self), _index) + +/// Pop the last element off the array, returning the element by value. +#define array_pop(self) ((self)->contents[--(self)->size]) + +/// Assign the contents of one array to another, reallocating if necessary. 
+#define array_assign(self, other) \ + (self)->contents = _array__assign( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + (const void *)(other)->contents, (other)->size, array_elem_size(self) \ + ) + +/// Swap one array with another +#define array_swap(self, other) \ + do { \ + void *_array_swap_tmp = (void *)(self)->contents; \ + (self)->contents = (other)->contents; \ + (other)->contents = _array_swap_tmp; \ + _array__swap(&(self)->size, &(self)->capacity, \ + &(other)->size, &(other)->capacity); \ + } while (0) + +/// Get the size of the array contents +#define array_elem_size(self) (sizeof *(self)->contents) + +/// Search a sorted array for a given `needle` value, using the given `compare` +/// callback to determine the order. +/// +/// If an existing element is found to be equal to `needle`, then the `index` +/// out-parameter is set to the existing value's index, and the `exists` +/// out-parameter is set to true. Otherwise, `index` is set to an index where +/// `needle` should be inserted in order to preserve the sorting, and `exists` +/// is set to false. +#define array_search_sorted_with(self, compare, needle, _index, _exists) \ + _array__search_sorted(self, 0, compare, , needle, _index, _exists) + +/// Search a sorted array for a given `needle` value, using integer comparisons +/// of a given struct field (specified with a leading dot) to determine the order. +/// +/// See also `array_search_sorted_with`. +#define array_search_sorted_by(self, field, needle, _index, _exists) \ + _array__search_sorted(self, 0, _compare_int, field, needle, _index, _exists) + +/// Insert a given `value` into a sorted array, using the given `compare` +/// callback to determine the order. 
+#define array_insert_sorted_with(self, compare, value) \ + do { \ + unsigned _index, _exists; \ + array_search_sorted_with(self, compare, &(value), &_index, &_exists); \ + if (!_exists) array_insert(self, _index, value); \ + } while (0) + +/// Insert a given `value` into a sorted array, using integer comparisons of +/// a given struct field (specified with a leading dot) to determine the order. +/// +/// See also `array_search_sorted_by`. +#define array_insert_sorted_by(self, field, value) \ + do { \ + unsigned _index, _exists; \ + array_search_sorted_by(self, field, (value) field, &_index, &_exists); \ + if (!_exists) array_insert(self, _index, value); \ + } while (0) + +// Private + +// Pointers to individual `Array` fields (rather than the entire `Array` itself) +// are passed to the various `_array__*` functions below to address strict aliasing +// violations that arises when the _entire_ `Array` struct is passed as `Array(void)*`. +// +// The `Array` type itself was not altered as a solution in order to avoid breakage +// with existing consumers (in particular, parsers with external scanners). + +/// This is not what you're looking for, see `array_erase`. +static inline void _array__erase(void* self_contents, uint32_t *size, + size_t element_size, uint32_t index) { + assert(index < *size); + char *contents = (char *)self_contents; + memmove(contents + index * element_size, contents + (index + 1) * element_size, + (*size - index - 1) * element_size); + (*size)--; +} + +/// This is not what you're looking for, see `array_reserve`. 
+static inline void *_array__reserve(void *contents, uint32_t *capacity, + size_t element_size, uint32_t new_capacity) { + void *new_contents = contents; + if (new_capacity > *capacity) { + if (contents) { + new_contents = ts_realloc(contents, new_capacity * element_size); + } else { + new_contents = ts_malloc(new_capacity * element_size); + } + *capacity = new_capacity; + } + return new_contents; +} + +/// This is not what you're looking for, see `array_assign`. +static inline void *_array__assign(void* self_contents, uint32_t *self_size, uint32_t *self_capacity, + const void *other_contents, uint32_t other_size, size_t element_size) { + void *new_contents = _array__reserve(self_contents, self_capacity, element_size, other_size); + *self_size = other_size; + memcpy(new_contents, other_contents, *self_size * element_size); + return new_contents; +} + +/// This is not what you're looking for, see `array_swap`. +static inline void _array__swap(uint32_t *self_size, uint32_t *self_capacity, + uint32_t *other_size, uint32_t *other_capacity) { + uint32_t tmp_size = *self_size; + uint32_t tmp_capacity = *self_capacity; + *self_size = *other_size; + *self_capacity = *other_capacity; + *other_size = tmp_size; + *other_capacity = tmp_capacity; +} + +/// This is not what you're looking for, see `array_push` or `array_grow_by`. +static inline void *_array__grow(void *contents, uint32_t size, uint32_t *capacity, + uint32_t count, size_t element_size) { + void *new_contents = contents; + uint32_t new_size = size + count; + if (new_size > *capacity) { + uint32_t new_capacity = *capacity * 2; + if (new_capacity < 8) new_capacity = 8; + if (new_capacity < new_size) new_capacity = new_size; + new_contents = _array__reserve(contents, capacity, element_size, new_capacity); + } + return new_contents; +} + +/// This is not what you're looking for, see `array_splice`. 
+static inline void *_array__splice(void *self_contents, uint32_t *size, uint32_t *capacity, + size_t element_size, + uint32_t index, uint32_t old_count, + uint32_t new_count, const void *elements) { + uint32_t new_size = *size + new_count - old_count; + uint32_t old_end = index + old_count; + uint32_t new_end = index + new_count; + assert(old_end <= *size); + + void *new_contents = _array__reserve(self_contents, capacity, element_size, new_size); + + char *contents = (char *)new_contents; + if (*size > old_end) { + memmove( + contents + new_end * element_size, + contents + old_end * element_size, + (*size - old_end) * element_size + ); + } + if (new_count > 0) { + if (elements) { + memcpy( + (contents + index * element_size), + elements, + new_count * element_size + ); + } else { + memset( + (contents + index * element_size), + 0, + new_count * element_size + ); + } + } + *size += new_count - old_count; + + return new_contents; +} + +/// A binary search routine, based on Rust's `std::slice::binary_search_by`. +/// This is not what you're looking for, see `array_search_sorted_with` or `array_search_sorted_by`. +#define _array__search_sorted(self, start, compare, suffix, needle, _index, _exists) \ + do { \ + *(_index) = start; \ + *(_exists) = false; \ + uint32_t size = (self)->size - *(_index); \ + if (size == 0) break; \ + int comparison; \ + while (size > 1) { \ + uint32_t half_size = size / 2; \ + uint32_t mid_index = *(_index) + half_size; \ + comparison = compare(&((self)->contents[mid_index] suffix), (needle)); \ + if (comparison <= 0) *(_index) = mid_index; \ + size -= half_size; \ + } \ + comparison = compare(&((self)->contents[*(_index)] suffix), (needle)); \ + if (comparison == 0) *(_exists) = true; \ + else if (comparison < 0) *(_index) += 1; \ + } while (0) + +/// Helper macro for the `_sorted_by` routines below. This takes the left (existing) +/// parameter by reference in order to work with the generic sorting function above. 
+#define _compare_int(a, b) ((int)*(a) - (int)(b)) + +#ifdef _MSC_VER +#pragma warning(pop) +#elif defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic pop +#endif + +#ifdef __cplusplus +} +#endif + +#endif // TREE_SITTER_ARRAY_H_ diff --git a/src/Napper.Zed/grammars/tree-sitter-naplist/src/tree_sitter/parser.h b/src/Napper.Zed/grammars/tree-sitter-naplist/src/tree_sitter/parser.h new file mode 100644 index 0000000..858107d --- /dev/null +++ b/src/Napper.Zed/grammars/tree-sitter-naplist/src/tree_sitter/parser.h @@ -0,0 +1,286 @@ +#ifndef TREE_SITTER_PARSER_H_ +#define TREE_SITTER_PARSER_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include <stdbool.h> +#include <stdint.h> +#include <stdlib.h> + +#define ts_builtin_sym_error ((TSSymbol)-1) +#define ts_builtin_sym_end 0 +#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 + +#ifndef TREE_SITTER_API_H_ +typedef uint16_t TSStateId; +typedef uint16_t TSSymbol; +typedef uint16_t TSFieldId; +typedef struct TSLanguage TSLanguage; +typedef struct TSLanguageMetadata { + uint8_t major_version; + uint8_t minor_version; + uint8_t patch_version; +} TSLanguageMetadata; +#endif + +typedef struct { + TSFieldId field_id; + uint8_t child_index; + bool inherited; +} TSFieldMapEntry; + +// Used to index the field and supertype maps. 
+typedef struct { + uint16_t index; + uint16_t length; +} TSMapSlice; + +typedef struct { + bool visible; + bool named; + bool supertype; +} TSSymbolMetadata; + +typedef struct TSLexer TSLexer; + +struct TSLexer { + int32_t lookahead; + TSSymbol result_symbol; + void (*advance)(TSLexer *, bool); + void (*mark_end)(TSLexer *); + uint32_t (*get_column)(TSLexer *); + bool (*is_at_included_range_start)(const TSLexer *); + bool (*eof)(const TSLexer *); + void (*log)(const TSLexer *, const char *, ...); +}; + +typedef enum { + TSParseActionTypeShift, + TSParseActionTypeReduce, + TSParseActionTypeAccept, + TSParseActionTypeRecover, +} TSParseActionType; + +typedef union { + struct { + uint8_t type; + TSStateId state; + bool extra; + bool repetition; + } shift; + struct { + uint8_t type; + uint8_t child_count; + TSSymbol symbol; + int16_t dynamic_precedence; + uint16_t production_id; + } reduce; + uint8_t type; +} TSParseAction; + +typedef struct { + uint16_t lex_state; + uint16_t external_lex_state; +} TSLexMode; + +typedef struct { + uint16_t lex_state; + uint16_t external_lex_state; + uint16_t reserved_word_set_id; +} TSLexerMode; + +typedef union { + TSParseAction action; + struct { + uint8_t count; + bool reusable; + } entry; +} TSParseActionEntry; + +typedef struct { + int32_t start; + int32_t end; +} TSCharacterRange; + +struct TSLanguage { + uint32_t abi_version; + uint32_t symbol_count; + uint32_t alias_count; + uint32_t token_count; + uint32_t external_token_count; + uint32_t state_count; + uint32_t large_state_count; + uint32_t production_id_count; + uint32_t field_count; + uint16_t max_alias_sequence_length; + const uint16_t *parse_table; + const uint16_t *small_parse_table; + const uint32_t *small_parse_table_map; + const TSParseActionEntry *parse_actions; + const char * const *symbol_names; + const char * const *field_names; + const TSMapSlice *field_map_slices; + const TSFieldMapEntry *field_map_entries; + const TSSymbolMetadata *symbol_metadata; + const 
TSSymbol *public_symbol_map; + const uint16_t *alias_map; + const TSSymbol *alias_sequences; + const TSLexerMode *lex_modes; + bool (*lex_fn)(TSLexer *, TSStateId); + bool (*keyword_lex_fn)(TSLexer *, TSStateId); + TSSymbol keyword_capture_token; + struct { + const bool *states; + const TSSymbol *symbol_map; + void *(*create)(void); + void (*destroy)(void *); + bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); + unsigned (*serialize)(void *, char *); + void (*deserialize)(void *, const char *, unsigned); + } external_scanner; + const TSStateId *primary_state_ids; + const char *name; + const TSSymbol *reserved_words; + uint16_t max_reserved_word_set_size; + uint32_t supertype_count; + const TSSymbol *supertype_symbols; + const TSMapSlice *supertype_map_slices; + const TSSymbol *supertype_map_entries; + TSLanguageMetadata metadata; +}; + +static inline bool set_contains(const TSCharacterRange *ranges, uint32_t len, int32_t lookahead) { + uint32_t index = 0; + uint32_t size = len - index; + while (size > 1) { + uint32_t half_size = size / 2; + uint32_t mid_index = index + half_size; + const TSCharacterRange *range = &ranges[mid_index]; + if (lookahead >= range->start && lookahead <= range->end) { + return true; + } else if (lookahead > range->end) { + index = mid_index; + } + size -= half_size; + } + const TSCharacterRange *range = &ranges[index]; + return (lookahead >= range->start && lookahead <= range->end); +} + +/* + * Lexer Macros + */ + +#ifdef _MSC_VER +#define UNUSED __pragma(warning(suppress : 4101)) +#else +#define UNUSED __attribute__((unused)) +#endif + +#define START_LEXER() \ + bool result = false; \ + bool skip = false; \ + UNUSED \ + bool eof = false; \ + int32_t lookahead; \ + goto start; \ + next_state: \ + lexer->advance(lexer, skip); \ + start: \ + skip = false; \ + lookahead = lexer->lookahead; + +#define ADVANCE(state_value) \ + { \ + state = state_value; \ + goto next_state; \ + } + +#define ADVANCE_MAP(...) 
\ + { \ + static const uint16_t map[] = { __VA_ARGS__ }; \ + for (uint32_t i = 0; i < sizeof(map) / sizeof(map[0]); i += 2) { \ + if (map[i] == lookahead) { \ + state = map[i + 1]; \ + goto next_state; \ + } \ + } \ + } + +#define SKIP(state_value) \ + { \ + skip = true; \ + state = state_value; \ + goto next_state; \ + } + +#define ACCEPT_TOKEN(symbol_value) \ + result = true; \ + lexer->result_symbol = symbol_value; \ + lexer->mark_end(lexer); + +#define END_STATE() return result; + +/* + * Parse Table Macros + */ + +#define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT) + +#define STATE(id) id + +#define ACTIONS(id) id + +#define SHIFT(state_value) \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .state = (state_value) \ + } \ + }} + +#define SHIFT_REPEAT(state_value) \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .state = (state_value), \ + .repetition = true \ + } \ + }} + +#define SHIFT_EXTRA() \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .extra = true \ + } \ + }} + +#define REDUCE(symbol_name, children, precedence, prod_id) \ + {{ \ + .reduce = { \ + .type = TSParseActionTypeReduce, \ + .symbol = symbol_name, \ + .child_count = children, \ + .dynamic_precedence = precedence, \ + .production_id = prod_id \ + }, \ + }} + +#define RECOVER() \ + {{ \ + .type = TSParseActionTypeRecover \ + }} + +#define ACCEPT_INPUT() \ + {{ \ + .type = TSParseActionTypeAccept \ + }} + +#ifdef __cplusplus +} +#endif + +#endif // TREE_SITTER_PARSER_H_ diff --git a/src/Napper.Zed/languages/nap/brackets.scm b/src/Napper.Zed/languages/nap/brackets.scm new file mode 100644 index 0000000..2c2e3fb --- /dev/null +++ b/src/Napper.Zed/languages/nap/brackets.scm @@ -0,0 +1,14 @@ +; Section header brackets +("[" @open "]" @close) + +; Variable interpolation brackets +("{{" @open "}}" @close) + +; Triple-quoted string delimiters +("\"\"\"" @open "\"\"\"" @close) + +; Array brackets +(array_value "[" @open "]" @close) + +; Quoted string delimiters 
+(quoted_string "\"" @open "\"" @close) diff --git a/src/Napper.Zed/languages/nap/config.toml b/src/Napper.Zed/languages/nap/config.toml new file mode 100644 index 0000000..df621b0 --- /dev/null +++ b/src/Napper.Zed/languages/nap/config.toml @@ -0,0 +1,5 @@ +name = "Nap" +grammar = "nap" +path_suffixes = ["nap"] +line_comments = ["# "] +tab_size = 2 diff --git a/src/Napper.Zed/languages/nap/highlights.scm b/src/Napper.Zed/languages/nap/highlights.scm new file mode 100644 index 0000000..8e206db --- /dev/null +++ b/src/Napper.Zed/languages/nap/highlights.scm @@ -0,0 +1,55 @@ +; Section headers +(section_header "[" @punctuation.bracket) +(section_header "]" @punctuation.bracket) +(section_header "meta" @keyword) +(section_header "vars" @keyword) +(section_header "request" @keyword) +(section_header "headers" @keyword) +(section_header "body" @keyword) +(section_header "assert" @keyword) +(section_header "script" @keyword) +(section_header "." @punctuation.delimiter) + +; Comments +(comment) @comment + +; HTTP methods +(http_method) @function.method + +; Key-value pairs +(pair (key) @property) +(pair "=" @operator) + +; Values +(quoted_string "\"" @punctuation.delimiter) +(quoted_string (string_content) @string) +(text_fragment) @string +(triple_quoted_string "\"\"\"" @punctuation.delimiter) +(triple_quoted_string (body_content (body_text) @string)) + +; URLs (in shorthand requests) +(shorthand_request (value (text_fragment) @string.special.url)) + +; Variable interpolation +(variable_ref "{{" @punctuation.special) +(variable_ref "}}" @punctuation.special) +(variable_ref) @variable + +; Arrays +(array_value "[" @punctuation.bracket) +(array_value "]" @punctuation.bracket) +(array_value "," @punctuation.delimiter) + +; Assertions +(assertion_exists (key) @property) +(assertion_exists "exists" @keyword.operator) +(assertion_contains (key) @property) +(assertion_contains "contains" @keyword.operator) +(assertion_matches (key) @property) +(assertion_matches "matches" 
@keyword.operator) +(assertion_lt (key) @property) +(assertion_lt "<" @operator) +(assertion_gt (key) @property) +(assertion_gt ">" @operator) +(duration_value) @number +(raw_value) @string diff --git a/src/Napper.Zed/languages/nap/indents.scm b/src/Napper.Zed/languages/nap/indents.scm new file mode 100644 index 0000000..e30611a --- /dev/null +++ b/src/Napper.Zed/languages/nap/indents.scm @@ -0,0 +1,2 @@ +; Indent after section headers +(section_header) @indent diff --git a/src/Napper.Zed/languages/nap/injections.scm b/src/Napper.Zed/languages/nap/injections.scm new file mode 100644 index 0000000..03408b8 --- /dev/null +++ b/src/Napper.Zed/languages/nap/injections.scm @@ -0,0 +1,4 @@ +; Inject JSON highlighting into triple-quoted body content +(triple_quoted_string + (body_content) @content + (#set! "language" "json")) diff --git a/src/Napper.Zed/languages/nap/outline.scm b/src/Napper.Zed/languages/nap/outline.scm new file mode 100644 index 0000000..fd7aed2 --- /dev/null +++ b/src/Napper.Zed/languages/nap/outline.scm @@ -0,0 +1,2 @@ +; Expose section headers as outline items +(section_header) @item diff --git a/src/Napper.Zed/languages/nap/redactions.scm b/src/Napper.Zed/languages/nap/redactions.scm new file mode 100644 index 0000000..d76cc34 --- /dev/null +++ b/src/Napper.Zed/languages/nap/redactions.scm @@ -0,0 +1,2 @@ +; Mask variable interpolation values during screen sharing +(variable_ref) @redact diff --git a/src/Napper.Zed/languages/nap/runnables.scm b/src/Napper.Zed/languages/nap/runnables.scm new file mode 100644 index 0000000..e967295 --- /dev/null +++ b/src/Napper.Zed/languages/nap/runnables.scm @@ -0,0 +1,2 @@ +; Detect [request] section as runnable — offers "Run" in gutter +(section_header "request" @run (#set! 
tag "nap-request")) diff --git a/src/Napper.Zed/languages/napenv/brackets.scm b/src/Napper.Zed/languages/napenv/brackets.scm new file mode 100644 index 0000000..eca4d5c --- /dev/null +++ b/src/Napper.Zed/languages/napenv/brackets.scm @@ -0,0 +1,2 @@ +; Quoted string delimiters +(quoted_string "\"" @open "\"" @close) diff --git a/src/Napper.Zed/languages/napenv/config.toml b/src/Napper.Zed/languages/napenv/config.toml new file mode 100644 index 0000000..0086dfb --- /dev/null +++ b/src/Napper.Zed/languages/napenv/config.toml @@ -0,0 +1,5 @@ +name = "Napenv" +grammar = "napenv" +path_suffixes = ["napenv"] +line_comments = ["# "] +tab_size = 2 diff --git a/src/Napper.Zed/languages/napenv/highlights.scm b/src/Napper.Zed/languages/napenv/highlights.scm new file mode 100644 index 0000000..6078808 --- /dev/null +++ b/src/Napper.Zed/languages/napenv/highlights.scm @@ -0,0 +1,11 @@ +; Comments +(comment) @comment + +; Key-value pairs +(pair (key) @property) +(pair "=" @operator) + +; Values +(quoted_string "\"" @punctuation.delimiter) +(quoted_string (string_content) @string) +(unquoted_value) @string diff --git a/src/Napper.Zed/languages/naplist/brackets.scm b/src/Napper.Zed/languages/naplist/brackets.scm new file mode 100644 index 0000000..a555965 --- /dev/null +++ b/src/Napper.Zed/languages/naplist/brackets.scm @@ -0,0 +1,5 @@ +; Section header brackets +("[" @open "]" @close) + +; Quoted string delimiters +(quoted_string "\"" @open "\"" @close) diff --git a/src/Napper.Zed/languages/naplist/config.toml b/src/Napper.Zed/languages/naplist/config.toml new file mode 100644 index 0000000..cf10f38 --- /dev/null +++ b/src/Napper.Zed/languages/naplist/config.toml @@ -0,0 +1,5 @@ +name = "Naplist" +grammar = "naplist" +path_suffixes = ["naplist"] +line_comments = ["# "] +tab_size = 2 diff --git a/src/Napper.Zed/languages/naplist/highlights.scm b/src/Napper.Zed/languages/naplist/highlights.scm new file mode 100644 index 0000000..e85bd2e --- /dev/null +++ 
b/src/Napper.Zed/languages/naplist/highlights.scm @@ -0,0 +1,21 @@ +; Section headers +(section_header "[" @punctuation.bracket) +(section_header "]" @punctuation.bracket) +(section_header "meta" @keyword) +(section_header "vars" @keyword) +(section_header "steps" @keyword) + +; Comments +(comment) @comment + +; Key-value pairs +(pair (key) @property) +(pair "=" @operator) + +; Values +(quoted_string "\"" @punctuation.delimiter) +(quoted_string (string_content) @string) +(unquoted_value) @string + +; Steps (file paths) +(step) @string.special.path diff --git a/src/Napper.Zed/languages/naplist/indents.scm b/src/Napper.Zed/languages/naplist/indents.scm new file mode 100644 index 0000000..e30611a --- /dev/null +++ b/src/Napper.Zed/languages/naplist/indents.scm @@ -0,0 +1,2 @@ +; Indent after section headers +(section_header) @indent diff --git a/src/Napper.Zed/languages/naplist/outline.scm b/src/Napper.Zed/languages/naplist/outline.scm new file mode 100644 index 0000000..fd7aed2 --- /dev/null +++ b/src/Napper.Zed/languages/naplist/outline.scm @@ -0,0 +1,2 @@ +; Expose section headers as outline items +(section_header) @item diff --git a/src/Napper.Zed/src/lib.rs b/src/Napper.Zed/src/lib.rs new file mode 100644 index 0000000..6e70803 --- /dev/null +++ b/src/Napper.Zed/src/lib.rs @@ -0,0 +1,265 @@ +//! Nap Zed extension — language support for `.nap`, `.naplist`, `.napenv` files. +//! +//! Provides syntax highlighting (via Tree-sitter), runnables, slash commands, +//! and a language server integration point for the Nap Language Server. + +use std::{fs, path::Path}; +use zed_extension_api::{ + self as zed, process::Output, serde_json, Command, LanguageServerId, SlashCommand, + SlashCommandArgumentCompletion, SlashCommandOutput, SlashCommandOutputSection, Worktree, +}; + +/// Named constant for the nap-run slash command. +const NAP_RUN_COMMAND: &str = "nap-run"; + +/// Named constant for the nap-import-openapi slash command. 
+const NAP_IMPORT_OPENAPI_COMMAND: &str = "nap-import-openapi"; + +/// File extension for request files. +const NAP_FILE_EXTENSION: &str = "nap"; + +/// File extension for playlist files. +const NAPLIST_FILE_EXTENSION: &str = "naplist"; + +/// Language server ID registered in extension.toml. +const NAP_LSP_ID: &str = "nap-lsp"; + +/// CLI binary name. +const NAP_CLI: &str = "nap"; + +/// Usage message for the nap-run command. +const NAP_RUN_USAGE: &str = "Usage: /nap-run <file.nap>"; + +/// Usage message for the nap-import-openapi command. +const OPENAPI_IMPORT_USAGE: &str = "Usage: /nap-import-openapi <spec.json|spec.yaml>"; + +/// Error prefix for CLI launch failures. +const CLI_LAUNCH_ERROR: &str = "Is `nap` installed and on PATH?"; + +/// Stderr separator in error output. +const STDERR_SEPARATOR: &str = "\n--- stderr ---\n"; + +/// LSP not-yet-available message. +const LSP_NOT_AVAILABLE: &str = "Nap Language Server not yet available — install when released"; + +/// Nap Zed extension entry point — implements all Zed extension traits. 
+pub struct NapExtension; + +#[cfg(not(tarpaulin_include))] +impl zed::Extension for NapExtension { + fn new() -> Self { + NapExtension + } + + fn language_server_command( + &mut self, + language_server_id: &LanguageServerId, + worktree: &Worktree, + ) -> Result<Command, String> { + let _ = worktree; + resolve_language_server(language_server_id.as_ref()) + } + + fn language_server_initialization_options( + &mut self, + language_server_id: &LanguageServerId, + worktree: &Worktree, + ) -> Result<Option<serde_json::Value>, String> { + let _ = (language_server_id, worktree); + Ok(None) + } + + fn language_server_workspace_configuration( + &mut self, + language_server_id: &LanguageServerId, + worktree: &Worktree, + ) -> Result<Option<serde_json::Value>, String> { + let _ = (language_server_id, worktree); + Ok(None) + } + + fn complete_slash_command_argument( + &self, + command: SlashCommand, + _args: Vec<String>, + ) -> Result<Vec<SlashCommandArgumentCompletion>, String> { + let cwd = + std::env::current_dir().map_err(|e| format!("Failed to get working directory: {e}"))?; + route_completions(&command.name, &cwd) + } + + fn run_slash_command( + &self, + command: SlashCommand, + args: Vec<String>, + _worktree: Option<&Worktree>, + ) -> Result<SlashCommandOutput, String> { + match command.name.as_str() { + NAP_RUN_COMMAND => run_nap_command(&args), + NAP_IMPORT_OPENAPI_COMMAND => run_import_openapi_command(&args), + _ => Err(format!("Unknown command: {}", command.name)), + } + } +} + +/// Resolve language server command by ID. +fn resolve_language_server(id: &str) -> Result<Command, String> { + if id != NAP_LSP_ID { + return Err(format!("Unknown language server: {id}")); + } + // TODO: LOUD — implement LSP binary discovery and launch + Err(LSP_NOT_AVAILABLE.to_string()) +} + +/// Route slash command argument completions by command name. 
+fn route_completions( + name: &str, + path: &Path, +) -> Result<Vec<SlashCommandArgumentCompletion>, String> { + match name { + NAP_RUN_COMMAND => { + collect_file_completions(path, &[NAP_FILE_EXTENSION, NAPLIST_FILE_EXTENSION]) + } + NAP_IMPORT_OPENAPI_COMMAND => collect_file_completions(path, &["json", "yaml", "yml"]), + _ => Ok(Vec::new()), + } +} + +/// Recursively collect files matching given extensions for slash command argument completion. +fn collect_file_completions( + path: &Path, + extensions: &[&str], +) -> Result<Vec<SlashCommandArgumentCompletion>, String> { + let mut completions = Vec::new(); + let entries = fs::read_dir(path).map_err(|e| format!("Failed to read directory: {e}"))?; + collect_files_recursive(entries, extensions, "", &mut completions); + Ok(completions) +} + +/// Walk directory tree, adding files with matching extensions to completions. +fn collect_files_recursive( + entries: fs::ReadDir, + extensions: &[&str], + prefix: &str, + completions: &mut Vec<SlashCommandArgumentCompletion>, +) { + let valid_entries = entries.flatten().filter_map(|e| { + e.file_name() + .into_string() + .ok() + .map(|name| (e.path(), name)) + }); + + for (path, name) in valid_entries { + let full_path = build_relative_path(prefix, &name); + + if path.is_dir() && !name.starts_with('.') { + if let Ok(sub_entries) = fs::read_dir(&path) { + collect_files_recursive(sub_entries, extensions, &full_path, completions); + } + } else if let Some(ext) = path.extension().and_then(|e| e.to_str()) { + if extensions.contains(&ext) { + completions.push(SlashCommandArgumentCompletion { + label: full_path.clone(), + new_text: full_path, + run_command: true, + }); + } + } + } +} + +/// Build a relative path by joining prefix and name. +fn build_relative_path(prefix: &str, name: &str) -> String { + if prefix.is_empty() { + name.to_string() + } else { + format!("{prefix}/{name}") + } +} + +/// Format CLI output for a successful nap run. 
+fn format_run_success(stdout: &[u8]) -> String { + String::from_utf8_lossy(stdout).to_string() +} + +/// Format CLI output for a failed command. +fn format_command_error(stdout: &[u8], stderr: &[u8]) -> String { + let stdout_str = String::from_utf8_lossy(stdout); + let stderr_str = String::from_utf8_lossy(stderr); + format!("{stdout_str}{STDERR_SEPARATOR}{stderr_str}") +} + +/// Format CLI output for a successful `OpenAPI` import. +fn format_import_success(spec_path: &str, stdout: &[u8]) -> String { + let stdout_str = String::from_utf8_lossy(stdout); + format!("Generated .nap files from {spec_path}:\n{stdout_str}") +} + +/// Format CLI output for a failed `OpenAPI` import. +fn format_import_error(stdout: &[u8], stderr: &[u8]) -> String { + let stdout_str = String::from_utf8_lossy(stdout); + let stderr_str = String::from_utf8_lossy(stderr); + format!("OpenAPI import failed:\n{stdout_str}\n{stderr_str}") +} + +/// Build a `SlashCommandOutput` with a single section spanning the full text. +fn build_slash_output(text: &str, label: String) -> SlashCommandOutput { + SlashCommandOutput { + text: text.to_string(), + sections: vec![SlashCommandOutputSection { + range: (0..text.len()).into(), + label, + }], + } +} + +/// Process nap run CLI output into a `SlashCommandOutput`. +fn process_run_output(output: &Output, file_path: &str) -> SlashCommandOutput { + let result = if output.status == Some(0) { + format_run_success(&output.stdout) + } else { + format_command_error(&output.stdout, &output.stderr) + }; + build_slash_output(&result, format!("nap run {file_path}")) +} + +/// Process `OpenAPI` import CLI output into a `SlashCommandOutput`. 
+fn process_import_output(output: &Output, spec_path: &str) -> SlashCommandOutput { + let result = if output.status == Some(0) { + format_import_success(spec_path, &output.stdout) + } else { + format_import_error(&output.stdout, &output.stderr) + }; + build_slash_output(&result, format!("nap generate openapi {spec_path}")) +} + +/// Execute `nap run <file>` — thin WASM wrapper over [`process_run_output`]. +#[cfg(not(tarpaulin_include))] +fn run_nap_command(args: &[String]) -> Result<SlashCommandOutput, String> { + let file_path = args.first().ok_or(NAP_RUN_USAGE)?; + let output = Command::new(NAP_CLI) + .args(["run", file_path, "--output", "text"]) + .output() + .map_err(|e| format!("Failed to run nap CLI: {e}. {CLI_LAUNCH_ERROR}"))?; + Ok(process_run_output(&output, file_path)) +} + +/// Execute `nap generate openapi` — thin WASM wrapper over [`process_import_output`]. +#[cfg(not(tarpaulin_include))] +fn run_import_openapi_command(args: &[String]) -> Result<SlashCommandOutput, String> { + let spec_path = args.first().ok_or(OPENAPI_IMPORT_USAGE)?; + let output = Command::new(NAP_CLI) + .args(["generate", "openapi", "--spec", spec_path]) + .output() + .map_err(|e| format!("Failed to run nap CLI: {e}. 
{CLI_LAUNCH_ERROR}"))?; + Ok(process_import_output(&output, spec_path)) +} + +mod _register { + use zed_extension_api as zed; + zed::register_extension!(super::NapExtension); +} + +#[cfg(test)] +mod tests; diff --git a/src/Napper.Zed/src/tests.rs b/src/Napper.Zed/src/tests.rs new file mode 100644 index 0000000..e14e940 --- /dev/null +++ b/src/Napper.Zed/src/tests.rs @@ -0,0 +1,2 @@ +mod tests_fs; +mod tests_pure; diff --git a/src/Napper.Zed/src/tests/tests_fs.rs b/src/Napper.Zed/src/tests/tests_fs.rs new file mode 100644 index 0000000..d75b499 --- /dev/null +++ b/src/Napper.Zed/src/tests/tests_fs.rs @@ -0,0 +1,244 @@ +use crate::*; +use std::{ + fs::{self, File}, + path::Path, +}; +use tempfile::TempDir; +use zed_extension_api::Extension; + +/// Create a temp dir structure for file collection tests. +fn create_test_dir() -> TempDir { + let dir = TempDir::new().unwrap(); + let root = dir.path(); + + let _ = File::create(root.join("api.nap")).unwrap(); + let _ = File::create(root.join("suite.naplist")).unwrap(); + let _ = File::create(root.join("spec.json")).unwrap(); + let _ = File::create(root.join("spec.yaml")).unwrap(); + let _ = File::create(root.join("readme.txt")).unwrap(); + let _ = File::create(root.join("config.toml")).unwrap(); + + let _ = fs::create_dir_all(root.join("pets")).unwrap(); + let _ = File::create(root.join("pets/get-all.nap")).unwrap(); + let _ = File::create(root.join("pets/create.nap")).unwrap(); + let _ = File::create(root.join("pets/openapi.yml")).unwrap(); + + let _ = fs::create_dir_all(root.join("pets/v2")).unwrap(); + let _ = File::create(root.join("pets/v2/get-all.nap")).unwrap(); + + let _ = fs::create_dir_all(root.join(".hidden")).unwrap(); + let _ = File::create(root.join(".hidden/secret.nap")).unwrap(); + + let _ = fs::create_dir_all(root.join("empty")).unwrap(); + + dir +} + +fn test_slash_command(name: &str) -> SlashCommand { + SlashCommand { + name: name.to_string(), + description: String::new(), + tooltip_text: String::new(), + 
requires_argument: false, + } +} + +// ─── collect_files_recursive ──────────────────────────────── + +#[test] +fn collects_nap_files_from_root_and_subdirs() { + let dir = create_test_dir(); + let mut completions = Vec::new(); + let entries = fs::read_dir(dir.path()).unwrap(); + collect_files_recursive(entries, &["nap"], "", &mut completions); + + let labels: Vec<&str> = completions.iter().map(|c| c.label.as_str()).collect(); + assert!(labels.contains(&"api.nap"), "should find root nap file"); + assert!(labels.contains(&"pets/get-all.nap")); + assert!(labels.contains(&"pets/create.nap")); + assert!(labels.contains(&"pets/v2/get-all.nap")); +} + +#[test] +fn skips_hidden_directories() { + let dir = create_test_dir(); + let mut completions = Vec::new(); + let entries = fs::read_dir(dir.path()).unwrap(); + collect_files_recursive(entries, &["nap"], "", &mut completions); + + for c in &completions { + assert!(!c.label.contains(".hidden"), "leaked: {}", c.label); + } +} + +#[test] +fn filters_by_extension() { + let dir = create_test_dir(); + let mut completions = Vec::new(); + let entries = fs::read_dir(dir.path()).unwrap(); + collect_files_recursive(entries, &["nap"], "", &mut completions); + + for c in &completions { + assert!(c.label.ends_with(".nap"), "unexpected: {}", c.label); + } +} + +#[test] +fn collects_multiple_extensions() { + let dir = create_test_dir(); + let mut completions = Vec::new(); + let entries = fs::read_dir(dir.path()).unwrap(); + collect_files_recursive(entries, &["json", "yaml", "yml"], "", &mut completions); + + let labels: Vec<&str> = completions.iter().map(|c| c.label.as_str()).collect(); + assert!(labels.contains(&"spec.json")); + assert!(labels.contains(&"spec.yaml")); + assert!(labels.contains(&"pets/openapi.yml")); + for l in &labels { + assert!(!l.ends_with(".nap") && !l.ends_with(".txt"), "bad: {l}"); + } +} + +#[test] +fn collects_naplist_files() { + let dir = create_test_dir(); + let mut completions = Vec::new(); + let entries = 
fs::read_dir(dir.path()).unwrap(); + collect_files_recursive(entries, &["nap", "naplist"], "", &mut completions); + + let labels: Vec<&str> = completions.iter().map(|c| c.label.as_str()).collect(); + assert!(labels.contains(&"suite.naplist")); + assert!(labels.contains(&"api.nap")); +} + +#[test] +fn empty_directory_returns_no_completions() { + let dir = TempDir::new().unwrap(); + let mut completions = Vec::new(); + let entries = fs::read_dir(dir.path()).unwrap(); + collect_files_recursive(entries, &["nap"], "", &mut completions); + assert!(completions.is_empty()); +} + +#[test] +fn no_matching_extensions_returns_empty() { + let dir = create_test_dir(); + let mut completions = Vec::new(); + let entries = fs::read_dir(dir.path()).unwrap(); + collect_files_recursive(entries, &["rs", "py", "go"], "", &mut completions); + assert!(completions.is_empty()); +} + +#[test] +fn completions_have_run_command_true() { + let dir = create_test_dir(); + let mut completions = Vec::new(); + let entries = fs::read_dir(dir.path()).unwrap(); + collect_files_recursive(entries, &["nap"], "", &mut completions); + for c in &completions { + assert!(c.run_command); + } +} + +#[test] +fn completion_label_matches_new_text() { + let dir = create_test_dir(); + let mut completions = Vec::new(); + let entries = fs::read_dir(dir.path()).unwrap(); + collect_files_recursive(entries, &["nap"], "", &mut completions); + for c in &completions { + assert_eq!(c.label, c.new_text); + } +} + +#[test] +fn prefix_is_applied_to_all_paths() { + let dir = create_test_dir(); + let mut completions = Vec::new(); + let entries = fs::read_dir(dir.path()).unwrap(); + collect_files_recursive(entries, &["nap"], "workspace", &mut completions); + for c in &completions { + assert!(c.label.starts_with("workspace/"), "bad: {}", c.label); + } +} + +#[test] +#[cfg(target_os = "linux")] +fn skips_non_utf8_filenames() { + use std::ffi::OsStr; + use std::os::unix::ffi::OsStrExt; + + let dir = TempDir::new().unwrap(); + let root = 
dir.path(); + let _ = File::create(root.join("valid.nap")).unwrap(); + let invalid_name = OsStr::from_bytes(&[0xFF, 0xFE, b'.', b'n', b'a', b'p']); + let _ = File::create(root.join(invalid_name)).unwrap(); + + let mut completions = Vec::new(); + let entries = fs::read_dir(root).unwrap(); + collect_files_recursive(entries, &["nap"], "", &mut completions); + + assert_eq!(completions.len(), 1); + assert_eq!(completions[0].label, "valid.nap"); +} + +// ─── collect_file_completions ─────────────────────────────── + +#[test] +fn collect_file_completions_from_real_dir() { + let dir = create_test_dir(); + + let completions = collect_file_completions(dir.path(), &["nap", "naplist"]).unwrap(); + assert!(!completions.is_empty()); + assert!(completions.iter().any(|c| c.label.ends_with(".nap"))); +} + +// ─── route_completions ────────────────────────────────────── + +#[test] +fn route_completions_nap_run_finds_nap_files() { + let dir = create_test_dir(); + + let result = route_completions(NAP_RUN_COMMAND, dir.path()).unwrap(); + assert!(result.iter().any(|c| c.label.ends_with(".nap"))); + assert!(result.iter().any(|c| c.label.ends_with(".naplist"))); + for c in &result { + assert!(c.label.ends_with(".nap") || c.label.ends_with(".naplist")); + } +} + +#[test] +fn route_completions_openapi_finds_spec_files() { + let dir = create_test_dir(); + + let result = route_completions(NAP_IMPORT_OPENAPI_COMMAND, dir.path()).unwrap(); + assert!(result.iter().any(|c| c.label.ends_with(".json"))); + assert!(result + .iter() + .any(|c| c.label.ends_with(".yaml") || c.label.ends_with(".yml"))); +} + +#[test] +fn route_completions_unknown_returns_empty() { + let result = route_completions("unknown", Path::new(".")).unwrap(); + assert!(result.is_empty()); +} + +#[test] +fn complete_unknown_command_returns_empty() { + let ext = NapExtension; + let result = ext + .complete_slash_command_argument(test_slash_command("nonexistent"), vec![]) + .unwrap(); + assert!(result.is_empty()); +} + +#[test] +fn 
run_unknown_command_returns_error() { + let ext = NapExtension; + let err = ext + .run_slash_command(test_slash_command("bogus"), vec![], None) + .unwrap_err(); + assert!(err.contains("Unknown command")); + assert!(err.contains("bogus")); +} diff --git a/src/Napper.Zed/src/tests/tests_pure.rs b/src/Napper.Zed/src/tests/tests_pure.rs new file mode 100644 index 0000000..71a784e --- /dev/null +++ b/src/Napper.Zed/src/tests/tests_pure.rs @@ -0,0 +1,291 @@ +use crate::*; +use zed_extension_api::{process::Output, Extension}; + +// ─── build_relative_path ──────────────────────────────────── + +#[test] +fn relative_path_empty_prefix_returns_name() { + assert_eq!(build_relative_path("", "file.nap"), "file.nap"); +} + +#[test] +fn relative_path_with_prefix_joins_with_slash() { + assert_eq!(build_relative_path("subdir", "file.nap"), "subdir/file.nap"); +} + +#[test] +fn relative_path_nested_prefix() { + assert_eq!(build_relative_path("a/b/c", "deep.nap"), "a/b/c/deep.nap"); +} + +// ─── format_run_success ───────────────────────────────────── + +#[test] +fn run_success_returns_stdout_as_string() { + let stdout = b"HTTP/1.1 200 OK\r\nContent-Type: application/json"; + assert_eq!( + format_run_success(stdout), + "HTTP/1.1 200 OK\r\nContent-Type: application/json" + ); +} + +#[test] +fn run_success_handles_empty_stdout() { + assert_eq!(format_run_success(b""), ""); +} + +#[test] +fn run_success_handles_non_utf8_bytes() { + let stdout = vec![0xFF, 0xFE, b'O', b'K']; + let result = format_run_success(&stdout); + assert!(result.contains("OK")); + assert!(result.contains('\u{FFFD}')); +} + +// ─── format_command_error ─────────────────────────────────── + +#[test] +fn command_error_combines_stdout_and_stderr() { + let result = format_command_error(b"partial output", b"connection refused"); + assert!(result.contains("partial output")); + assert!(result.contains(STDERR_SEPARATOR)); + assert!(result.contains("connection refused")); +} + +#[test] +fn command_error_with_empty_stderr() { 
+ let result = format_command_error(b"output", b""); + assert!(result.contains("output")); + assert!(result.contains(STDERR_SEPARATOR)); +} + +#[test] +fn command_error_with_empty_stdout() { + let result = format_command_error(b"", b"error happened"); + assert!(result.contains(STDERR_SEPARATOR)); + assert!(result.contains("error happened")); +} + +// ─── format_import_success / error ────────────────────────── + +#[test] +fn import_success_includes_spec_path_and_stdout() { + let result = format_import_success("petstore.json", b"Created pets.nap\nCreated users.nap"); + assert!(result.starts_with("Generated .nap files from petstore.json:")); + assert!(result.contains("Created pets.nap")); + assert!(result.contains("Created users.nap")); +} + +#[test] +fn import_success_empty_stdout() { + let result = format_import_success("empty.yaml", b""); + assert!(result.contains("Generated .nap files from empty.yaml:")); +} + +#[test] +fn import_error_includes_both_streams() { + let result = format_import_error(b"partial", b"invalid spec"); + assert!(result.starts_with("OpenAPI import failed:")); + assert!(result.contains("partial")); + assert!(result.contains("invalid spec")); +} + +// ─── build_slash_output ───────────────────────────────────── + +#[test] +fn slash_output_text_matches() { + let output = build_slash_output("hello world", "test".to_string()); + assert_eq!(output.text, "hello world"); +} + +#[test] +fn slash_output_has_single_section() { + let output = build_slash_output("hello world", "test".to_string()); + assert_eq!(output.sections.len(), 1); +} + +#[test] +fn slash_output_section_label() { + let output = build_slash_output("content", "my label".to_string()); + assert_eq!(output.sections[0].label, "my label"); +} + +#[test] +fn slash_output_section_range_spans_full_text() { + let text = "some response text"; + let output = build_slash_output(text, "label".to_string()); + let range = &output.sections[0].range; + assert_eq!(range.start, 0); + 
assert_eq!(range.end as usize, text.len()); +} + +#[test] +fn slash_output_empty_text() { + let output = build_slash_output("", "empty".to_string()); + assert_eq!(output.text, ""); + assert_eq!(output.sections[0].range.start, 0u32); + assert_eq!(output.sections[0].range.end, 0u32); +} + +// ─── Constants ────────────────────────────────────────────── + +#[test] +fn nap_run_usage_mentions_file() { + assert!(NAP_RUN_USAGE.contains("file.nap")); +} + +#[test] +fn openapi_usage_mentions_spec() { + assert!(OPENAPI_IMPORT_USAGE.contains("spec.json")); + assert!(OPENAPI_IMPORT_USAGE.contains("spec.yaml")); +} + +#[test] +fn cli_launch_error_mentions_nap() { + assert!(CLI_LAUNCH_ERROR.contains("nap")); +} + +#[test] +fn lsp_id_constant_is_nap_lsp() { + assert_eq!(NAP_LSP_ID, "nap-lsp"); +} + +#[test] +fn cli_constant_is_nap() { + assert_eq!(NAP_CLI, "nap"); +} + +#[test] +fn command_constants_match_extension_toml() { + assert_eq!(NAP_RUN_COMMAND, "nap-run"); + assert_eq!(NAP_IMPORT_OPENAPI_COMMAND, "nap-import-openapi"); +} + +#[test] +fn file_extension_constants() { + assert_eq!(NAP_FILE_EXTENSION, "nap"); + assert_eq!(NAPLIST_FILE_EXTENSION, "naplist"); +} + +// ─── resolve_language_server ──────────────────────────────── + +#[test] +fn resolve_known_lsp_returns_not_available() { + let result = resolve_language_server(NAP_LSP_ID); + let err = result.unwrap_err(); + assert_eq!(err, LSP_NOT_AVAILABLE); +} + +#[test] +fn resolve_unknown_lsp_returns_error_with_id() { + let result = resolve_language_server("some-other-lsp"); + let err = result.unwrap_err(); + assert!(err.contains("Unknown language server")); + assert!(err.contains("some-other-lsp")); +} + +// ─── run_nap_command / run_import_openapi_command args ────── + +#[test] +fn run_nap_command_empty_args_returns_usage_error() { + let result = run_nap_command(&[]); + let err = result.unwrap_err(); + assert_eq!(err, NAP_RUN_USAGE); +} + +#[test] +fn run_import_openapi_empty_args_returns_usage_error() { + let result = 
run_import_openapi_command(&[]); + let err = result.unwrap_err(); + assert_eq!(err, OPENAPI_IMPORT_USAGE); +} + +// ─── Extension::new ───────────────────────────────────────── + +#[test] +fn extension_new_creates_instance() { + let _ext = <NapExtension as Extension>::new(); +} + +// ─── process_run_output ───────────────────────────────────── + +fn make_output(status: i32, stdout: &[u8], stderr: &[u8]) -> Output { + Output { + status: Some(status), + stdout: stdout.to_vec(), + stderr: stderr.to_vec(), + } +} + +#[test] +fn process_run_output_success_uses_stdout() { + let output = make_output(0, b"HTTP/1.1 200 OK", b""); + let result = process_run_output(&output, "api.nap"); + assert_eq!(result.text, "HTTP/1.1 200 OK"); + assert_eq!(result.sections[0].label, "nap run api.nap"); +} + +#[test] +fn process_run_output_failure_combines_streams() { + let output = make_output(1, b"partial", b"connection refused"); + let result = process_run_output(&output, "api.nap"); + assert!(result.text.contains("partial")); + assert!(result.text.contains(STDERR_SEPARATOR)); + assert!(result.text.contains("connection refused")); +} + +#[test] +fn process_run_output_section_range() { + let output = make_output(0, b"response body", b""); + let result = process_run_output(&output, "test.nap"); + assert_eq!(result.sections.len(), 1); + assert_eq!(result.sections[0].range.start, 0); + assert_eq!(result.sections[0].range.end as usize, result.text.len()); +} + +#[test] +fn process_run_output_none_status_treated_as_error() { + let output = Output { + status: None, + stdout: b"partial".to_vec(), + stderr: b"terminated".to_vec(), + }; + let result = process_run_output(&output, "test.nap"); + assert!(result.text.contains(STDERR_SEPARATOR)); + assert!(result.text.contains("terminated")); +} + +// ─── process_import_output ────────────────────────────────── + +#[test] +fn process_import_output_success_prepends_message() { + let output = make_output(0, b"pets.nap\nusers.nap", b""); + let result = 
process_import_output(&output, "petstore.json"); + assert!(result + .text + .starts_with("Generated .nap files from petstore.json:")); + assert!(result.text.contains("pets.nap")); + assert_eq!( + result.sections[0].label, + "nap generate openapi petstore.json" + ); +} + +#[test] +fn process_import_output_failure_shows_error() { + let output = make_output(1, b"", b"invalid spec format"); + let result = process_import_output(&output, "bad.yaml"); + assert!(result.text.starts_with("OpenAPI import failed:")); + assert!(result.text.contains("invalid spec format")); +} + +#[test] +fn process_import_output_none_status_treated_as_error() { + let output = Output { + status: None, + stdout: b"killed".to_vec(), + stderr: b"signal".to_vec(), + }; + let result = process_import_output(&output, "spec.json"); + assert!(result.text.starts_with("OpenAPI import failed:")); +} diff --git a/tests/Nap.Core.Tests/OpenApiCliTests.fs b/tests/Nap.Core.Tests/OpenApiCliTests.fs deleted file mode 100644 index 45180c8..0000000 --- a/tests/Nap.Core.Tests/OpenApiCliTests.fs +++ /dev/null @@ -1,120 +0,0 @@ -module OpenApiCliTests - -open System -open System.Net.Http -open Xunit -open Nap.Core.OpenApiTypes - -/// Direct F# API tests against the live Petstore OpenAPI spec. -/// CLI-based e2e tests are in OpenApiE2eTests.fs — these test -/// the OpenApiGenerator.generate function without a CLI process. 
- -// --- Constants --- - -[<Literal>] -let private PetstoreSpecUrl = "https://petstore3.swagger.io/api/v3/openapi.json" - -[<Literal>] -let private MinExpectedNapFiles = 10 - -[<Literal>] -let private PetTagFolder = "pet" - -[<Literal>] -let private StoreTagFolder = "store" - -[<Literal>] -let private UserTagFolder = "user" - -// --- Helpers --- - -let private httpClient = new HttpClient() - -let private downloadSpec () : string = - httpClient.GetStringAsync(PetstoreSpecUrl) - |> Async.AwaitTask - |> Async.RunSynchronously - -// --- E2E: F# API directly (no CLI process) --- - -[<Fact>] -let ``OpenApiGenerator.generate succeeds with live Petstore spec`` () = - let specContent = downloadSpec () - match Nap.Core.OpenApiGenerator.generate specContent with - | Ok result -> - Assert.True(result.NapFiles.Length >= MinExpectedNapFiles) - Assert.False(String.IsNullOrEmpty(result.Playlist.Content)) - Assert.False(String.IsNullOrEmpty(result.Environment.Content)) - | Error msg -> - Assert.Fail($"Expected Ok but got Error: {msg}") - -[<Fact>] -let ``OpenApiGenerator.generate produces correct tag folders for Petstore`` () = - let specContent = downloadSpec () - match Nap.Core.OpenApiGenerator.generate specContent with - | Ok result -> - let hasPet = result.NapFiles |> List.exists (fun f -> f.FileName.StartsWith($"{PetTagFolder}/")) - let hasStore = result.NapFiles |> List.exists (fun f -> f.FileName.StartsWith($"{StoreTagFolder}/")) - let hasUser = result.NapFiles |> List.exists (fun f -> f.FileName.StartsWith($"{UserTagFolder}/")) - Assert.True(hasPet, "Should have pet/ files") - Assert.True(hasStore, "Should have store/ files") - Assert.True(hasUser, "Should have user/ files") - | Error msg -> - Assert.Fail($"Expected Ok but got Error: {msg}") - -[<Fact>] -let ``OpenApiGenerator.generate includes api_key auth for Petstore`` () = - let specContent = downloadSpec () - match Nap.Core.OpenApiGenerator.generate specContent with - | Ok result -> - let hasApiKey = - result.NapFiles 
|> List.exists (fun f -> - f.Content.Contains(SectionRequestHeaders) && f.Content.Contains("api_key")) - Assert.True(hasApiKey, "At least one endpoint should have api_key auth header") - | Error msg -> - Assert.Fail($"Expected Ok but got Error: {msg}") - -[<Fact>] -let ``OpenApiGenerator.generate produces baseUrl in environment`` () = - let specContent = downloadSpec () - match Nap.Core.OpenApiGenerator.generate specContent with - | Ok result -> - Assert.Contains(BaseUrlKey, result.Environment.Content) - Assert.Contains("/api/v3", result.Environment.Content) - | Error msg -> - Assert.Fail($"Expected Ok but got Error: {msg}") - -[<Fact>] -let ``OpenApiGenerator.generate produces playlist referencing all files`` () = - let specContent = downloadSpec () - match Nap.Core.OpenApiGenerator.generate specContent with - | Ok result -> - Assert.Contains(SectionSteps, result.Playlist.Content) - for napFile in result.NapFiles do - Assert.Contains(napFile.FileName, result.Playlist.Content) - | Error msg -> - Assert.Fail($"Expected Ok but got Error: {msg}") - -[<Fact>] -let ``OpenApiGenerator.generate produces vars for path params`` () = - let specContent = downloadSpec () - match Nap.Core.OpenApiGenerator.generate specContent with - | Ok result -> - let hasVars = - result.NapFiles |> List.exists (fun f -> - f.Content.Contains(SectionVars) && f.Content.Contains(VarsPlaceholder)) - Assert.True(hasVars, "At least one endpoint should have [vars] with REPLACE_ME") - | Error msg -> - Assert.Fail($"Expected Ok but got Error: {msg}") - -[<Fact>] -let ``OpenApiGenerator.generate produces request bodies for POST endpoints`` () = - let specContent = downloadSpec () - match Nap.Core.OpenApiGenerator.generate specContent with - | Ok result -> - let hasBody = - result.NapFiles |> List.exists (fun f -> - f.Content.Contains("POST") && f.Content.Contains(SectionRequestBody)) - Assert.True(hasBody, "At least one POST endpoint should have [request.body]") - | Error msg -> - Assert.Fail($"Expected 
Ok but got Error: {msg}") diff --git a/website/eleventy.config.js b/website/eleventy.config.js index c6c0036..476e2ac 100644 --- a/website/eleventy.config.js +++ b/website/eleventy.config.js @@ -69,6 +69,28 @@ export default function (eleventyConfig) { return content; }); + // Fix OG site_name: use short name instead of full title + eleventyConfig.addTransform("og-site-name", function (content) { + if (this.page.outputPath?.endsWith(".html")) { + return content.replace( + '<meta property="og:site_name" content="Napper — CLI-First API Testing for VS Code">', + '<meta property="og:site_name" content="Napper">' + ); + } + return content; + }); + + // Replace techdoc generator tag with project branding + eleventyConfig.addTransform("generator-tag", function (content) { + if (this.page.outputPath?.endsWith(".html")) { + return content.replace( + '<meta name="generator" content="Eleventy + techdoc">', + '<meta name="generator" content="Eleventy">' + ); + } + return content; + }); + // Fix llms.txt: remove dead /api/ link eleventyConfig.addTransform("llms-fix", function (content) { if (this.page.outputPath === "llms.txt" || this.page.outputPath?.endsWith("/llms.txt")) { diff --git a/website/src/_data/navigation.json b/website/src/_data/navigation.json index f43ace9..cac1e53 100644 --- a/website/src/_data/navigation.json +++ b/website/src/_data/navigation.json @@ -32,7 +32,7 @@ ] }, { - "title": "Compare", + "title": "Migrate", "items": [ { "text": "vs Postman", "url": "/docs/vs-postman/" }, { "text": "vs Bruno", "url": "/docs/vs-bruno/" }, @@ -60,7 +60,7 @@ ] }, { - "title": "Compare", + "title": "Migrate", "items": [ { "text": "vs Postman", "url": "/docs/vs-postman/" }, { "text": "vs Bruno", "url": "/docs/vs-bruno/" }, diff --git a/website/src/_data/site.json b/website/src/_data/site.json index d28e6c5..26876e4 100644 --- a/website/src/_data/site.json +++ b/website/src/_data/site.json @@ -9,7 +9,9 @@ "stylesheet": "/assets/css/styles.css", "github": 
"https://github.com/MelbourneDeveloper/napper", "ogImage": "/assets/images/logo.png", - "keywords": "API testing, HTTP client, VS Code extension, F# scripting, C# scripting, Postman alternative, Bruno alternative, CLI testing, REST API, test automation", + "ogImageWidth": "800", + "ogImageHeight": "800", + "keywords": "API testing, HTTP client, VS Code extension, F# scripting, C# scripting, Postman alternative, Bruno alternative, CLI testing, REST API, test automation, http file converter, dothttp migration", "company": { "name": "Nimblesite", "url": "https://nimblesite.co" diff --git a/website/src/blog/introducing-napper.md b/website/src/blog/introducing-napper.md index 31fa0c9..8d5861c 100644 --- a/website/src/blog/introducing-napper.md +++ b/website/src/blog/introducing-napper.md @@ -6,8 +6,8 @@ author: Christian Findlay tags: posts category: announcements excerpt: "Meet Napper — a free, open-source API testing tool that puts the CLI first, stores everything as plain text, and gives you the full power of C# and F# scripting with the entire .NET ecosystem." -description: "Introducing Napper, a free, open-source, CLI-first API testing tool for VS Code. A modern alternative to Postman, Bruno, and .http files with C# and F# scripting, declarative assertions, composable test suites, and CI/CD integration via JUnit XML." -keywords: "API testing, VS Code extension, C# scripting, F# scripting, CLI API testing, Postman alternative, Bruno alternative, HTTP testing, REST API testing, .NET API testing, CI/CD testing, JUnit XML, open source API testing tool" +description: "Introducing Napper, a free, open-source, CLI-first API testing tool for VS Code. A modern alternative to Postman, Bruno, and .http files with C# and F# scripting, declarative assertions, composable test suites, built-in .http file conversion, and CI/CD integration via JUnit XML." 
+keywords: "API testing, VS Code extension, C# scripting, F# scripting, CLI API testing, Postman alternative, Bruno alternative, HTTP testing, REST API testing, .NET API testing, CI/CD testing, JUnit XML, open source API testing tool, http file converter, convert http to nap" --- # Introducing Napper: CLI-First API Testing for VS Code with C# and F# Scripting @@ -212,7 +212,7 @@ name = Full API Suite ## Built for CI/CD from day one -Napper is designed for [continuous integration](/docs/ci-integration/). The CLI binary is self-contained with no runtime dependencies. It outputs [JUnit XML](https://github.com/testmoapp/junitxml), TAP, JSON, and NDJSON formats natively. +Napper is designed for [continuous integration](/docs/ci-integration/). The CLI binary is self-contained with no runtime dependencies. It outputs [JUnit XML](https://github.com/testmoapp/junitxml), JSON, and NDJSON formats natively (`cli-output`). ### GitHub Actions @@ -245,6 +245,24 @@ jobs: Napper exits with code `0` when all assertions pass and `1` when any assertion fails. This integrates natively with [GitHub Actions](https://github.com/features/actions), [GitLab CI](https://docs.gitlab.com/ci/), [Jenkins](https://www.jenkins.io/), [Azure DevOps](https://azure.microsoft.com/en-us/products/devops), and any CI platform that fails on non-zero exit codes. +## Migrate from .http files with one command + +Already using `.http` files with VS Code REST Client or JetBrains IDEs? Napper includes a **built-in converter** that transforms your existing `.http` files into `.nap` format: + +```bash +# Convert a single file +napper convert http ./requests.http + +# Convert an entire directory +napper convert http ./api-tests/ --output-dir ./nap-tests/ +``` + +The converter supports both **Microsoft** (VS Code REST Client) and **JetBrains** (IntelliJ, Rider, WebStorm) `.http` dialects. 
It maps variables to `.napenv` files, preserves request names, converts JetBrains `http-client.env.json` environments, and warns about unsupported features like WebSocket or gRPC requests. + +Migration is non-destructive — your original `.http` files are untouched. Use `--dry-run` to preview what will be generated before writing any files. Once converted, you get all the benefits of Napper: declarative assertions, composable test suites, F# and C# scripting, and CI/CD integration. + +See [Napper vs .http files](/docs/vs-http-files/) for a full comparison. + ## VS Code extension — native editor integration The [Napper VS Code extension](https://marketplace.visualstudio.com/items?itemName=nimblesite.napper) brings the full experience into your editor: @@ -257,7 +275,7 @@ The [Napper VS Code extension](https://marketplace.visualstudio.com/items?itemNa - **Response inspection** with headers, body, and timing information - **Copy as curl** to share requests with teammates who don't use Napper -Install it from the [VS Code Marketplace](https://marketplace.visualstudio.com/items?itemName=nimblesite.napper): +The extension relies on the CLI binary to run requests — [install the CLI](/docs/installation/) first, then install the extension from the [VS Code Marketplace](https://marketplace.visualstudio.com/items?itemName=nimblesite.napper): ```bash code --install-extension nimblesite.napper @@ -272,8 +290,9 @@ code --install-extension nimblesite.napper | Git-friendly files | Plain text | JSON blobs | Yes | Yes | | Assertions | Declarative + scripts | JS scripts | JS scripts | None | | Scripting language | **C# + F# (.NET)** | Sandboxed JS | Sandboxed JS | None | -| CI/CD output | JUnit, TAP, JSON | Via Newman | Via CLI | None | +| CI/CD output | JUnit, JSON, NDJSON | Via Newman | Via CLI | None | | Test Explorer | Native | No | No | No | +| .http file migration | Built-in converter | Import only | No | N/A | | Account required | No | Yes | No | No | | Price | Free (MIT) | 
Freemium | Free (MIT) | Free | @@ -281,9 +300,10 @@ code --install-extension nimblesite.napper 1. [Install the CLI or VS Code extension](/docs/installation/) 2. Follow the [Quick Start guide](/docs/quick-start/) to create your first request -3. Add [assertions](/docs/assertions/) to validate responses -4. Set up [environments](/docs/environments/) for different targets -5. Write [C# scripts](/docs/csharp-scripting/) or [F# scripts](/docs/fsharp-scripting/) for advanced flows -6. Run everything in [CI/CD](/docs/ci-integration/) with JUnit XML output +3. [Migrate existing .http files](/docs/vs-http-files/) with `napper convert http` +4. Add [assertions](/docs/assertions/) to validate responses +5. Set up [environments](/docs/environments/) for different targets +6. Write [C# scripts](/docs/csharp-scripting/) or [F# scripts](/docs/fsharp-scripting/) for advanced flows +7. Run everything in [CI/CD](/docs/ci-integration/) with JUnit XML output Napper is free, open source, and [MIT licensed](https://github.com/MelbourneDeveloper/napper/blob/main/LICENSE). Browse the source code and examples on [GitHub](https://github.com/MelbourneDeveloper/napper). diff --git a/website/src/docs/assertions.md b/website/src/docs/assertions.md index 3ba3261..b4f5d9d 100644 --- a/website/src/docs/assertions.md +++ b/website/src/docs/assertions.md @@ -8,7 +8,7 @@ eleventyNavigation: order: 8 --- -# Assertions +# Assertions (spec: nap-assert) The `[assert]` section in `.nap` files provides declarative assertions on HTTP responses. No scripting needed for common checks. 
@@ -20,14 +20,14 @@ Each assertion is a single line in the form: target operator value ``` -## Status code +## Status code (spec: assert-status) ``` [assert] status = 200 ``` -## JSON body paths +## JSON body paths (spec: assert-equals, assert-exists, assert-gt) Assert on values in the JSON response body using dot notation: @@ -39,7 +39,7 @@ body.email exists body.users.length > 0 ``` -## Headers +## Headers (spec: assert-contains, assert-exists) Check response headers: @@ -49,7 +49,7 @@ headers.Content-Type contains "application/json" headers.X-Request-Id exists ``` -## Response time +## Response time (spec: assert-lt) Assert that the response completes within a time limit: @@ -61,13 +61,14 @@ duration < 2s ## Operators -| Operator | Description | Example | -|----------|-------------|---------| -| `=` | Equals | `status = 200` | -| `>` | Greater than | `body.count > 0` | -| `<` | Less than | `duration < 500ms` | -| `exists` | Field is present | `body.id exists` | -| `contains` | String contains | `headers.Content-Type contains "json"` | +| Operator | Description | Example | Spec | +|----------|-------------|---------|------| +| `=` | Equals | `status = 200` | (spec: assert-equals) | +| `>` | Greater than | `body.count > 0` | (spec: assert-gt) | +| `<` | Less than | `duration < 500ms` | (spec: assert-lt) | +| `exists` | Field is present | `body.id exists` | (spec: assert-exists) | +| `contains` | String contains | `headers.Content-Type contains "json"` | (spec: assert-contains) | +| `matches` | Regex match | `body.email matches "^.+@.+$"` | (spec: assert-matches) | ## Multiple assertions diff --git a/website/src/docs/ci-integration.md b/website/src/docs/ci-integration.md index 1ed52cc..aa53717 100644 --- a/website/src/docs/ci-integration.md +++ b/website/src/docs/ci-integration.md @@ -8,7 +8,7 @@ eleventyNavigation: order: 10 --- -# CI Integration +# CI Integration (spec: cli-run, cli-output, cli-exit-codes) Napper is built for CI/CD. 
The CLI binary is self-contained with no runtime dependencies, and outputs standard formats like JUnit XML. @@ -57,7 +57,7 @@ api-tests: junit: results.xml ``` -## Environment variables +## Environment variables (spec: cli-env, cli-var) Create a `.napenv.ci` file for CI-specific configuration: @@ -72,15 +72,15 @@ Override secrets via CLI flags: napper run ./tests/ --env ci --var token=$API_TOKEN ``` -## Output formats +## Output formats (spec: cli-output, output-pretty, output-junit, output-json, output-ndjson) | Format | Use case | |--------|----------| +| `pretty` | Human-readable terminal output (default) | | `junit` | Most CI platforms (GitHub Actions, GitLab, Jenkins, Azure DevOps) | -| `tap` | TAP consumers, some CI tools | | `json` | Custom processing, dashboards | | `ndjson` | Streaming to log aggregators | -## Exit codes +## Exit codes (spec: cli-exit-codes) Napper exits with code `0` when all assertions pass, `1` when any assertion fails, and `2` on runtime errors. This integrates naturally with CI pipelines that fail on non-zero exit codes. diff --git a/website/src/docs/cli-reference.md b/website/src/docs/cli-reference.md index 8609216..9309d97 100644 --- a/website/src/docs/cli-reference.md +++ b/website/src/docs/cli-reference.md @@ -12,7 +12,7 @@ eleventyNavigation: ## Commands -### `napper run` +### `napper run` (spec: cli-run) Run a `.nap` file, `.naplist` file, or folder. 
@@ -29,23 +29,23 @@ napper run ./tests/ #### Flags -| Flag | Description | Example | -|------|-------------|---------| -| `--env <name>` | Use a named environment | `--env staging` | -| `--var <key=value>` | Override a variable | `--var userId=42` | -| `--output <format>` | Output format | `--output junit` | +| Flag | Description | Example | Spec | +|------|-------------|---------|------| +| `--env <name>` | Use a named environment | `--env staging` | (spec: cli-env) | +| `--var <key=value>` | Override a variable | `--var userId=42` | (spec: cli-var) | +| `--output <format>` | Output format | `--output junit` | (spec: cli-output) | +| `--verbose` | Enable verbose output with detailed request/response info | `--verbose` | (spec: cli-verbose) | -#### Output formats +#### Output formats (spec: cli-output) -| Format | Description | -|--------|-------------| -| `pretty` | Human-readable colored output (default) | -| `junit` | JUnit XML for CI integration | -| `tap` | TAP (Test Anything Protocol) | -| `json` | JSON report | -| `ndjson` | Newline-delimited JSON (streaming) | +| Format | Description | Spec | +|--------|-------------|------| +| `pretty` | Human-readable colored output (default) | (spec: output-pretty) | +| `junit` | JUnit XML for CI integration | (spec: output-junit) | +| `json` | JSON report | (spec: output-json) | +| `ndjson` | Newline-delimited JSON (streaming) | (spec: output-ndjson) | -### `napper check` +### `napper check` (spec: cli-check) Validate syntax without executing requests. @@ -53,15 +53,45 @@ Validate syntax without executing requests. napper check ./suite.naplist ``` -### `napper list` +### `napper convert http` (spec: cli-convert) -List all requests in a path. +Convert `.http` files to `.nap` format. Supports both Microsoft (VS Code REST Client) and JetBrains (IntelliJ, Rider, WebStorm) dialects. 
```bash -napper list ./ +# Convert a single .http file +napper convert http ./requests.http + +# Convert a directory of .http files +napper convert http ./api-tests/ --output-dir ./nap-tests/ + +# Preview without writing files +napper convert http ./requests.http --dry-run + +# Specify dialect explicitly +napper convert http ./requests.http --dialect jb +``` + +#### Flags + +| Flag | Description | Example | Spec | +|------|-------------|---------|------| +| `--output-dir <path>` | Output directory for converted files | `--output-dir ./nap/` | (spec: cli-convert) | +| `--env-file <path>` | JetBrains environment file | `--env-file http-client.env.json` | (spec: cli-convert) | +| `--dialect <ms\|jb\|auto>` | Force dialect detection | `--dialect jb` | (spec: cli-convert) | +| `--dry-run` | Preview conversion without writing | `--dry-run` | (spec: cli-convert) | +| `--verbose` | Show detailed conversion output | `--verbose` | (spec: cli-verbose) | + +The converter maps variables to `.napenv` files, preserves request names, and warns about unsupported features (WebSocket, gRPC, GraphQL). + +### `napper generate` (spec: cli-generate, openapi-generate) + +Generate `.nap` files from an OpenAPI specification. + +```bash +napper generate openapi ./openapi.json ``` -## Exit codes +## Exit codes (spec: cli-exit-codes) | Code | Meaning | |------|---------| diff --git a/website/src/docs/csharp-scripting.md b/website/src/docs/csharp-scripting.md index a87e57a..259ffef 100644 --- a/website/src/docs/csharp-scripting.md +++ b/website/src/docs/csharp-scripting.md @@ -8,11 +8,11 @@ eleventyNavigation: order: 8 --- -# C# Scripting +# C# Scripting (spec: script-csx) Napper supports C# scripts (`.csx` files) for pre/post request hooks and test orchestration. This gives you the full power of .NET for complex testing scenarios, using familiar C# syntax. 
-## Pre/post request hooks +## Pre/post request hooks (spec: script-pre, script-post) Reference scripts in your `.nap` file: @@ -22,7 +22,7 @@ pre = ./scripts/setup-auth.csx post = ./scripts/validate-response.csx ``` -### Pre-request scripts +### Pre-request scripts (spec: script-pre) Run before the HTTP request is sent. Use them to set up authentication, generate dynamic data, or modify variables. @@ -33,7 +33,7 @@ ctx.Set("token", token); ctx.Log($"Token generated: {token[..8]}..."); ``` -### Post-request scripts +### Post-request scripts (spec: script-post) Run after the response is received. Use them for complex validation, data extraction, or chaining. @@ -52,7 +52,7 @@ if (userId <= 0) ctx.Log($"Created user {userId}"); ``` -## NapContext +## NapContext (spec: script-context) Scripts receive a `ctx` object with these members: @@ -66,7 +66,7 @@ Scripts receive a `ctx` object with these members: | `Fail(message)` | Pre + Post | Fail the test with a message | | `Log(message)` | Pre + Post | Write to test output | -## Orchestration scripts +## Orchestration scripts (spec: script-orchestration) For complex flows, use orchestration scripts that control execution directly: @@ -100,7 +100,7 @@ Reference orchestration scripts in a `.naplist`: ./scripts/orchestration.csx ``` -## NapRunner +## NapRunner (spec: script-runner) Orchestration scripts receive a `runner` object: diff --git a/website/src/docs/environments.md b/website/src/docs/environments.md index c998214..a6183c8 100644 --- a/website/src/docs/environments.md +++ b/website/src/docs/environments.md @@ -8,13 +8,13 @@ eleventyNavigation: order: 6 --- -# Environments +# Environments (spec: env-file) Napper uses `.napenv` files for environment-specific configuration. These are simple key-value files that define variables for your requests. 
-## .napenv files +## .napenv files (spec: env-file) -### Base environment +### Base environment (spec: env-base) Create a `.napenv` file in your project root: @@ -25,7 +25,7 @@ timeout = 5000 This file should be committed to version control. -### Named environments +### Named environments (spec: env-named) Create environment-specific files like `.napenv.staging` or `.napenv.production`: @@ -47,7 +47,7 @@ napper run ./tests/ --env staging Or use the environment switcher in the VS Code status bar. -### Local secrets +### Local secrets (spec: env-local) Create a `.napenv.local` file for secrets that should never be committed: @@ -59,7 +59,7 @@ adminPassword = supersecret The VS Code extension masks values from `.napenv.local` in hover tooltips. -## Resolution order +## Resolution order (spec: env-resolution) Variables are resolved from highest to lowest priority: @@ -71,7 +71,7 @@ Variables are resolved from highest to lowest priority: This means CLI flags always win, and file-level defaults are the fallback. -## Usage in requests +## Usage in requests (spec: env-interpolation) Reference variables with double curly braces: diff --git a/website/src/docs/fsharp-scripting.md b/website/src/docs/fsharp-scripting.md index 75a1ed9..28e2807 100644 --- a/website/src/docs/fsharp-scripting.md +++ b/website/src/docs/fsharp-scripting.md @@ -8,11 +8,11 @@ eleventyNavigation: order: 7 --- -# F# Scripting +# F# Scripting (spec: script-fsx) Napper supports F# Interactive scripts (`.fsx` files) for pre/post request hooks and test orchestration. This gives you the full power of .NET for complex testing scenarios. -## Pre/post request hooks +## Pre/post request hooks (spec: script-pre, script-post) Reference scripts in your `.nap` file: @@ -22,7 +22,7 @@ pre = ./scripts/setup-auth.fsx post = ./scripts/validate-response.fsx ``` -### Pre-request scripts +### Pre-request scripts (spec: script-pre) Run before the HTTP request is sent. 
Use them to set up authentication, generate dynamic data, or modify variables. @@ -33,7 +33,7 @@ ctx.Set "token" token ctx.Log $"Token generated: {token.[..8]}..." ``` -### Post-request scripts +### Post-request scripts (spec: script-post) Run after the response is received. Use them for complex validation, data extraction, or chaining. @@ -52,7 +52,7 @@ if userId <= 0 then ctx.Log $"Created user {userId}" ``` -## NapContext +## NapContext (spec: script-context) Scripts receive a `ctx` object with these members: @@ -66,7 +66,7 @@ Scripts receive a `ctx` object with these members: | `Fail message` | Pre + Post | Fail the test with a message | | `Log message` | Pre + Post | Write to test output | -## Orchestration scripts +## Orchestration scripts (spec: script-orchestration) For complex flows, use orchestration scripts that control execution directly: @@ -98,7 +98,7 @@ Reference orchestration scripts in a `.naplist`: ./scripts/orchestration.fsx ``` -## NapRunner +## NapRunner (spec: script-runner) Orchestration scripts receive a `runner` object: diff --git a/website/src/docs/index.md b/website/src/docs/index.md index 0bfa039..9b034c2 100644 --- a/website/src/docs/index.md +++ b/website/src/docs/index.md @@ -14,14 +14,15 @@ eleventyNavigation: Napper is built for developers who want: -- **Simple things to be simple** — a one-off request is nearly as terse as curl -- **Complex things to be possible** — full F# and C# scripting for advanced flows -- **Everything in version control** — plain text files, no binary blobs +- **Simple things to be simple** — a one-off request is nearly as terse as curl (spec: nap-minimal) +- **Complex things to be possible** — full F# and C# scripting for advanced flows (spec: script-fsx, script-csx) +- **Everything in version control** — plain text files, no binary blobs (spec: nap-file, naplist-file, env-file) - **First-class VS Code support** — syntax highlighting, Test Explorer, environment switching +- **Easy migration** — convert 
existing `.http` files with a single CLI command (spec: cli-convert) ## How does Napper work? -Every HTTP request is a `.nap` file: +Every HTTP request is a `.nap` file (spec: nap-file): ``` GET https://api.example.com/health @@ -37,7 +38,7 @@ Or from VS Code with a single click. ## What happens when you need more? -Add headers, bodies, assertions, and environment variables: +Add headers, bodies, assertions, and environment variables (spec: nap-full): ``` [meta] @@ -64,7 +65,17 @@ body.id exists duration < 500ms ``` -Chain requests into test suites with `.naplist` files. Add F# or C# scripts for advanced orchestration. Output JUnit XML for your CI pipeline. +Chain requests into test suites with `.naplist` files (spec: naplist-file). Add F# or C# scripts for advanced orchestration (spec: script-fsx, script-csx). Output JUnit XML for your CI pipeline (spec: output-junit). + +## Already using .http files? (spec: cli-convert) + +Napper includes a built-in converter to migrate your existing `.http` files. Both Microsoft (VS Code REST Client) and JetBrains (IntelliJ, Rider, WebStorm) dialects are supported: + +```bash +napper convert http ./requests.http +``` + +The converter maps variables to `.napenv` files, preserves request names, and converts JetBrains `http-client.env.json` environments. See the [.http file comparison](/docs/vs-http-files/) for details. ## Why is the CLI the primary interface? @@ -75,3 +86,4 @@ Napper is not a GUI-first tool with a CLI bolted on. 
The CLI is the primary inte - [Install Napper](/docs/installation/) to get started - Follow the [Quick Start](/docs/quick-start/) guide - Learn about [.nap file format](/docs/nap-files/) +- [Migrate from .http files](/docs/vs-http-files/) with the built-in converter diff --git a/website/src/docs/installation.md b/website/src/docs/installation.md index f86c1a5..55b06b5 100644 --- a/website/src/docs/installation.md +++ b/website/src/docs/installation.md @@ -10,64 +10,85 @@ eleventyNavigation: # Installation -Napper has two components: the **CLI binary** and the **VS Code extension**. The extension bundles the CLI, so installing the extension is all most users need. +## Download from GitHub Releases (spec: cli-run) -## VS Code Extension +The fastest way to get Napper is to download the CLI binary from [GitHub Releases](https://github.com/MelbourneDeveloper/napper/releases). The current release is **v0.9.0**. -Install from the marketplace: +| Platform | Binary | +|----------|--------| +| macOS (Apple Silicon) | [`napper-osx-arm64`](https://github.com/MelbourneDeveloper/napper/releases/latest/download/napper-osx-arm64) | +| macOS (Intel) | [`napper-osx-x64`](https://github.com/MelbourneDeveloper/napper/releases/latest/download/napper-osx-x64) | +| Linux (x64) | [`napper-linux-x64`](https://github.com/MelbourneDeveloper/napper/releases/latest/download/napper-linux-x64) | +| Windows (x64) | [`napper-win-x64.exe`](https://github.com/MelbourneDeveloper/napper/releases/latest/download/napper-win-x64.exe) | + +Download the binary, make it executable (`chmod +x` on macOS/Linux), and move it somewhere on your PATH. + +### Verify installation ```bash -code --install-extension nimblesite.napper +napper --help ``` -Or search for **"Napper"** in the VS Code Extensions panel. 
+## Install script -The extension includes: -- Syntax highlighting for `.nap`, `.naplist`, and `.napenv` files -- Request explorer in the activity bar -- Test Explorer integration -- Environment switching via status bar -- CodeLens actions (Run, Copy as curl) +Alternatively, use the install script which auto-detects your platform and verifies the SHA256 checksum. -## CLI Binary +### macOS / Linux -For CI/CD pipelines or terminal-only workflows, install the standalone CLI. +```bash +curl -fsSL https://raw.githubusercontent.com/MelbourneDeveloper/napper/main/scripts/install.sh | bash +``` -### From GitHub Releases +Or install a specific version: -Download the latest binary for your platform from [GitHub Releases](https://github.com/MelbourneDeveloper/napper/releases): +```bash +curl -fsSL https://raw.githubusercontent.com/MelbourneDeveloper/napper/main/scripts/install.sh | bash -s 0.9.0 +``` -| Platform | Binary | -|----------|--------| -| macOS (Apple Silicon) | `napper-osx-arm64` | -| macOS (Intel) | `napper-osx-x64` | -| Linux (x64) | `napper-linux-x64` | -| Windows (x64) | `napper-win-x64.exe` | +### Windows -### macOS / Linux +```powershell +irm https://raw.githubusercontent.com/MelbourneDeveloper/napper/main/scripts/install.ps1 | iex +``` -```bash -# Download (replace with your platform) -curl -L -o napper https://github.com/MelbourneDeveloper/napper/releases/latest/download/napper-osx-arm64 +Or install a specific version: + +```powershell +.\scripts\install.ps1 -Version 0.9.0 +``` + +## Build from source -# Make executable -chmod +x napper +If you have the .NET SDK and `make` installed, you can build and install the CLI from source: -# Move to PATH -mv napper ~/.local/bin/ +```bash +git clone https://github.com/MelbourneDeveloper/napper.git +cd napper +make install-binaries ``` -### Windows +This builds the CLI for your platform and installs it to `~/.local/bin/napper`. -Download `napper-win-x64.exe` from releases and add it to your PATH. 
+## VS Code Extension -### Verify installation +The extension provides editor integration but relies on the CLI binary to run requests. Install the CLI first (see above), then install the extension. + +Install from the marketplace: ```bash -napper --help +code --install-extension nimblesite.napper ``` -## Requirements +Or search for **"Napper"** in the VS Code Extensions panel. + +The extension provides: +- Syntax highlighting for `.nap`, `.naplist`, and `.napenv` files +- Request explorer in the activity bar +- Test Explorer integration +- Environment switching via status bar +- CodeLens actions (Run, Copy as curl) + +## Requirements (spec: script-fsx, script-csx) - **CLI**: Self-contained binary, no runtime dependencies - **VS Code Extension**: VS Code 1.100.0 or later diff --git a/website/src/docs/nap-files.md b/website/src/docs/nap-files.md index fe8b52f..2cbfbdc 100644 --- a/website/src/docs/nap-files.md +++ b/website/src/docs/nap-files.md @@ -8,11 +8,11 @@ eleventyNavigation: order: 4 --- -# .nap Files +# .nap Files (spec: nap-file) A `.nap` file defines a single HTTP request with optional metadata, headers, body, assertions, and script hooks. -## Minimal format +## Minimal format (spec: nap-minimal) The simplest possible `.nap` file is just a method and URL: @@ -20,7 +20,7 @@ The simplest possible `.nap` file is just a method and URL: GET https://api.example.com/health ``` -## Full format +## Full format (spec: nap-full) {% raw %} ``` @@ -53,7 +53,7 @@ post = ./scripts/log-response.fsx ## Sections -### `[meta]` +### `[meta]` (spec: nap-meta) Optional metadata about the request. @@ -63,7 +63,7 @@ Optional metadata about the request. | `description` | Longer description for documentation | | `tags` | Comma-separated tags for filtering | -### `[vars]` +### `[vars]` (spec: nap-vars) Local variable defaults. These are overridden by environment files and CLI flags. 
@@ -72,7 +72,7 @@ userId = 1 baseUrl = https://api.example.com ``` -### `[request]` +### `[request]` (spec: nap-request) The HTTP method and URL. This is the only required part of a `.nap` file. @@ -82,9 +82,9 @@ GET {{baseUrl}}/users/{{userId}} ``` {% endraw %} -Supported methods: `GET`, `POST`, `PUT`, `PATCH`, `DELETE`, `HEAD`, `OPTIONS`. +Supported methods: `GET`, `POST`, `PUT`, `PATCH`, `DELETE`, `HEAD`, `OPTIONS` (spec: http-methods). -### `[request.headers]` +### `[request.headers]` (spec: nap-headers) Key-value pairs for HTTP headers. Variables are interpolated. @@ -96,7 +96,7 @@ X-Custom-Header = {{customValue}} ``` {% endraw %} -### `[request.body]` +### `[request.body]` (spec: nap-body) Request body for `POST`, `PUT`, and `PATCH` requests. Content is wrapped in triple quotes: @@ -110,11 +110,11 @@ Request body for `POST`, `PUT`, and `PATCH` requests. Content is wrapped in trip """ ``` -### `[assert]` +### `[assert]` (spec: nap-assert) Declarative assertions on the response. See [Assertions](/docs/assertions/) for the full reference. -### `[script]` +### `[script]` (spec: nap-script) References to F# or C# scripts that run before or after the request. @@ -126,7 +126,7 @@ post = ./scripts/validate.csx See [F# Scripting](/docs/fsharp-scripting/) and [C# Scripting](/docs/csharp-scripting/) for details. -## Variable interpolation +## Variable interpolation (spec: env-interpolation) Use {% raw %}`{{variableName}}`{% endraw %} anywhere in the request. Variables are resolved from (highest priority first): @@ -136,7 +136,7 @@ Use {% raw %}`{{variableName}}`{% endraw %} anywhere in the request. Variables a 4. `.napenv` (base environment) 5. 
`[vars]` in the `.nap` file -## Comments +## Comments (spec: nap-comments) Lines starting with `#` are comments: diff --git a/website/src/docs/naplist-files.md b/website/src/docs/naplist-files.md index 9af7adc..831b75e 100644 --- a/website/src/docs/naplist-files.md +++ b/website/src/docs/naplist-files.md @@ -8,11 +8,11 @@ eleventyNavigation: order: 5 --- -# .naplist Playlists +# .naplist Playlists (spec: naplist-file) A `.naplist` file defines an ordered sequence of steps to execute. Steps can be `.nap` files, folders, other playlists, or F#/C# scripts. -## Basic format +## Basic format (spec: naplist-meta, naplist-steps) ``` [meta] @@ -25,7 +25,7 @@ description = Quick checks for core endpoints ./users/create-user.nap ``` -## Full format +## Full format (spec: naplist-meta, naplist-vars, naplist-steps) ``` [meta] @@ -55,7 +55,7 @@ adminToken = {% raw %}{{ADMIN_TOKEN}}{% endraw %} ## Step types -### .nap files +### .nap files (spec: naplist-nap-step) Run a single HTTP request: @@ -63,15 +63,15 @@ Run a single HTTP request: ./users/get-user.nap ``` -### Folders +### Folders (spec: naplist-folder-step) -Run all `.nap` files in a folder, sorted by filename: +Run all `.nap` files in a folder, sorted by filename (spec: collection-sort): ``` ./users/ ``` -### Nested playlists +### Nested playlists (spec: naplist-nested) Run another `.naplist` file: @@ -81,7 +81,7 @@ Run another `.naplist` file: Nesting is recursive — playlists can reference other playlists. -### F# and C# scripts +### F# and C# scripts (spec: naplist-script-step) Run an orchestration script: @@ -92,7 +92,7 @@ Run an orchestration script: Scripts can use the injected `NapRunner` to run requests and playlists programmatically. See [F# Scripting](/docs/fsharp-scripting/) or [C# Scripting](/docs/csharp-scripting/). -## Variables +## Variables (spec: naplist-var-scope) Variables defined in `[vars]` are available to all steps. Steps can also set variables for downstream steps using F# or C# scripts. 
diff --git a/website/src/docs/quick-start.md b/website/src/docs/quick-start.md index 66fcd35..f01c0d5 100644 --- a/website/src/docs/quick-start.md +++ b/website/src/docs/quick-start.md @@ -12,7 +12,7 @@ eleventyNavigation: Get up and running with Napper in under 5 minutes. -## How do I create my first request? +## How do I create my first request? (spec: nap-minimal, nap-request) Create a file called `hello.nap`: @@ -28,7 +28,7 @@ napper run ./hello.nap You should see the JSON response printed to your terminal. -## How do I add assertions? +## How do I add assertions? (spec: nap-assert) Edit `hello.nap` to verify the response: @@ -44,7 +44,7 @@ body.title exists Run it again. Napper will report whether each assertion passed or failed. -## How do I use variables and environments? +## How do I use variables and environments? (spec: nap-vars, cli-env) Create a `.napenv` file in the same directory: @@ -84,7 +84,7 @@ Run the entire suite: napper run ./smoke.naplist ``` -## How do I use Napper in CI/CD? +## How do I use Napper in CI/CD? (spec: cli-output, cli-exit-codes) Output JUnit XML for your pipeline: diff --git a/website/src/docs/vs-bruno.md b/website/src/docs/vs-bruno.md index 83055cb..c227716 100644 --- a/website/src/docs/vs-bruno.md +++ b/website/src/docs/vs-bruno.md @@ -8,7 +8,7 @@ eleventyNavigation: order: 12 --- -# Napper vs Bruno +# Napper vs Bruno (spec: cli-run, nap-file) Napper and Bruno are both free, open-source alternatives to Postman that store requests as plain text files. Here is how they differ. @@ -20,17 +20,17 @@ Bruno is a GUI-first tool with a standalone desktop application. It focuses on p Bruno has its own standalone desktop application built with Electron. Napper integrates directly into VS Code as a native extension with syntax highlighting, a request explorer, environment switching, and Test Explorer integration. If you already work in VS Code, Napper fits into your existing workflow without switching applications. 
-## How does scripting compare? +## How does scripting compare? (spec: script-fsx, script-csx) Bruno provides sandboxed JavaScript for pre-request and post-request scripts, similar to Postman. Napper supports both F# (`.fsx`) and C# (`.csx`) scripts with full access to the .NET ecosystem. Scripts in Napper are not sandboxed, so you can import NuGet packages, call databases, parse XML, generate tokens, and perform any operation the .NET runtime supports. -## How do file formats compare? +## How do file formats compare? (spec: nap-file) Both Napper and Bruno store requests as plain text files that work well with git. Bruno uses its own Bru markup language. Napper uses `.nap` files with a TOML-inspired section-based format. Both produce clean diffs in code reviews. -## How does CI/CD integration compare? +## How does CI/CD integration compare? (spec: cli-run, cli-output) -Bruno provides a CLI for running collections from the terminal. Napper is designed CLI-first, meaning the command line is the primary interface rather than an afterthought. Napper outputs JUnit XML, TAP, JSON, and NDJSON formats natively and requires no runtime dependencies. +Bruno provides a CLI for running collections from the terminal. Napper is designed CLI-first, meaning the command line is the primary interface rather than an afterthought. Napper outputs JUnit XML, JSON, and NDJSON formats natively and requires no runtime dependencies. ## Feature comparison @@ -43,7 +43,8 @@ Bruno provides a CLI for running collections from the terminal. 
Napper is design | Scripting | Full F# and C# with .NET access | Sandboxed JavaScript | | Editor integration | Native VS Code extension | Standalone Electron app | | Test Explorer | Native VS Code support | No | -| CI/CD output | JUnit, TAP, JSON, NDJSON | JSON via CLI | +| CI/CD output | JUnit, JSON, NDJSON | JSON via CLI | +| .http file migration | Built-in converter | No | | Pricing | Free, MIT license | Free, MIT license | ## When should you choose Napper over Bruno? diff --git a/website/src/docs/vs-http-files.md b/website/src/docs/vs-http-files.md index 08b7d0b..27740bd 100644 --- a/website/src/docs/vs-http-files.md +++ b/website/src/docs/vs-http-files.md @@ -1,30 +1,72 @@ --- layout: layouts/docs.njk title: "Napper vs .http Files" -description: "Comparing Napper and .http files for API testing. Napper adds assertions, test suites, environments, F# and C# scripting, and CLI execution to the plain-text simplicity of .http files." -keywords: "Napper vs http files, http file alternative, REST Client alternative, VS Code API testing" +description: "Comparing Napper and .http files for API testing. Napper adds assertions, test suites, environments, F# and C# scripting, CLI execution, and a built-in converter to migrate your existing .http files." +keywords: "Napper vs http files, http file alternative, REST Client alternative, VS Code API testing, http file converter, convert http to nap, JetBrains http migration" eleventyNavigation: key: vs .http Files order: 13 --- -# Napper vs .http Files +# Napper vs .http Files (spec: nap-file, cli-run) -`.http` files are the simplest way to send HTTP requests from VS Code. Napper builds on the same plain-text philosophy but adds assertions, test suites, environments, scripting, and a full CLI. +`.http` files are the simplest way to send HTTP requests from VS Code. 
Napper builds on the same plain-text philosophy but adds assertions, test suites, environments, scripting, a full CLI, and a **built-in converter** to migrate your existing `.http` files. ## What are .http files? -`.http` files (also called `.rest` files) are plain text files supported by the REST Client extension in VS Code and by JetBrains IDEs. They let you define HTTP requests and send them directly from your editor. They are simple and lightweight, but limited in functionality. +`.http` files (also called `.rest` files) are plain text files supported by the REST Client extension in VS Code and by JetBrains IDEs (IntelliJ, Rider, WebStorm). They let you define HTTP requests and send them directly from your editor. They are simple and lightweight, but limited in functionality. -## What does Napper add beyond .http files? +## What does Napper add beyond .http files? (spec: nap-assert, nap-vars, script-fsx, script-csx, cli-output) -Napper adds five major capabilities that `.http` files lack: +Napper adds six major capabilities that `.http` files lack: -- **Declarative assertions** — Verify status codes, JSON body paths, headers, and response times with a clean, readable syntax directly in the request file. +- **Built-in .http converter** — Migrate your existing `.http` files to `.nap` format with a single CLI command. Supports both Microsoft and JetBrains dialects. +- **Declarative assertions** (spec: nap-assert) — Verify status codes, JSON body paths, headers, and response times with a clean, readable syntax directly in the request file. - **Composable test suites** — Chain multiple requests into ordered playlists with `.naplist` files. Nest playlists and reference entire folders. -- **Environment management** — Define variables in `.napenv` files, create named environments for staging and production, and override secrets locally with `.napenv.local`. 
-- **F# and C# scripting** — Run pre-request and post-request scripts with full access to the .NET ecosystem for token generation, data setup, and complex validation. -- **CLI execution** — Run any request or test suite from the terminal. Output JUnit XML, TAP, JSON, or NDJSON for CI/CD pipelines. +- **Environment management** (spec: nap-vars, cli-env) — Define variables in `.napenv` files, create named environments for staging and production, and override secrets locally with `.napenv.local`. +- **F# and C# scripting** (spec: script-fsx, script-csx) — Run pre-request and post-request scripts with full access to the .NET ecosystem for token generation, data setup, and complex validation. +- **CLI execution** (spec: cli-run, cli-output) — Run any request or test suite from the terminal. Output JUnit XML, JSON, or NDJSON for CI/CD pipelines. + +## How do I convert .http files to Napper? (spec: cli-convert) + +Napper includes a built-in converter that transforms `.http` files into `.nap` files. Run a single command to migrate: + +```bash +# Convert a single file +napper convert http ./requests.http + +# Convert an entire directory +napper convert http ./api-tests/ --output-dir ./nap-tests/ + +# Dry run to preview without writing files +napper convert http ./requests.http --dry-run +``` + +### What does the converter handle? 
+ +The converter parses your `.http` files and produces equivalent `.nap` files: + +- **Request methods, URLs, headers, and bodies** are mapped to the corresponding `.nap` sections +- **Request names** (`# @name` in Microsoft format, `### name` in JetBrains format) become `[meta] name` +- **Variables** (`@variable = value` in Microsoft, `{{"{{variable}}"}}` in JetBrains) are extracted into `.napenv` files +- **JetBrains environment files** (`http-client.env.json`) are converted to `.napenv.<name>` files +- **JetBrains private environments** (`http-client.private.env.json`) become `.napenv.local` +- **Simple assertions** from JetBrains response handlers are extracted where possible +- **Unsupported features** (WebSocket, gRPC, GraphQL) generate warnings so you know what needs manual attention + +### Which .http dialects are supported? + +Napper supports both major `.http` dialects: + +| Feature | Microsoft (REST Client) | JetBrains (IntelliJ/Rider) | +|---------|------------------------|---------------------------| +| Variable syntax | `@var = value` | `{{"{{var}}"}}` from env files | +| Request naming | `# @name requestName` | `### Request Name` | +| Request separator | `###` | `###` | +| Environment files | REST Client settings | `http-client.env.json` | +| Response handlers | Not supported | `> {%raw%}{%{%endraw%} ... {%raw%}%}{%endraw%}` (partial) | + +The converter auto-detects the dialect, or you can specify it explicitly with `--dialect ms` or `--dialect jb`. 
## Feature comparison @@ -37,18 +79,16 @@ Napper adds five major capabilities that `.http` files lack: | Test suites | `.naplist` playlists | None | | Environment variables | `.napenv` files with layering | Limited (REST Client) | | Scripting | Full F# and C# scripting | None | -| CI/CD output | JUnit, TAP, JSON, NDJSON | None | +| CI/CD output | JUnit, JSON, NDJSON | None | | Test Explorer | Native VS Code support | No | +| .http migration | Built-in converter | N/A | ## When should you choose Napper over .http files? -Choose Napper when you need to verify API responses, run automated test suites, integrate with CI/CD pipelines, use environment variables across different deployment targets, or script complex request flows. Stay with `.http` files if you only need to send quick one-off requests from your editor without any validation or automation. - -## Can I migrate from .http files to Napper? - -The `.nap` file format is similar in philosophy to `.http` files. A minimal `.nap` file is just a method and URL on one line, similar to the simplest `.http` request. You can migrate by creating `.nap` files with the same requests and progressively adding assertions, environments, and test suites. +Choose Napper when you need to verify API responses, run automated test suites, integrate with CI/CD pipelines, use environment variables across different deployment targets, or script complex request flows. If you already have `.http` files, the built-in converter makes migration straightforward. Stay with `.http` files if you only need to send quick one-off requests from your editor without any validation or automation. 
## Get started - [Install Napper](/docs/installation/) - [Quick Start guide](/docs/quick-start/) +- [CLI Reference](/docs/cli-reference/) for full `convert http` options diff --git a/website/src/docs/vs-postman.md b/website/src/docs/vs-postman.md index cc06971..6308fd1 100644 --- a/website/src/docs/vs-postman.md +++ b/website/src/docs/vs-postman.md @@ -8,7 +8,7 @@ eleventyNavigation: order: 11 --- -# Napper vs Postman +# Napper vs Postman (spec: cli-run, nap-file) Napper is a free, open-source, CLI-first alternative to Postman for API testing. Here is how they compare. @@ -20,17 +20,17 @@ Postman is a GUI-first application with a standalone desktop client. The command No. Napper requires no account, no sign-up, and no cloud sync. Postman requires an account to use the desktop application and locks collaboration features, advanced scripting, and API monitoring behind paid tiers. -## How do file formats compare? +## How do file formats compare? (spec: nap-file) Postman stores collections as JSON blobs that are difficult to read in diffs and code reviews. Napper stores every request as a plain text `.nap` file, every test suite as a `.naplist` file, and every environment as a `.napenv` file. All formats are human-readable and produce clean git diffs. -## How does scripting compare? +## How does scripting compare? (spec: script-fsx, script-csx) Postman provides a sandboxed JavaScript environment with a limited set of built-in libraries. Napper supports both F# (`.fsx`) and C# (`.csx`) scripts with full access to the .NET ecosystem. You can parse XML, call databases, generate cryptographic tokens, validate JSON schemas, and reference any NuGet package. -## How does CI/CD integration compare? +## How does CI/CD integration compare? (spec: cli-run, cli-output) -Postman requires Newman (a separate npm package) for running collections from the command line. Napper is CLI-first with a self-contained binary and no runtime dependencies. 
It outputs JUnit XML, TAP, JSON, and NDJSON formats natively. +Postman requires Newman (a separate npm package) for running collections from the command line. Napper is CLI-first with a self-contained binary and no runtime dependencies. It outputs JUnit XML, JSON, and NDJSON formats natively. ## Feature comparison @@ -41,9 +41,10 @@ Postman requires Newman (a separate npm package) for running collections from th | Git-friendly files | Plain text `.nap` files | JSON blobs | | Assertions | Declarative + F#/C# scripts | JavaScript scripts | | Scripting | Full F# and C# with .NET access | Sandboxed JavaScript | -| CI/CD output | JUnit, TAP, JSON, NDJSON | Via Newman | +| CI/CD output | JUnit, JSON, NDJSON | Via Newman | | Test Explorer | Native VS Code support | No | | Account required | No | Yes | +| .http file migration | Built-in converter | Import only | | Pricing | Free, MIT license | Freemium with paid tiers | ## When should you choose Napper over Postman? diff --git a/website/src/index.njk b/website/src/index.njk index 6b41b65..aafeb74 100644 --- a/website/src/index.njk +++ b/website/src/index.njk @@ -13,7 +13,7 @@ permalink: / <p class="hero-subtitle"> Napper is a free, open-source API testing tool that runs from the command line and integrates natively with VS Code. Define HTTP requests as plain text <code>.nap</code> files, add declarative assertions, chain them into test suites, and run everything in CI/CD with JUnit output. - As simple as curl for quick requests. As powerful as F# and C# for full test suites. + Migrate from <code>.http</code> files with a single command. As simple as curl for quick requests. As powerful as F# and C# for full test suites. </p> <div class="hero-actions"> <a href="/docs/installation/" class="btn btn-primary">Get Started</a> @@ -113,6 +113,14 @@ permalink: / <h3>Plain Text, Git Friendly</h3> <p>Every request is a <code>.nap</code> file. Every environment is a <code>.napenv</code> file. Version control everything. 
No binary blobs, no lock-in.</p> </div> + + <div class="feature-card"> + <div class="feature-icon" style="background: rgba(27,73,101,0.12); color: #1B4965;"> + <svg width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path><polyline points="14 2 14 8 20 8"></polyline><line x1="16" y1="13" x2="8" y2="13"></line><line x1="16" y1="17" x2="8" y2="17"></line></svg> + </div> + <h3>.http File Conversion</h3> + <p>Migrate from <code>.http</code> files with one command. Napper converts both Microsoft (REST Client) and JetBrains formats to <code>.nap</code> files, including variables and environments.</p> + </div> </div> </div> </section> @@ -173,7 +181,7 @@ permalink: / </tr> <tr> <td>CI/CD output formats</td> - <td><span class="check">JUnit, TAP, JSON</span></td> + <td><span class="check">JUnit, JSON, NDJSON</span></td> <td><span class="check">Via Newman</span></td> <td><span class="check">Via CLI</span></td> <td><span class="cross">None</span></td> @@ -192,6 +200,13 @@ permalink: / <td><span class="check">Yes</span></td> <td><span class="check">Yes</span></td> </tr> + <tr> + <td>.http file migration</td> + <td><span class="check">Built-in converter</span></td> + <td><span class="cross">Import only</span></td> + <td><span class="cross">No</span></td> + <td><span class="cross">N/A</span></td> + </tr> <tr> <td>No account required</td> <td><span class="check">Yes</span></td> @@ -313,7 +328,7 @@ permalink: / <div class="faq-item"> <h3>Does Napper work with CI/CD pipelines?</h3> - <p>Yes. Napper is designed for CI/CD from the ground up. The CLI binary is self-contained with no runtime dependencies. It outputs JUnit XML, TAP, JSON, and NDJSON formats. It integrates with GitHub Actions, GitLab CI, Jenkins, Azure DevOps, and any platform that supports standard test output formats. 
Exit code 0 means all assertions passed, exit code 1 means a test failed.</p> + <p>Yes. Napper is designed for CI/CD from the ground up. The CLI binary is self-contained with no runtime dependencies. It outputs JUnit XML, JSON, and NDJSON formats (<code>cli-output</code>). It integrates with GitHub Actions, GitLab CI, Jenkins, Azure DevOps, and any platform that supports standard test output formats. Exit code 0 means all assertions passed, exit code 1 means a test failed (<code>cli-exit-codes</code>).</p> </div> <div class="faq-item"> @@ -326,6 +341,11 @@ permalink: / <p>Napper uses three plain text file formats: <code>.nap</code> files for individual HTTP requests, <code>.naplist</code> files for composing requests into ordered test suites, and <code>.napenv</code> files for environment-specific variables like base URLs and tokens. All files are human-readable, git-friendly, and produce clean diffs in code reviews.</p> </div> + <div class="faq-item"> + <h3>Can I convert my existing .http files to Napper?</h3> + <p>Yes. Napper includes a built-in converter that transforms <code>.http</code> files into <code>.nap</code> files. Run <code>napper convert http ./requests.http</code> and Napper will parse your requests, map variables to <code>.napenv</code> files, and preserve request names. The converter supports both Microsoft (VS Code REST Client) and JetBrains (IntelliJ, Rider, WebStorm) <code>.http</code> dialects, including environment files like <code>http-client.env.json</code>.</p> + </div> + <div class="faq-item"> <h3>Does Napper require VS Code?</h3> <p>No. The Napper CLI runs independently from any editor or IDE. You can use it entirely from the terminal. 
The VS Code extension is optional and provides syntax highlighting, a request explorer, environment switching, and Test Explorer integration for developers who prefer working in an editor.</p> @@ -390,7 +410,7 @@ permalink: / "name": "Does Napper work with CI/CD pipelines?", "acceptedAnswer": { "@type": "Answer", - "text": "Yes. Napper is designed for CI/CD from the ground up. The CLI binary is self-contained with no runtime dependencies. It outputs JUnit XML, TAP, JSON, and NDJSON formats. It integrates with GitHub Actions, GitLab CI, Jenkins, Azure DevOps, and any platform that supports standard test output formats." + "text": "Yes. Napper is designed for CI/CD from the ground up. The CLI binary is self-contained with no runtime dependencies. It outputs JUnit XML, JSON, and NDJSON formats. It integrates with GitHub Actions, GitLab CI, Jenkins, Azure DevOps, and any platform that supports standard test output formats." } }, { @@ -409,6 +429,14 @@ permalink: / "text": "Napper uses three plain text file formats: .nap files for individual HTTP requests, .naplist files for composing requests into ordered test suites, and .napenv files for environment-specific variables like base URLs and tokens." } }, + { + "@type": "Question", + "name": "Can I convert my existing .http files to Napper?", + "acceptedAnswer": { + "@type": "Answer", + "text": "Yes. Napper includes a built-in converter that transforms .http files into .nap files. Run napper convert http ./requests.http and Napper will parse your requests, map variables to .napenv files, and preserve request names. The converter supports both Microsoft (VS Code REST Client) and JetBrains (IntelliJ, Rider, WebStorm) .http dialects, including environment files like http-client.env.json." 
+ } + }, { "@type": "Question", "name": "Does Napper require VS Code?", @@ -449,8 +477,9 @@ permalink: / "F# and C# scripting for advanced request flows", "Composable test suites with .naplist playlists", "Environment variable management with .napenv files", - "JUnit XML, TAP, JSON, NDJSON output for CI/CD", - "Native VS Code Test Explorer integration" + "JUnit XML, JSON, NDJSON output for CI/CD", + "Native VS Code Test Explorer integration", + "Built-in .http file converter for Microsoft and JetBrains formats" ] } </script>