From 32b2e8715a0db40acda1aa5afed83ec7fe9f3c9a Mon Sep 17 00:00:00 2001 From: GitHub Copilot Agent Date: Thu, 19 Feb 2026 17:16:03 +0100 Subject: [PATCH 1/9] refactor(ci): python-auslagerung, qodana-reuse und dependency-canary --- .github/workflows/_qodana-contract.yml | 84 +++++ .github/workflows/dependency-canary.yml | 46 +++ .github/workflows/qodana.yml | 62 +--- docs/audit/009_SUPPLY_CHAIN_BASELINE.MD | 6 + docs/audit/109_SUPPLY_CHAIN_BASELINE.MD | 6 + docs/versioning/001_POLICY_VERSIONING.MD | 11 + docs/versioning/101_POLICY_VERSIONING.MD | 11 + .../ArchiveSharpCompressCompatUnitTests.cs | 56 ++++ .../FileTypeDetectionLib.Tests/Unit/README.md | 1 + tools/ci/bin/bdd_readable_from_trx.py | 134 ++++++++ tools/ci/bin/check_naming_snt.py | 286 ++++++++++++++++++ tools/ci/bin/check_versioning_svt.py | 276 +++++++++++++++++ tools/ci/bin/dependency-canary.sh | 60 ++++ tools/ci/bin/dependency_canary.py | 107 +++++++ tools/ci/bin/keychain_get_secret.py | 32 ++ tools/ci/bin/nuget_migration_helpers.py | 51 ++++ tools/ci/bin/read_json_field.py | 32 ++ tools/ci/bin/run.sh | 15 +- tools/ci/bin/sharpcompress-canary.sh | 6 + tools/ci/bin/verify_nuget_release_helpers.py | 155 ++++++++++ tools/ci/check-naming-snt.sh | 285 +---------------- tools/ci/check-versioning-svt.sh | 279 +---------------- tools/ci/nuget-migrate-legacy-package.sh | 24 +- tools/ci/policies/data/dependency_canary.json | 19 ++ tools/ci/publish_nuget_local.sh | 18 +- .../ci/release/assert_naming_summary_pass.sh | 9 +- tools/ci/release/read_summary_status.py | 24 ++ .../resolve_workflow_run_release_meta.sh | 12 +- tools/ci/verify_nuget_release.sh | 129 +------- tools/test-bdd-readable.sh | 122 +------- tools/versioning/count_prefixed_labels.py | 27 ++ tools/versioning/run-versioning-policy.sh | 2 +- 32 files changed, 1453 insertions(+), 934 deletions(-) create mode 100644 .github/workflows/_qodana-contract.yml create mode 100644 .github/workflows/dependency-canary.yml create mode 100644 tests/FileTypeDetectionLib.Tests/Unit/ArchiveSharpCompressCompatUnitTests.cs create mode 100755 tools/ci/bin/bdd_readable_from_trx.py create mode 100755 tools/ci/bin/check_naming_snt.py create mode 100755 tools/ci/bin/check_versioning_svt.py create mode 100755 tools/ci/bin/dependency-canary.sh create mode 100755 tools/ci/bin/dependency_canary.py create mode 100755 tools/ci/bin/keychain_get_secret.py create mode 100755 tools/ci/bin/nuget_migration_helpers.py create mode 100755 tools/ci/bin/read_json_field.py create mode 100755 tools/ci/bin/sharpcompress-canary.sh create mode 100755 tools/ci/bin/verify_nuget_release_helpers.py create mode 100644 tools/ci/policies/data/dependency_canary.json create mode 100755 tools/ci/release/read_summary_status.py create mode 100755 tools/versioning/count_prefixed_labels.py diff --git a/.github/workflows/_qodana-contract.yml b/.github/workflows/_qodana-contract.yml new file mode 100644 index 00000000..17f90f3f --- /dev/null +++ b/.github/workflows/_qodana-contract.yml @@ -0,0 +1,84 @@ +name: qodana-contract-core + +on: + workflow_call: + inputs: + pre_command: + description: "Optional shell command executed before Qodana." + required: false + type: string + default: "" + artifact_name: + description: "Artifact name for uploaded Qodana evidence." + required: false + type: string + default: "ci-qodana" + upload_sarif: + description: "Upload SARIF to code scanning on non-PR events." 
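+        # The canary jobs set this to false; the upload step below is additionally
+        # gated to non-pull-request events.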
+ required: false + type: boolean + default: true + +permissions: + contents: read + +jobs: + qodana: + if: github.event_name != 'pull_request' || (github.event.pull_request.head.repo.fork == false && github.actor != 'dependabot[bot]') + runs-on: ubuntu-latest + permissions: + contents: read + security-events: write + env: + QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }} + PRE_COMMAND: ${{ inputs.pre_command }} + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Setup .NET + uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4 + with: + dotnet-version: "10.0.102" + + - name: Run pre command + if: env.PRE_COMMAND != '' + shell: bash + run: bash -euo pipefail -c "${PRE_COMMAND}" + + - name: Assert QODANA_TOKEN present + shell: bash + run: | + test -n "${QODANA_TOKEN:-}" || (echo "FAIL: QODANA_TOKEN missing" >&2; exit 1) + echo "OK: QODANA_TOKEN present" + + - name: Run Qodana + uses: JetBrains/qodana-action@42dad391966aca8ca344ca2340a7f43a5507e9b2 # v2025.3.1 + with: + args: --linter=jetbrains/qodana-dotnet:2025.3 + results-dir: artifacts/ci/qodana + upload-result: false + use-caches: false + + - name: Run Entry Check + if: always() + shell: bash + env: + CI_DEFER_ARTIFACT_LINK_RESOLUTION: "1" + run: bash tools/ci/bin/run.sh qodana + + - name: Upload SARIF To Code Scanning + if: inputs.upload_sarif && github.event_name != 'pull_request' + uses: github/codeql-action/upload-sarif@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3 + with: + sarif_file: artifacts/ci/qodana/qodana.upload.sarif.json + + - name: Upload Artifact + if: always() + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + with: + name: ${{ inputs.artifact_name }} + path: artifacts/ci/qodana/ + if-no-files-found: error diff --git a/.github/workflows/dependency-canary.yml b/.github/workflows/dependency-canary.yml new file mode 100644 index 00000000..55109195 --- /dev/null +++ b/.github/workflows/dependency-canary.yml @@ -0,0 +1,46 @@ +name: dependency-canary + +on: + schedule: + - cron: "0 5 * * 1" + workflow_dispatch: + inputs: + dependency: + description: "Dependency to probe (all or exact package id)" + required: false + default: "all" + version: + description: "Target version or 'latest'" + required: false + default: "latest" + +permissions: + contents: read + +jobs: + canary-sharpcompress: + if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == 'SharpCompress' + uses: ./.github/workflows/_qodana-contract.yml + with: + pre_command: bash -euo pipefail tools/ci/bin/dependency-canary.sh "SharpCompress" "${{ inputs.version || 'latest' }}" + artifact_name: canary-SharpCompress + upload_sarif: false + secrets: inherit + + canary-recyclable: + if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == 'Microsoft.IO.RecyclableMemoryStream' + uses: ./.github/workflows/_qodana-contract.yml + with: + pre_command: bash -euo pipefail tools/ci/bin/dependency-canary.sh "Microsoft.IO.RecyclableMemoryStream" "${{ inputs.version || 'latest' }}" + artifact_name: canary-Microsoft.IO.RecyclableMemoryStream + upload_sarif: false + secrets: inherit + + canary-system-text-json: + if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == 'System.Text.Json' + uses: ./.github/workflows/_qodana-contract.yml + with: + pre_command: bash -euo pipefail tools/ci/bin/dependency-canary.sh "System.Text.Json" 
"${{ inputs.version || 'latest' }}" + artifact_name: canary-System.Text.Json + upload_sarif: false + secrets: inherit diff --git a/.github/workflows/qodana.yml b/.github/workflows/qodana.yml index 40659c60..b01302b1 100644 --- a/.github/workflows/qodana.yml +++ b/.github/workflows/qodana.yml @@ -12,60 +12,8 @@ permissions: jobs: qodana: - # In untrusted PR contexts (forks, dependabot), repository secrets are unavailable. - if: github.event_name != 'pull_request' || (github.event.pull_request.head.repo.fork == false && github.actor != 'dependabot[bot]') - runs-on: ubuntu-latest - permissions: - contents: read - security-events: write - env: - QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }} - steps: - - name: Checkout - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - with: - fetch-depth: 0 - - - name: Assert QODANA_TOKEN present - # Fail-closed in trusted CI contexts where Qodana is expected to run. - shell: bash - run: | - test -n "${QODANA_TOKEN:-}" || (echo "FAIL: QODANA_TOKEN missing" >&2; exit 1) - echo "OK: QODANA_TOKEN present" - - - name: Setup .NET - uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4 - with: - dotnet-version: "10.0.102" - - - name: Run Qodana - uses: JetBrains/qodana-action@42dad391966aca8ca344ca2340a7f43a5507e9b2 # v2025.3.1 - with: - # gh-actions input is a single string; use --flag=value form to avoid passing a single argv containing a space. - args: --linter=jetbrains/qodana-dotnet:2025.3 - results-dir: artifacts/ci/qodana - upload-result: false - use-caches: false - - - name: Run Entry Check - if: always() - shell: bash - env: - # The job uploads the qodana artifact after this step; defer artifact-link rendering to avoid false-negative - # "artifact_not_found" failures when earlier steps fail. - CI_DEFER_ARTIFACT_LINK_RESOLUTION: "1" - run: bash tools/ci/bin/run.sh qodana - - - name: Upload SARIF To Code Scanning - if: github.event_name != 'pull_request' - uses: github/codeql-action/upload-sarif@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3 - with: - sarif_file: artifacts/ci/qodana/qodana.upload.sarif.json - - - name: Upload Artifact - if: always() - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - with: - name: ci-qodana - path: artifacts/ci/qodana/ - if-no-files-found: error + uses: ./.github/workflows/_qodana-contract.yml + with: + artifact_name: ci-qodana + upload_sarif: true + secrets: inherit diff --git a/docs/audit/009_SUPPLY_CHAIN_BASELINE.MD b/docs/audit/009_SUPPLY_CHAIN_BASELINE.MD index d1634759..150f7e97 100644 --- a/docs/audit/009_SUPPLY_CHAIN_BASELINE.MD +++ b/docs/audit/009_SUPPLY_CHAIN_BASELINE.MD @@ -34,6 +34,10 @@ Minimum an reproduzierbaren Kontrollen fuer Source-to-Package-Integritaet in die - E4 Release/Provenance-Nachweise: - `.github/workflows/release.yml` - `artifacts/nuget/attestation-verify.txt` (wenn der Release-Workflow laeuft) +- E5 Dependency-Canary-Nachweise: + - `.github/workflows/dependency-canary.yml` + - Workflow-Artefakte `canary-*` mit Qodana-Contract-Output (`artifacts/ci/qodana/`) + - Laufhistorie: https://github.com/tomtastisch/FileClassifier/actions/workflows/dependency-canary.yml ## 4. Verifikationskommandos Alle Kommandos sind fuer Ausfuehrung im Repository-Root gedacht. 
@@ -45,6 +49,8 @@ NUPKG="$(find artifacts/nuget -maxdepth 1 -type f -name '*.nupkg' | head -n 1)" test -n "$NUPKG" dotnet nuget verify "$NUPKG" gh attestation verify "$NUPKG" --repo tomtastisch/FileClassifier +# Optionaler Canary-Nachweis: +gh run list --workflow dependency-canary.yml --limit 5 ``` ## 5. Operative Kadenz diff --git a/docs/audit/109_SUPPLY_CHAIN_BASELINE.MD b/docs/audit/109_SUPPLY_CHAIN_BASELINE.MD index 2ebc6769..e5275ac1 100644 --- a/docs/audit/109_SUPPLY_CHAIN_BASELINE.MD +++ b/docs/audit/109_SUPPLY_CHAIN_BASELINE.MD @@ -34,6 +34,10 @@ Define minimum reproducible controls for source-to-package integrity in this rep - E4 Release/provenance evidence: - `.github/workflows/release.yml` - `artifacts/nuget/attestation-verify.txt` (when release workflow runs) +- E5 Dependency canary evidence: + - `.github/workflows/dependency-canary.yml` + - workflow artifacts `canary-*` with Qodana contract output (`artifacts/ci/qodana/`) + - run history: https://github.com/tomtastisch/FileClassifier/actions/workflows/dependency-canary.yml ## 4. Verification Commands All commands are intended to run from the repository root. @@ -45,6 +49,8 @@ NUPKG="$(find artifacts/nuget -maxdepth 1 -type f -name '*.nupkg' | head -n 1)" test -n "$NUPKG" dotnet nuget verify "$NUPKG" gh attestation verify "$NUPKG" --repo tomtastisch/FileClassifier +# Optional canary evidence: +gh run list --workflow dependency-canary.yml --limit 5 ``` ## 5. Operational Cadence diff --git a/docs/versioning/001_POLICY_VERSIONING.MD b/docs/versioning/001_POLICY_VERSIONING.MD index ae3ff185..e786a27b 100644 --- a/docs/versioning/001_POLICY_VERSIONING.MD +++ b/docs/versioning/001_POLICY_VERSIONING.MD @@ -93,6 +93,17 @@ Primary-Prioritaet ist fix: ## 11. Qodana Qodana ist zusaetzliche Static-Analysis und ersetzt keine CI-Quality-Gates fuer Build/Test/Coverage/Versioning. +## 12. Dependency-Upgrade-Policy (Canary) +- Patch-/Minor-Upgrades fuer risikoreiche Dependencies werden ueber den Canary-Workflow vorab verifiziert: + - `.github/workflows/dependency-canary.yml` + - `tools/ci/bin/dependency-canary.sh` + - `tools/ci/policies/data/dependency_canary.json` +- Verbindliche Canary-Nachweise: + - fokussierte dependency-spezifische Fail-closed-/Guard-Tests erfolgreich + - Qodana-Lauf plus `run.sh qodana` Contract-Validierung erfolgreich +- Major-Upgrades (`X+1.0.0`) sind fuer alle Canary-Dependencies nicht automatisch: + - nur via dediziertem PR mit API-Impact-Review und expliziter SemVer-Entscheidung + ## RoC-Bezug - [Artifact-Contract-Regel](https://github.com/tomtastisch/FileClassifier/blob/main/tools/ci/policies/rules/artifact_contract.yaml) - [Docs-Drift-Regel](https://github.com/tomtastisch/FileClassifier/blob/main/tools/ci/policies/rules/docs_drift.yaml) diff --git a/docs/versioning/101_POLICY_VERSIONING.MD b/docs/versioning/101_POLICY_VERSIONING.MD index 7d574a74..e5ba29fa 100644 --- a/docs/versioning/101_POLICY_VERSIONING.MD +++ b/docs/versioning/101_POLICY_VERSIONING.MD @@ -93,6 +93,17 @@ Primary priority is fixed: ## 11. Qodana Qodana is additional static analysis and does not replace CI quality gates for build/test/coverage/versioning. +## 12. 
Dependency Upgrade Policy (Canary) +- Patch/minor upgrades for risky dependencies are pre-verified via the canary workflow: + - `.github/workflows/dependency-canary.yml` + - `tools/ci/bin/dependency-canary.sh` + - `tools/ci/policies/data/dependency_canary.json` +- Mandatory canary evidence: + - focused dependency-specific fail-closed/guard tests pass + - Qodana run plus `run.sh qodana` contract validation pass +- Major upgrades (`X+1.0.0`) are not automatic for all canary dependencies: + - only via dedicated PR with API impact review and explicit SemVer decision + ## RoC References - [Artifact-Contract-Regel](https://github.com/tomtastisch/FileClassifier/blob/main/tools/ci/policies/rules/artifact_contract.yaml) - [Docs-Drift-Regel](https://github.com/tomtastisch/FileClassifier/blob/main/tools/ci/policies/rules/docs_drift.yaml) diff --git a/tests/FileTypeDetectionLib.Tests/Unit/ArchiveSharpCompressCompatUnitTests.cs b/tests/FileTypeDetectionLib.Tests/Unit/ArchiveSharpCompressCompatUnitTests.cs new file mode 100644 index 00000000..a6e8f84a --- /dev/null +++ b/tests/FileTypeDetectionLib.Tests/Unit/ArchiveSharpCompressCompatUnitTests.cs @@ -0,0 +1,56 @@ +using FileTypeDetectionLib.Tests.Support; +using Tomtastisch.FileClassifier; + +namespace FileTypeDetectionLib.Tests.Unit; + +public sealed class ArchiveSharpCompressCompatUnitTests +{ + [Fact] + public void OpenArchive_ReturnsNull_ForNonArchivePayload() + { + using var stream = new MemoryStream(new byte[] { 0x01, 0x02, 0x03 }, false); + var archive = ArchiveSharpCompressCompat.OpenArchive(stream); + Assert.Null(archive); + } + + [Fact] + public void OpenArchive_ReturnsArchive_ForTarPayload() + { + var tar = ArchivePayloadFactory.CreateTarWithSingleEntry("note.txt", "ok"); + using var stream = new MemoryStream(tar, false); + using var archive = ArchiveSharpCompressCompat.OpenArchive(stream); + Assert.NotNull(archive); + } + + [Fact] + public void OpenArchiveForContainer_ReturnsArchive_ForGZipPayload() + { + var gzip = ArchivePayloadFactory.CreateGZipWithSingleEntry("payload.bin", new byte[] { 0x11, 0x22, 0x33 }); + using var stream = new MemoryStream(gzip, false); + using var archive = ArchiveSharpCompressCompat.OpenArchiveForContainer(stream, ArchiveContainerType.GZip); + Assert.NotNull(archive); + } + + [Fact] + public void HasGZipMagic_ReturnsTrue_ForGZipHeader() + { + using var stream = new MemoryStream(new byte[] { 0x1F, 0x8B, 0x08 }, false); + Assert.True(ArchiveSharpCompressCompat.HasGZipMagic(stream)); + } + + [Fact] + public void HasGZipMagic_ReturnsFalse_ForNonSeekableStream() + { + using var nonSeekable = new NonSeekableStream(new byte[] { 0x1F, 0x8B, 0x08 }); + Assert.False(ArchiveSharpCompressCompat.HasGZipMagic(nonSeekable)); + } + + private sealed class NonSeekableStream : MemoryStream + { + internal NonSeekableStream(byte[] buffer) : base(buffer, false) + { + } + + public override bool CanSeek => false; + } +} diff --git a/tests/FileTypeDetectionLib.Tests/Unit/README.md b/tests/FileTypeDetectionLib.Tests/Unit/README.md index c0d5c3dc..b3c2d4cf 100644 --- a/tests/FileTypeDetectionLib.Tests/Unit/README.md +++ b/tests/FileTypeDetectionLib.Tests/Unit/README.md @@ -51,6 +51,7 @@ Direkter Nachweis von API-Korrektheit, Security-Regeln und Determinismus. 
 | `ArchiveTypeResolverAdditionalUnitTests.cs` | Stream/Bytes Branches in ArchiveTypeResolver |
 | `ArchiveTypeResolverExceptionUnitTests.cs` | Exception-Pfade in ArchiveTypeResolver |
 | `SharpCompressArchiveBackendUnitTests.cs` | Branches fuer SharpCompress-Backend |
+| `ArchiveSharpCompressCompatUnitTests.cs` | Contract-Guards fuer SharpCompress-Kompat-Schicht |
 | `SharpCompressEntryModelUnitTests.cs` | Null-Entry Defaults im SharpCompressEntryModel |
 | `SharpCompressEntryModelNonNullUnitTests.cs` | Real-Entry Pfade im SharpCompressEntryModel |
 | `FileTypeDetectorAdditionalUnitTests.cs` | LoadOptions/ReadFileSafe/Detect Branches |
diff --git a/tools/ci/bin/bdd_readable_from_trx.py b/tools/ci/bin/bdd_readable_from_trx.py
new file mode 100755
index 00000000..e5e44378
--- /dev/null
+++ b/tools/ci/bin/bdd_readable_from_trx.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python3
+from __future__ import annotations
+
+import re
+import sys
+import xml.etree.ElementTree as ET
+
+BLUE = "\033[94m"
+WHITE = "\033[97m"
+GREEN = "\033[32m"
+RED = "\033[31m"
+RESET = "\033[0m"
+DIM = "\033[2m"
+
+CHECK = "✔"
+CROSS = "✘"
+
+
+def strip_param_suffix(text: str) -> str:
+    value = text.strip()
+    while True:
+        updated = re.sub(r"\s*\([^()]*\)\s*$", "", value).strip()
+        if updated == value:
+            return value
+        value = updated
+
+
+def humanize_identifier(text: str) -> str:
+    value = strip_param_suffix(text)
+    value = value.replace("_", " ")
+    value = re.sub(r"([a-z0-9])([A-Z])", r"\1 \2", value)
+    value = re.sub(r"([A-Z]+)([A-Z][a-z])", r"\1 \2", value)
+    value = re.sub(r"\s+", " ", value).strip()
+    return value
+
+
+def normalize_title(test_name: str, scenario: str | None) -> str:
+    if scenario:
+        return strip_param_suffix(scenario)
+    raw = strip_param_suffix(test_name)
+    if "." in raw:
+        raw = raw.rsplit(".", 1)[-1]
+    return humanize_identifier(raw)
+
+
+def iter_step_lines(stdout: str) -> list[str]:
+    if not stdout:
+        return []
+    lines: list[str] = []
+    for raw in stdout.splitlines():
+        line = raw.strip()
+        if not line:
+            continue
+        if line.startswith("[BDD]"):
+            continue
+        if line.startswith("-> done:"):
+            continue
+        if line.startswith("--- table step argument ---"):
+            continue
+        if line.startswith("|"):
+            continue
+        if line.startswith("Standardausgabemeldungen:"):
+            continue
+        if re.match(r"^(Angenommen|Wenn|Dann|Und|Aber)\b", line):
+            lines.append(line)
+    deduped: list[str] = []
+    seen: set[str] = set()
+    for line in lines:
+        if line not in seen:
+            deduped.append(line)
+            seen.add(line)
+    return deduped
+
+
+def main() -> int:
+    if len(sys.argv) != 2:
+        print("Usage: bdd_readable_from_trx.py <trx_path>", file=sys.stderr)
+        return 2
+
+    trx_path = sys.argv[1]
+    root = ET.parse(trx_path).getroot()
+    ns = {"t": root.tag.split("}")[0].strip("{")} if root.tag.startswith("{") else {}
+
+    def findall(path: str):
+        return root.findall(path, ns) if ns else root.findall(path)
+
+    def find(node, path: str):
+        return node.find(path, ns) if ns else node.find(path)
+
+    results: list[tuple[str, str, list[str]]] = []
+    for node in findall(".//t:UnitTestResult" if ns else ".//UnitTestResult"):
+        outcome = (node.attrib.get("outcome") or "").strip()
+        test_name = (node.attrib.get("testName") or "").strip()
+        output = find(node, "t:Output" if ns else "Output")
+        stdout = ""
+        if output is not None:
+            std_node = find(output, "t:StdOut" if ns else "StdOut")
+            if std_node is not None and std_node.text:
+                stdout = std_node.text
+
+        scenario = None
+        if stdout:
+            for line in stdout.splitlines():
+                l = line.strip()
+                m = re.match(r"^\[BDD\]\s*Szenario startet:\s*(.+)$", l)
+                if m:
+                    scenario = m.group(1).strip()
+                    break
+
+        title = normalize_title(test_name, scenario)
+        steps = iter_step_lines(stdout)
+        results.append((title, outcome, steps))
+
+    for title, outcome, steps in results:
+        passed = outcome.lower() == "passed"
+        icon = CHECK if passed else CROSS
+        icon_color = GREEN if passed else RED
+        end_word = "FINISHED" if passed else "FAILED"
+
+        if not steps:
+            steps = ["Test erfolgreich abgeschlossen" if passed else "Test fehlgeschlagen"]
+
+        print(f"{DIM}────────────────────────────────────────────────────────────────{RESET}")
+        print(f"{BLUE}{title}{RESET}")
+        for step in steps:
+            print(f"{icon_color}{icon}{RESET} {WHITE}{step}{RESET}")
+        print(f"{icon_color}{end_word}{RESET}")
+        print("")
+
+    return 0
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())
diff --git a/tools/ci/bin/check_naming_snt.py b/tools/ci/bin/check_naming_snt.py
new file mode 100755
index 00000000..b36e649b
--- /dev/null
+++ b/tools/ci/bin/check_naming_snt.py
@@ -0,0 +1,286 @@
+#!/usr/bin/env python3
+from __future__ import annotations
+
+import json
+import re
+import subprocess
+import sys
+import xml.etree.ElementTree as ET
+from pathlib import Path
+
+
+def main() -> int:
+    if len(sys.argv) != 4:
+        print("Usage: check_naming_snt.py <repo_root> <ssot_path> <out_path>", file=sys.stderr)
+        return 2
+
+    repo_root = Path(sys.argv[1]).resolve()
+    ssot_path = Path(sys.argv[2]).resolve()
+    out_path = Path(sys.argv[3]).resolve()
+    report_json_path = repo_root / "artifacts" / "naming_snt_report.json"
+    report_tsv_path = repo_root / "artifacts" / "naming_snt_report.tsv"
+
+    violations: list[dict[str, str]] = []
+    deprecated_hits: list[dict[str, str]] = []
+    checked_paths: list[str] = []
+
+    def rel(path: Path) -> str:
+        return path.resolve().relative_to(repo_root).as_posix()
+
+    def add_violation(scope: str, expected: str, actual: str, evidence: str, message: str) -> None:
+        violations.append({
+            "scope": scope,
+            "expected": expected,
+            "actual": actual,
+            "evidence": evidence,
+            "message": message,
+        })
+
+    def file_line_hit(path: Path, needle: str) -> str:
+        try:
+            lines = path.read_text(encoding="utf-8").splitlines()
+        except Exception:
+            return f"{rel(path)}:1"
+        for idx, line in enumerate(lines, start=1):
+            if needle in line:
+                return f"{rel(path)}:{idx}"
+        return f"{rel(path)}:1"
+
+    def normalize_repo_url(url: str) -> str:
+        text = url.strip()
+        if text.startswith("git@github.com:"):
+            text = "https://github.com/" + text.split(":", 1)[1]
+        text = re.sub(r"^ssh://git@github.com/", "https://github.com/", text)
+        text = re.sub(r"\.git$", "", text)
+        return text.rstrip("/")
+
+    if not ssot_path.exists():
+        add_violation("ssot_file", str(ssot_path), "missing", str(ssot_path), "SSOT file missing")
+        report = {
+            "schema_version": 1,
+            "check_id": "naming-snt",
+            "status": "fail",
+            "canonical": {},
+            "violations": violations,
+            "deprecated_hits": deprecated_hits,
+            "file_counts": {},
+            "checked_paths": [],
+            "mismatches": violations,
+        }
+        report_json_path.write_text(json.dumps(report, indent=2, ensure_ascii=True) + "\n", encoding="utf-8")
+        out_path.write_text(json.dumps({
+            "schema_version": 1,
+            "check_id": "naming-snt",
+            "status": "fail",
+            "rule_violations": [{
+                "rule_id": "CI-NAMING-001",
+                "severity": "fail",
+                "message": "SSOT file missing",
+                "evidence_paths": [str(ssot_path)],
+            }],
+            "evidence_paths": [str(ssot_path)],
+            "artifacts": ["artifacts/naming_snt_report.json", "artifacts/naming_snt_report.tsv"],
+        }, indent=2, ensure_ascii=True) + "\n", encoding="utf-8")
+        return 1
+
+
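+    # From here on the SSOT file is known to exist; findings are accumulated in
+    # `violations` and reported together at the end instead of failing fast.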
+    ssot = json.loads(ssot_path.read_text(encoding="utf-8"))
+    canonical_name = str(ssot.get("canonical_name", ""))
+    package_id = str(ssot.get("package_id", ""))
+    root_namespace = str(ssot.get("root_namespace", ""))
+    assembly_name = str(ssot.get("assembly_name", ""))
+    repo_identity = str(ssot.get("repo_identity", ssot.get("repo_slug", "")))
+    repo_slug = str(ssot.get("repo_slug", repo_identity))
+    repo_display_name = str(ssot.get("repo_display_name", ""))
+    repository_url = str(ssot.get("repository_url", ""))
+    deprecated_package_ids = ssot.get("deprecated_package_ids", [])
+    namespace_decision = ssot.get("namespace_decision", {})
+    target_root_namespace = str(namespace_decision.get("target_root_namespace", ""))
+    docs_expected_links = ssot.get("docs_expected_links", [])
+
+    for scope, expected, actual in [
+        ("canonical_vs_package_id", canonical_name, package_id),
+        ("canonical_vs_root_namespace", canonical_name, root_namespace),
+        ("canonical_vs_assembly_name", canonical_name, assembly_name),
+    ]:
+        if expected != actual:
+            add_violation(scope, expected, actual, rel(ssot_path), f"{scope} mismatch")
+
+    if not isinstance(deprecated_package_ids, list) or len(deprecated_package_ids) == 0:
+        add_violation("deprecated_package_ids", "non-empty list", str(deprecated_package_ids), rel(ssot_path), "deprecated_package_ids must be non-empty")
+
+    if canonical_name in deprecated_package_ids:
+        add_violation("deprecated_package_ids", "canonical absent", canonical_name, rel(ssot_path), "canonical_name must not be deprecated")
+
+    remote_raw = ""
+    try:
+        remote_raw = subprocess.check_output(["git", "-C", str(repo_root), "remote", "get-url", "origin"], text=True).strip()
+    except Exception as ex:
+        add_violation("git_remote", "origin configured", "missing", ".git/config", f"cannot read origin remote: {ex}")
+
+    normalized_remote = normalize_repo_url(remote_raw) if remote_raw else ""
+    normalized_repo = normalize_repo_url(repository_url)
+    if normalized_remote and normalized_repo and normalized_remote != normalized_repo:
+        add_violation("repository_url.matches_origin", normalized_repo, normalized_remote, ".git/config", "repository_url differs from origin URL")
+
+    if normalized_remote:
+        m = re.search(r"/([^/]+)$", normalized_remote)
+        remote_slug = m.group(1) if m else ""
+        if repo_slug and remote_slug != repo_slug:
+            add_violation("repo_slug.matches_remote_slug", repo_slug, remote_slug, ".git/config", "repo_slug differs from remote slug")
+
+    project_path = repo_root / "src" / "FileTypeDetection" / "FileTypeDetectionLib.vbproj"
+    checked_paths.append(rel(project_path))
+    if not project_path.exists():
+        add_violation("vbproj.exists", rel(project_path), "missing", rel(project_path), "vbproj missing")
+    else:
+        tree = ET.parse(project_path)
+        proj_root = tree.getroot()
+        values: dict[str, str] = {}
+        for elem in proj_root.iter():
+            tag = elem.tag.split("}")[-1]
+            if tag in {"PackageId", "RootNamespace", "AssemblyName"} and elem.text and tag not in values:
+                values[tag] = elem.text.strip()
+        if values.get("PackageId", "") != package_id:
+            add_violation("vbproj.PackageId", package_id, values.get("PackageId", ""), file_line_hit(project_path, "<PackageId>"), "vbproj PackageId mismatch")
+        if values.get("RootNamespace", "") != root_namespace:
+            add_violation("vbproj.RootNamespace", root_namespace, values.get("RootNamespace", ""), file_line_hit(project_path, "<RootNamespace>"), "vbproj RootNamespace mismatch")
+        if values.get("AssemblyName", "") != assembly_name:
+            add_violation("vbproj.AssemblyName", assembly_name, values.get("AssemblyName", ""), file_line_hit(project_path, "<AssemblyName>"), "vbproj AssemblyName mismatch")
+
+    namespace_files = sorted((repo_root / "src" / "FileTypeDetection").rglob("*.vb"))
+    for vb in namespace_files:
+        checked_paths.append(rel(vb))
+        lines = vb.read_text(encoding="utf-8").splitlines()
+        for idx, line in enumerate(lines, start=1):
+            m = re.match(r"^\s*Namespace\s+([A-Za-z_][A-Za-z0-9_.]*)\s*$", line)
+            if not m:
+                continue
+            ns = re.sub(r"^Global\.", "", m.group(1))
+            if target_root_namespace and not ns.startswith(target_root_namespace):
+                add_violation("code.namespace.prefix", target_root_namespace + "*", ns, f"{rel(vb)}:{idx}", "Namespace must start with target_root_namespace")
+
+    scan_paths = [
+        repo_root / "README.md",
+        repo_root / "docs",
+        repo_root / "samples",
+        repo_root / "tests",
+    ]
+
+    docs_required = {"README.md"}
+    if isinstance(docs_expected_links, list):
+        for link in docs_expected_links:
+            if not isinstance(link, str):
+                continue
+            marker = "/blob/main/"
+            if marker in link:
+                docs_required.add(link.split(marker, 1)[1])
+
+    canonical_required_paths = set(docs_required) | {
+        "samples/PortableConsumer/PortableConsumer.csproj",
+        "tests/PackageBacked.Tests/PackageBacked.Tests.csproj",
+    }
+    for base in scan_paths:
+        if not base.exists():
+            continue
+        files = [base] if base.is_file() else sorted(p for p in base.rglob("*") if p.is_file())
+        for path in files:
+            if path.suffix.lower() not in {".md", ".csproj", ".cs", ".vb", ".txt", ".json", ".yml", ".yaml"}:
+                continue
+            rpath = rel(path)
+            checked_paths.append(rpath)
+            text = path.read_text(encoding="utf-8", errors="ignore")
+            if package_id not in text and rpath in canonical_required_paths:
+                add_violation("canonical_reference", package_id, "missing", rpath, "Canonical package reference missing in required install/consumer file")
+
+    install_targets = set(docs_required)
+    migration_doc_rel = "docs/guides/004_GUIDE_MIGRATE_LEGACY_NUGET.MD"
+    for rpath in sorted(install_targets):
+        path = repo_root / rpath
+        if not path.exists():
+            continue
+        text = path.read_text(encoding="utf-8", errors="ignore")
+        lines = text.splitlines()
+        for dep in deprecated_package_ids if isinstance(deprecated_package_ids, list) else []:
+            if not isinstance(dep, str) or not dep:
+                continue
+            in_install_snippet = False
+            for line in lines:
+                if dep in line and ("dotnet add package" in line or "PackageReference" in line):
+                    in_install_snippet = True
+                    break
+            if in_install_snippet:
+                hit = {
+                    "id": dep,
+                    "evidence": file_line_hit(path, dep),
+                }
+                deprecated_hits.append(hit)
+                if rpath != migration_doc_rel:
+                    add_violation("deprecated_id_in_install_docs", "absent", dep, hit["evidence"], "Deprecated package id appears in install docs")
+
+    file_counts = {
+        "checked_paths": len(sorted(set(checked_paths))),
+        "violations": len(violations),
+        "deprecated_hits": len(deprecated_hits),
+    }
+    status = "pass" if len(violations) == 0 else "fail"
+
+    report = {
+        "schema_version": 1,
+        "check_id": "naming-snt",
+        "status": status,
+        "canonical": {
+            "canonical_name": canonical_name,
+            "package_id": package_id,
+            "root_namespace": root_namespace,
+            "assembly_name": assembly_name,
+            "repo_identity": repo_identity,
+            "repo_slug": repo_slug,
+            "repo_display_name": repo_display_name,
+            "repository_url": repository_url,
+            "deprecated_package_ids": deprecated_package_ids,
+            "ssot_file": rel(ssot_path),
+        },
+        "violations": violations,
+        "deprecated_hits": deprecated_hits,
+        "file_counts": file_counts,
+        "checked_paths": sorted(set(checked_paths)),
"mismatches": violations, + } + + summary = { + "schema_version": 1, + "check_id": "naming-snt", + "status": status, + "rule_violations": [ + { + "rule_id": "CI-NAMING-001", + "severity": "fail", + "message": v["message"], + "evidence_paths": [v["evidence"]], + } + for v in violations + ], + "evidence_paths": sorted({v["evidence"] for v in violations}), + "artifacts": [ + "artifacts/naming_snt_report.json", + "artifacts/naming_snt_report.tsv", + out_path.resolve().relative_to(repo_root).as_posix(), + ], + } + + report_json_path.write_text(json.dumps(report, indent=2, ensure_ascii=True) + "\n", encoding="utf-8") + out_path.write_text(json.dumps(summary, indent=2, ensure_ascii=True) + "\n", encoding="utf-8") + + rows = ["scope\texpected\tactual\tevidence\tmessage"] + for v in violations: + rows.append(f"{v['scope']}\t{v['expected']}\t{v['actual']}\t{v['evidence']}\t{v['message']}") + for d in deprecated_hits: + rows.append(f"deprecated_hit\t{d['id']}\t{d['id']}\t{d['evidence']}\tdeprecated id hit") + report_tsv_path.write_text("\n".join(rows) + "\n", encoding="utf-8") + + print(json.dumps(report, indent=2, ensure_ascii=True)) + return 0 if status == "pass" else 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tools/ci/bin/check_versioning_svt.py b/tools/ci/bin/check_versioning_svt.py new file mode 100755 index 00000000..abc794fc --- /dev/null +++ b/tools/ci/bin/check_versioning_svt.py @@ -0,0 +1,276 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import json +import os +import re +import subprocess +import sys +import zipfile +import xml.etree.ElementTree as ET +from pathlib import Path + + +def main() -> int: + if len(sys.argv) != 5: + print("Usage: check_versioning_svt.py ", file=sys.stderr) + return 2 + + repo_root = Path(sys.argv[1]).resolve() + naming_ssot = Path(sys.argv[2]).resolve() + versioning_ssot = Path(sys.argv[3]).resolve() + out_path = Path(sys.argv[4]).resolve() + report_path = repo_root / "artifacts" / "versioning_report.json" + + violations: list[dict[str, str]] = [] + checks: list[dict[str, str]] = [] + require_release_tag = os.environ.get("REQUIRE_RELEASE_TAG", "0") == "1" + expected_release_tag = os.environ.get("EXPECTED_RELEASE_TAG", "").strip() + release_tag_regex = re.compile(r"^v[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$") + + def semver_core(version: str) -> str: + m = re.match(r"^([0-9]+\.[0-9]+\.[0-9]+)(?:-[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?$", version or "") + return m.group(1) if m else "" + + def rel(p: Path) -> str: + return p.resolve().relative_to(repo_root).as_posix() + + def fail(scope: str, expected: str, actual: str, evidence: str, message: str) -> None: + violations.append({ + "scope": scope, + "expected": expected, + "actual": actual, + "evidence": evidence, + "message": message, + }) + + def check(scope: str, expected: str, actual: str, evidence: str) -> None: + status = "pass" if expected == actual else "fail" + checks.append({ + "scope": scope, + "status": status, + "expected": expected, + "actual": actual, + "evidence": evidence, + }) + if status == "fail": + fail(scope, expected, actual, evidence, f"Mismatch in {scope}") + + if not naming_ssot.exists(): + fail("ssot.naming", str(naming_ssot), "missing", str(naming_ssot), "Naming SSOT missing") + if not versioning_ssot.exists(): + fail("ssot.versioning", str(versioning_ssot), "missing", str(versioning_ssot), "Versioning SSOT missing") + + naming: dict = {} + versioning: dict = {} + if naming_ssot.exists(): + naming = 
json.loads(naming_ssot.read_text(encoding="utf-8")) + if versioning_ssot.exists(): + versioning = json.loads(versioning_ssot.read_text(encoding="utf-8")) + + canonical = str(naming.get("canonical_name", "")) + canonical_package = str(naming.get("package_id", "")) + check("identity.canonical_vs_package", canonical, canonical_package, rel(naming_ssot) if naming_ssot.exists() else str(naming_ssot)) + + project_files = versioning.get("project_files", []) + required_fields = versioning.get("require_vbproj_version_fields", []) + if not isinstance(project_files, list) or len(project_files) == 0: + fail("versioning.project_files", "non-empty list", str(project_files), rel(versioning_ssot) if versioning_ssot.exists() else str(versioning_ssot), "project_files must be configured") + + if not isinstance(required_fields, list) or len(required_fields) == 0: + fail("versioning.require_vbproj_version_fields", "non-empty list", str(required_fields), rel(versioning_ssot) if versioning_ssot.exists() else str(versioning_ssot), "require_vbproj_version_fields must be configured") + + head_tags: list[str] = [] + try: + raw_tags = subprocess.check_output(["git", "-C", str(repo_root), "tag", "--points-at", "HEAD"], text=True) + for line in raw_tags.splitlines(): + t = line.strip() + if release_tag_regex.match(t): + head_tags.append(t) + except Exception as ex: + fail("git.tag_lookup", "command succeeds", "failed", ".git", f"Failed to resolve HEAD tags: {ex}") + + head_tags = sorted(set(head_tags)) + expected_version = "" + if expected_release_tag: + if not release_tag_regex.match(expected_release_tag): + fail("svt.expected_release_tag", "vX.Y.Z[-prerelease]", expected_release_tag, "env:EXPECTED_RELEASE_TAG", "Invalid EXPECTED_RELEASE_TAG format") + else: + expected_version = expected_release_tag[1:] + checks.append({ + "scope": "svt.expected_release_tag", + "status": "pass", + "expected": expected_release_tag, + "actual": expected_release_tag, + "evidence": "env:EXPECTED_RELEASE_TAG", + }) + elif len(head_tags) == 0: + if require_release_tag: + fail("svt.head_tag", "exactly one tag vX.Y.Z[-prerelease] on HEAD", "none", ".git", "No exact release tag on HEAD") + elif len(head_tags) > 1: + fail("svt.head_tag", "exactly one tag vX.Y.Z[-prerelease] on HEAD", ",".join(head_tags), ".git", "Multiple release tags on HEAD") + else: + expected_version = head_tags[0][1:] + + def read_repo_version(props_path: Path) -> str: + if not props_path.exists(): + fail("repo.props.exists", "Directory.Build.props present", "missing", str(props_path), "Directory.Build.props missing") + return "" + txt = props_path.read_text(encoding="utf-8", errors="ignore") + m = re.search(r"\s*([^<\s]+)\s*", txt) + return m.group(1).strip() if m else "" + + def check_csproj_uses_repo_version(csproj: Path, prop_name: str) -> None: + if not csproj.exists(): + fail(f"repo.csproj.exists.{csproj.name}", "present", "missing", rel(csproj), f"{csproj.name} missing") + return + txt = csproj.read_text(encoding="utf-8", errors="ignore") + pat = rf"<{re.escape(prop_name)}\s+Condition=\"'\$\({re.escape(prop_name)}\)'\s*==\s*''\">([^<]+)" + m = re.search(pat, txt) + if not m: + fail(f"repo.csproj.{csproj.name}.{prop_name}", "default property present", "missing", rel(csproj), f"Default {prop_name} missing") + return + actual = m.group(1).strip() + check(f"repo.csproj.{csproj.name}.{prop_name}", "$(RepoVersion)", actual, rel(csproj)) + + repo_props = repo_root / "Directory.Build.props" + repo_version = read_repo_version(repo_props) + if repo_version == "": + 
fail("repo.ssot.RepoVersion", "non-empty", "empty", rel(repo_props), "RepoVersion missing in Directory.Build.props") + else: + if not re.match(r"^[0-9]+\.[0-9]+\.[0-9]+$", repo_version): + fail("repo.ssot.RepoVersion.semver", "X.Y.Z", repo_version, rel(repo_props), "RepoVersion is not semver X.Y.Z") + if expected_version: + expected_core_version = semver_core(expected_version) + if expected_core_version == "": + fail("repo.ssot.RepoVersion.tag.semver", "X.Y.Z[-prerelease]", expected_version, "env:EXPECTED_RELEASE_TAG", "Expected release version is not semver-compatible") + elif "-" in expected_version: + check("repo.ssot.RepoVersion.core", expected_core_version, repo_version, rel(repo_props)) + else: + check("repo.ssot.RepoVersion", expected_version, repo_version, rel(repo_props)) + + check_csproj_uses_repo_version(repo_root / "samples" / "PortableConsumer" / "PortableConsumer.csproj", "PortableConsumerPackageVersion") + check_csproj_uses_repo_version(repo_root / "tests" / "PackageBacked.Tests" / "PackageBacked.Tests.csproj", "PackageBackedVersion") + + vbproj_version = "" + vbproj_package_version = "" + project_path = None + if isinstance(project_files, list) and len(project_files) > 0: + project_path = (repo_root / str(project_files[0])).resolve() + if not project_path.exists(): + fail("vbproj.exists", str(project_path), "missing", str(project_path), "Configured vbproj missing") + else: + tree = ET.parse(project_path) + root = tree.getroot() + for elem in root.iter(): + tag = elem.tag.split("}")[-1] + if tag == "Version" and elem.text: + vbproj_version = elem.text.strip() + if tag == "PackageVersion" and elem.text: + vbproj_package_version = elem.text.strip() + + if "Version" in required_fields and vbproj_version == "": + fail("vbproj.Version", "non-empty", "empty", rel(project_path), "Version field missing in vbproj") + if "PackageVersion" in required_fields and vbproj_package_version == "": + fail("vbproj.PackageVersion", "non-empty", "empty", rel(project_path), "PackageVersion field missing in vbproj") + if vbproj_version and vbproj_package_version: + check("vbproj.Version_vs_PackageVersion", vbproj_version, vbproj_package_version, rel(project_path)) + + if expected_version and project_path is not None and project_path.exists(): + expected_core_version = semver_core(expected_version) + if expected_core_version == "": + fail("svt.expected_version.semver", "X.Y.Z[-prerelease]", expected_version, "env:EXPECTED_RELEASE_TAG", "Derived expected version is not semver-compatible") + elif "-" in expected_version: + check("svt.tag_core_vs_vbproj.Version", expected_core_version, vbproj_version, rel(project_path)) + check("svt.tag_core_vs_vbproj.PackageVersion", expected_core_version, vbproj_package_version, rel(project_path)) + else: + check("svt.tag_vs_vbproj.Version", expected_version, vbproj_version, rel(project_path)) + check("svt.tag_vs_vbproj.PackageVersion", expected_version, vbproj_package_version, rel(project_path)) + + nupkg_dir = repo_root / "artifacts" / "nuget" + nupkg_files = sorted([p for p in nupkg_dir.glob("*.nupkg") if not p.name.endswith(".snupkg")]) + if len(nupkg_files) == 0: + if require_release_tag: + fail("nupkg.exists", "at least one nupkg in artifacts/nuget", "none", rel(nupkg_dir), "No nupkg found for SVT verification") + else: + checks.append({ + "scope": "nupkg.exists", + "status": "pass", + "expected": "pre-pack check allows missing nupkg", + "actual": "none", + "evidence": rel(nupkg_dir), + }) + else: + records: list[tuple[Path, str, str]] = [] + for nupkg in 
nupkg_files:
+            try:
+                with zipfile.ZipFile(nupkg, "r") as zf:
+                    nuspec_names = sorted([n for n in zf.namelist() if n.endswith(".nuspec")])
+                    if not nuspec_names:
+                        fail("nupkg.nuspec", "present", "missing", rel(nupkg), "nuspec missing in nupkg")
+                        continue
+                    nuspec_content = zf.read(nuspec_names[0]).decode("utf-8", errors="ignore")
+                    mid = re.search(r"<id>([^<]+)</id>", nuspec_content)
+                    mver = re.search(r"<version>([^<]+)</version>", nuspec_content)
+                    nupkg_id = mid.group(1).strip() if mid else ""
+                    nupkg_ver = mver.group(1).strip() if mver else ""
+                    records.append((nupkg, nupkg_id, nupkg_ver))
+            except Exception as ex:
+                fail("nupkg.read", "readable", "failed", rel(nupkg), f"Failed to inspect nupkg: {ex}")
+
+        canonical_records = [r for r in records if r[1] == canonical_package] if canonical_package else records
+        if not canonical_records:
+            fail("nupkg.canonical", canonical_package or "canonical package", "missing", rel(nupkg_dir), "No canonical nupkg found in artifacts/nuget")
+        else:
+            def sem_key(v: str):
+                m = re.match(r"^([0-9]+)\.([0-9]+)\.([0-9]+)", v)
+                if not m:
+                    return (-1, -1, -1, v)
+                return (int(m.group(1)), int(m.group(2)), int(m.group(3)), v)
+
+            chosen = sorted(canonical_records, key=lambda r: (sem_key(r[2]), r[0].name), reverse=True)[0]
+            chosen_path, chosen_id, chosen_ver = chosen
+            check("nupkg.id", canonical_package, chosen_id, rel(chosen_path))
+            if expected_version:
+                check("svt.tag_vs_nupkg.version", expected_version, chosen_ver, rel(chosen_path))
+            elif vbproj_package_version:
+                check("svt.vbproj_vs_nupkg.version", vbproj_package_version, chosen_ver, rel(chosen_path))
+
+    status = "pass" if len(violations) == 0 else "fail"
+    report = {
+        "schema_version": 1,
+        "check_id": "versioning-svt",
+        "status": status,
+        "expected_version": expected_version,
+        "canonical_package_id": canonical_package,
+        "checks": checks,
+        "violations": violations,
+        "mismatches": violations,
+    }
+    summary = {
+        "schema_version": 1,
+        "check_id": "versioning-svt",
+        "status": status,
+        "rule_violations": [
+            {
+                "rule_id": "CI-VERSION-001",
+                "severity": "fail",
+                "message": v["message"],
+                "evidence_paths": [v["evidence"]],
+            }
+            for v in violations
+        ],
+        "evidence_paths": sorted({v["evidence"] for v in violations}),
+        "artifacts": [
+            "artifacts/versioning_report.json",
+            out_path.resolve().relative_to(repo_root).as_posix(),
+        ],
+    }
+    report_path.write_text(json.dumps(report, indent=2, ensure_ascii=True) + "\n", encoding="utf-8")
+    out_path.write_text(json.dumps(summary, indent=2, ensure_ascii=True) + "\n", encoding="utf-8")
+    print(json.dumps(report, indent=2, ensure_ascii=True))
+    return 0 if status == "pass" else 1
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())
diff --git a/tools/ci/bin/dependency-canary.sh b/tools/ci/bin/dependency-canary.sh
new file mode 100755
index 00000000..ff41e4ec
--- /dev/null
+++ b/tools/ci/bin/dependency-canary.sh
@@ -0,0 +1,60 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_DIR="$(cd -- "${SCRIPT_DIR}/../../.." && pwd)"
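+# Canary contract: bump the target dependency in Directory.Packages.props via
+# dependency_canary.py, restore the solution, and run the dependency-specific
+# guard tests configured in tools/ci/policies/data/dependency_canary.json.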
&& pwd)" +PACKAGE_FILE="${ROOT_DIR}/Directory.Packages.props" +CONFIG_FILE="${ROOT_DIR}/tools/ci/policies/data/dependency_canary.json" +TEST_PROJECT="${ROOT_DIR}/tests/FileTypeDetectionLib.Tests/FileTypeDetectionLib.Tests.csproj" + +log() { + printf '%s\n' "$*" +} + +fail() { + printf 'ERROR: %s\n' "$*" >&2 + exit 1 +} + +require_cmd() { + local cmd="$1" + command -v "$cmd" >/dev/null 2>&1 || fail "required command not found: ${cmd}" +} + +main() { + local dependency="${1:-SharpCompress}" + local requested="${2:-latest}" + local prepare_out + local current_version="" + local target_version="" + local updated="" + local test_filter="" + + require_cmd python3 + require_cmd dotnet + + prepare_out="$( + python3 "${ROOT_DIR}/tools/ci/bin/dependency_canary.py" prepare \ + --dependency "${dependency}" \ + --requested "${requested}" \ + --config "${CONFIG_FILE}" \ + --packages-file "${PACKAGE_FILE}" + )" + + while IFS='=' read -r key value; do + case "${key}" in + CURRENT_VERSION) current_version="${value}" ;; + TARGET_VERSION) target_version="${value}" ;; + UPDATED) updated="${value}" ;; + TEST_FILTER) test_filter="${value}" ;; + esac + done <<< "${prepare_out}" + + [[ -n "${current_version}" && -n "${target_version}" && -n "${test_filter}" ]] || fail "invalid prepare output" + + log "INFO: Dependency=${dependency} current=${current_version} target=${target_version} updated=${updated}" + dotnet restore "${ROOT_DIR}/FileClassifier.sln" -v minimal + dotnet test "${TEST_PROJECT}" -c Release -v minimal --filter "${test_filter}" +} + +main "$@" diff --git a/tools/ci/bin/dependency_canary.py b/tools/ci/bin/dependency_canary.py new file mode 100755 index 00000000..2edccf58 --- /dev/null +++ b/tools/ci/bin/dependency_canary.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import re +import sys +import urllib.request +from pathlib import Path + + +def load_config(config_path: Path) -> dict: + return json.loads(config_path.read_text(encoding="utf-8")) + + +def get_dependency(config: dict, name: str) -> dict: + for dep in config.get("dependencies", []): + if dep.get("name") == name: + return dep + raise KeyError(name) + + +def read_current_version(packages_file: Path, dependency: str) -> str: + text = packages_file.read_text(encoding="utf-8") + pattern = re.compile( + rf'()' + ) + match = pattern.search(text) + if not match: + raise ValueError(f"dependency '{dependency}' not found in {packages_file}") + return match.group(2) + + +def resolve_latest_stable_version(dependency: str) -> str: + url = f"https://api.nuget.org/v3-flatcontainer/{dependency.lower()}/index.json" + with urllib.request.urlopen(url, timeout=30) as response: + payload = json.loads(response.read().decode("utf-8")) + versions = [v for v in payload.get("versions", []) if "-" not in v] + if not versions: + raise ValueError(f"no stable version found for {dependency}") + return versions[-1] + + +def apply_version(packages_file: Path, dependency: str, target_version: str) -> bool: + text = packages_file.read_text(encoding="utf-8") + pattern = re.compile( + rf'()' + ) + match = pattern.search(text) + if not match: + raise ValueError(f"dependency '{dependency}' not found in {packages_file}") + + if match.group(2) == target_version: + return False + + updated = pattern.sub(rf"\g<1>{target_version}\g<3>", text, count=1) + packages_file.write_text(updated, encoding="utf-8") + return True + + +def run_prepare(args: argparse.Namespace) -> int: + config = load_config(args.config) + dep = 
get_dependency(config, args.dependency) + current = read_current_version(args.packages_file, args.dependency) + + if args.requested == "latest": + target = resolve_latest_stable_version(args.dependency) + else: + target = args.requested + + changed = apply_version(args.packages_file, args.dependency, target) + print(f"CURRENT_VERSION={current}") + print(f"TARGET_VERSION={target}") + print(f"UPDATED={'1' if changed else '0'}") + print(f"TEST_FILTER={dep.get('test_filter', '')}") + return 0 + + +def build_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser() + sub = parser.add_subparsers(dest="cmd", required=True) + + prepare = sub.add_parser("prepare") + prepare.add_argument("--dependency", required=True) + prepare.add_argument("--requested", default="latest") + prepare.add_argument("--config", type=Path, required=True) + prepare.add_argument("--packages-file", type=Path, required=True) + prepare.set_defaults(func=run_prepare) + + return parser + + +def main() -> int: + parser = build_parser() + args = parser.parse_args() + try: + return args.func(args) + except KeyError as ex: + print(f"ERROR: unknown dependency in config: {ex}", file=sys.stderr) + return 2 + except Exception as ex: # fail-closed for CI scripting + print(f"ERROR: {ex}", file=sys.stderr) + return 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tools/ci/bin/keychain_get_secret.py b/tools/ci/bin/keychain_get_secret.py new file mode 100755 index 00000000..ead85370 --- /dev/null +++ b/tools/ci/bin/keychain_get_secret.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import os +import subprocess +import sys + + +def main() -> int: + parser = argparse.ArgumentParser() + parser.add_argument("--service", required=True) + parser.add_argument("--account", default=os.environ.get("USER", "")) + args = parser.parse_args() + + try: + result = subprocess.run( + ["security", "find-generic-password", "-a", args.account, "-s", args.service, "-w"], + check=False, + capture_output=True, + text=True, + timeout=5, + ) + if result.returncode == 0: + print(result.stdout.strip()) + except Exception: + return 0 + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tools/ci/bin/nuget_migration_helpers.py b/tools/ci/bin/nuget_migration_helpers.py new file mode 100755 index 00000000..065e2e74 --- /dev/null +++ b/tools/ci/bin/nuget_migration_helpers.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import sys +from pathlib import Path + + +def cmd_read_ssot(args: argparse.Namespace) -> int: + obj = json.loads(Path(args.ssot).read_text(encoding="utf-8")) + canon = str(obj.get("package_id", "")) + dep = obj.get("deprecated_package_ids", []) + if not isinstance(dep, list): + dep = [] + print(canon) + for item in dep: + print(str(item)) + return 0 + + +def cmd_extract_versions(args: argparse.Namespace) -> int: + obj = json.loads(args.versions_json) + for v in obj.get("versions", []): + if isinstance(v, str) and v.strip(): + print(v.strip()) + return 0 + + +def main() -> int: + parser = argparse.ArgumentParser() + sub = parser.add_subparsers(dest="cmd", required=True) + + p1 = sub.add_parser("read-ssot") + p1.add_argument("--ssot", required=True) + p1.set_defaults(func=cmd_read_ssot) + + p2 = sub.add_parser("extract-versions") + p2.add_argument("--versions-json", required=True) + p2.set_defaults(func=cmd_extract_versions) + + args = parser.parse_args() + try: + return 
args.func(args) + except Exception as ex: + print(f"ERROR: {ex}", file=sys.stderr) + return 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tools/ci/bin/read_json_field.py b/tools/ci/bin/read_json_field.py new file mode 100755 index 00000000..e992bbc9 --- /dev/null +++ b/tools/ci/bin/read_json_field.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import sys +from pathlib import Path + + +def main() -> int: + parser = argparse.ArgumentParser() + parser.add_argument("--file", required=True, type=Path) + parser.add_argument("--field", required=True) + args = parser.parse_args() + + try: + obj = json.loads(args.file.read_text(encoding="utf-8")) + except Exception as ex: + print(f"ERROR: cannot read json file {args.file}: {ex}", file=sys.stderr) + return 1 + + value = obj.get(args.field, "") + if isinstance(value, (dict, list)): + print("") + return 0 + + print(str(value)) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tools/ci/bin/run.sh b/tools/ci/bin/run.sh index 63bc448b..b2008dd1 100755 --- a/tools/ci/bin/run.sh +++ b/tools/ci/bin/run.sh @@ -96,20 +96,7 @@ read_nupkg_metadata() { read_naming_ssot_field() { local field="$1" local ssot_file="${ROOT_DIR}/tools/ci/policies/data/naming.json" - python3 - "$ssot_file" "$field" <<'PY' -import json -import sys -from pathlib import Path - -ssot_file = Path(sys.argv[1]) -field = sys.argv[2] -obj = json.loads(ssot_file.read_text(encoding="utf-8")) -value = obj.get(field, "") -if isinstance(value, (dict, list)): - print("") -else: - print(str(value)) -PY + python3 "${ROOT_DIR}/tools/ci/bin/read_json_field.py" --file "$ssot_file" --field "$field" } run_policy_runner_bridge() { diff --git a/tools/ci/bin/sharpcompress-canary.sh b/tools/ci/bin/sharpcompress-canary.sh new file mode 100755 index 00000000..102267cd --- /dev/null +++ b/tools/ci/bin/sharpcompress-canary.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash +set -euo pipefail + +SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR="$(cd -- "${SCRIPT_DIR}/../../.." 
&& pwd)" +exec "${ROOT_DIR}/tools/ci/bin/dependency-canary.sh" "SharpCompress" "${1:-latest}" diff --git a/tools/ci/bin/verify_nuget_release_helpers.py b/tools/ci/bin/verify_nuget_release_helpers.py new file mode 100755 index 00000000..807dc50f --- /dev/null +++ b/tools/ci/bin/verify_nuget_release_helpers.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import os +import re +import sys +import xml.etree.ElementTree as ET + + +def cmd_derive_filename(args: argparse.Namespace) -> int: + m = re.match(r"^(?P.+)\.(?P\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?)\.nupkg$", args.filename) + if not m: + print() + print() + return 0 + print(m.group("id")) + print(m.group("ver")) + return 0 + + +def cmd_derive_nuspec(args: argparse.Namespace) -> int: + text = args.nuspec_xml + try: + root = ET.fromstring(text) + + def find_first(node: ET.Element, tag_name: str) -> str: + for elem in node.iter(): + local = elem.tag.rsplit("}", 1)[-1] + if local == tag_name and elem.text: + return elem.text.strip() + return "" + + print(find_first(root, "id")) + print(find_first(root, "version")) + return 0 + except ET.ParseError: + def by_regex(tag: str) -> str: + m = re.search(rf"<{tag}>\s*([^<]+?)\s*", text, flags=re.IGNORECASE) + return m.group(1).strip() if m else "" + + print(by_regex("id")) + print(by_regex("version")) + return 0 + + +def cmd_query_search(args: argparse.Namespace) -> int: + pkg = args.pkg_id.lower() + ver = args.pkg_ver + data = json.loads(args.response_json) + registration = "" + has_id = False + has_ver = False + + for item in data.get("data", []): + item_id = str(item.get("id", "")) + if item_id.lower() != pkg: + continue + has_id = True + for v in item.get("versions", []): + if isinstance(v, dict) and str(v.get("version", "")) == ver: + has_ver = True + break + if item.get("registration"): + registration = str(item["registration"]) + + if not has_id: + print("missing_id", file=sys.stderr) + return 2 + if not has_ver: + print("missing_version", file=sys.stderr) + return 3 + if not registration: + print("missing_registration", file=sys.stderr) + return 4 + print(registration) + return 0 + + +def cmd_registration_contains(args: argparse.Namespace) -> int: + target = args.pkg_ver.lower() + obj = json.loads(args.response_json) + + def walk(node) -> bool: + if isinstance(node, dict): + for k, v in node.items(): + if k.lower() == "version" and isinstance(v, str) and v.lower() == target: + return True + if walk(v): + return True + elif isinstance(node, list): + for item in node: + if walk(item): + return True + return False + + return 0 if walk(obj) else 2 + + +def cmd_emit_summary(_: argparse.Namespace) -> int: + print(json.dumps({ + "id": os.environ.get("PKG_ID", ""), + "version": os.environ.get("PKG_VER", ""), + "expected": os.environ.get("EXPECTED_VERSION", ""), + "verify_online": os.environ.get("VERIFY_ONLINE", ""), + "require_search": os.environ.get("REQUIRE_SEARCH", ""), + "require_registration": os.environ.get("REQUIRE_REGISTRATION", ""), + "require_flatcontainer": os.environ.get("REQUIRE_FLATCONTAINER", ""), + "require_v2_download": os.environ.get("REQUIRE_V2_DOWNLOAD", ""), + "registration": os.environ.get("REGISTRATION_URL", ""), + "search": os.environ.get("SEARCH_OK", "skipped"), + "registration_check": os.environ.get("REGISTRATION_OK", "skipped"), + "flatcontainer": os.environ.get("FLATCONTAINER_OK", "skipped"), + "v2_download": os.environ.get("V2_DOWNLOAD_OK", "skipped") + }, separators=(",", ":"))) + return 0 + + +def main() -> 
int: + parser = argparse.ArgumentParser() + sub = parser.add_subparsers(dest="cmd", required=True) + + p1 = sub.add_parser("derive-filename") + p1.add_argument("--filename", required=True) + p1.set_defaults(func=cmd_derive_filename) + + p2 = sub.add_parser("derive-nuspec") + p2.add_argument("--nuspec-xml", required=True) + p2.set_defaults(func=cmd_derive_nuspec) + + p3 = sub.add_parser("query-search") + p3.add_argument("--response-json", required=True) + p3.add_argument("--pkg-id", required=True) + p3.add_argument("--pkg-ver", required=True) + p3.set_defaults(func=cmd_query_search) + + p4 = sub.add_parser("registration-contains") + p4.add_argument("--response-json", required=True) + p4.add_argument("--pkg-ver", required=True) + p4.set_defaults(func=cmd_registration_contains) + + p5 = sub.add_parser("emit-summary") + p5.set_defaults(func=cmd_emit_summary) + + args = parser.parse_args() + try: + return args.func(args) + except Exception as ex: + print(f"ERROR: {ex}", file=sys.stderr) + return 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tools/ci/check-naming-snt.sh b/tools/ci/check-naming-snt.sh index 5ee1554a..e527c0f2 100755 --- a/tools/ci/check-naming-snt.sh +++ b/tools/ci/check-naming-snt.sh @@ -31,287 +31,4 @@ done mkdir -p "$(dirname -- "${OUT_PATH}")" "${ROOT_DIR}/artifacts" -python3 - "${ROOT_DIR}" "${SSOT_PATH}" "${OUT_PATH}" <<'PY' -import json -import re -import subprocess -import sys -import xml.etree.ElementTree as ET -from pathlib import Path - -repo_root = Path(sys.argv[1]).resolve() -ssot_path = Path(sys.argv[2]).resolve() -out_path = Path(sys.argv[3]).resolve() -report_json_path = repo_root / "artifacts" / "naming_snt_report.json" -report_tsv_path = repo_root / "artifacts" / "naming_snt_report.tsv" - -violations = [] -deprecated_hits = [] -checked_paths = [] - - -def rel(path: Path) -> str: - return path.resolve().relative_to(repo_root).as_posix() - - -def add_violation(scope: str, expected: str, actual: str, evidence: str, message: str) -> None: - violations.append({ - "scope": scope, - "expected": expected, - "actual": actual, - "evidence": evidence, - "message": message, - }) - - -def file_line_hit(path: Path, needle: str) -> str: - try: - lines = path.read_text(encoding="utf-8").splitlines() - except Exception: - return f"{rel(path)}:1" - for idx, line in enumerate(lines, start=1): - if needle in line: - return f"{rel(path)}:{idx}" - return f"{rel(path)}:1" - - -def normalize_repo_url(url: str) -> str: - text = url.strip() - if text.startswith("git@github.com:"): - text = "https://github.com/" + text.split(":", 1)[1] - text = re.sub(r"^ssh://git@github.com/", "https://github.com/", text) - text = re.sub(r"\.git$", "", text) - return text.rstrip("/") - - -if not ssot_path.exists(): - add_violation("ssot_file", str(ssot_path), "missing", str(ssot_path), "SSOT file missing") - report = { - "schema_version": 1, - "check_id": "naming-snt", - "status": "fail", - "canonical": {}, - "violations": violations, - "deprecated_hits": deprecated_hits, - "file_counts": {}, - "checked_paths": [], - "mismatches": violations, - } - report_json_path.write_text(json.dumps(report, indent=2, ensure_ascii=True) + "\n", encoding="utf-8") - out_path.write_text(json.dumps({ - "schema_version": 1, - "check_id": "naming-snt", - "status": "fail", - "rule_violations": [{ - "rule_id": "CI-NAMING-001", - "severity": "fail", - "message": "SSOT file missing", - "evidence_paths": [str(ssot_path)], - }], - "evidence_paths": [str(ssot_path)], - "artifacts": 
["artifacts/naming_snt_report.json", "artifacts/naming_snt_report.tsv"], - }, indent=2, ensure_ascii=True) + "\n", encoding="utf-8") - sys.exit(1) - -ssot = json.loads(ssot_path.read_text(encoding="utf-8")) -canonical_name = str(ssot.get("canonical_name", "")) -package_id = str(ssot.get("package_id", "")) -root_namespace = str(ssot.get("root_namespace", "")) -assembly_name = str(ssot.get("assembly_name", "")) -repo_identity = str(ssot.get("repo_identity", ssot.get("repo_slug", ""))) -repo_slug = str(ssot.get("repo_slug", repo_identity)) -repo_display_name = str(ssot.get("repo_display_name", "")) -repository_url = str(ssot.get("repository_url", "")) -deprecated_package_ids = ssot.get("deprecated_package_ids", []) -namespace_decision = ssot.get("namespace_decision", {}) -target_root_namespace = str(namespace_decision.get("target_root_namespace", "")) -docs_expected_links = ssot.get("docs_expected_links", []) - -# Canonical equality constraints -for scope, expected, actual in [ - ("canonical_vs_package_id", canonical_name, package_id), - ("canonical_vs_root_namespace", canonical_name, root_namespace), - ("canonical_vs_assembly_name", canonical_name, assembly_name), -]: - if expected != actual: - add_violation(scope, expected, actual, rel(ssot_path), f"{scope} mismatch") - -if not isinstance(deprecated_package_ids, list) or len(deprecated_package_ids) == 0: - add_violation("deprecated_package_ids", "non-empty list", str(deprecated_package_ids), rel(ssot_path), "deprecated_package_ids must be non-empty") - -if canonical_name in deprecated_package_ids: - add_violation("deprecated_package_ids", "canonical absent", canonical_name, rel(ssot_path), "canonical_name must not be deprecated") - -remote_raw = "" -try: - remote_raw = subprocess.check_output(["git", "-C", str(repo_root), "remote", "get-url", "origin"], text=True).strip() -except Exception as ex: - add_violation("git_remote", "origin configured", "missing", ".git/config", f"cannot read origin remote: {ex}") - -normalized_remote = normalize_repo_url(remote_raw) if remote_raw else "" -normalized_repo = normalize_repo_url(repository_url) -if normalized_remote and normalized_repo and normalized_remote != normalized_repo: - add_violation("repository_url.matches_origin", normalized_repo, normalized_remote, ".git/config", "repository_url differs from origin URL") - -if normalized_remote: - m = re.search(r"/([^/]+)$", normalized_remote) - remote_slug = m.group(1) if m else "" - if repo_slug and remote_slug != repo_slug: - add_violation("repo_slug.matches_remote_slug", repo_slug, remote_slug, ".git/config", "repo_slug differs from remote slug") - -project_path = repo_root / "src" / "FileTypeDetection" / "FileTypeDetectionLib.vbproj" -checked_paths.append(rel(project_path)) -if not project_path.exists(): - add_violation("vbproj.exists", rel(project_path), "missing", rel(project_path), "vbproj missing") -else: - tree = ET.parse(project_path) - proj_root = tree.getroot() - values = {} - for elem in proj_root.iter(): - tag = elem.tag.split("}")[-1] - if tag in {"PackageId", "RootNamespace", "AssemblyName"} and elem.text and tag not in values: - values[tag] = elem.text.strip() - if values.get("PackageId", "") != package_id: - add_violation("vbproj.PackageId", package_id, values.get("PackageId", ""), file_line_hit(project_path, ""), "vbproj PackageId mismatch") - if values.get("RootNamespace", "") != root_namespace: - add_violation("vbproj.RootNamespace", root_namespace, values.get("RootNamespace", ""), file_line_hit(project_path, ""), "vbproj 
RootNamespace mismatch") - if values.get("AssemblyName", "") != assembly_name: - add_violation("vbproj.AssemblyName", assembly_name, values.get("AssemblyName", ""), file_line_hit(project_path, ""), "vbproj AssemblyName mismatch") - -# Namespace declarations -namespace_files = sorted((repo_root / "src" / "FileTypeDetection").rglob("*.vb")) -for vb in namespace_files: - checked_paths.append(rel(vb)) - lines = vb.read_text(encoding="utf-8").splitlines() - for idx, line in enumerate(lines, start=1): - m = re.match(r"^\s*Namespace\s+([A-Za-z_][A-Za-z0-9_.]*)\s*$", line) - if not m: - continue - ns = re.sub(r"^Global\.", "", m.group(1)) - if target_root_namespace and not ns.startswith(target_root_namespace): - add_violation("code.namespace.prefix", target_root_namespace + "*", ns, f"{rel(vb)}:{idx}", "Namespace must start with target_root_namespace") - -scan_paths = [ - repo_root / "README.md", - repo_root / "docs", - repo_root / "samples", - repo_root / "tests", -] - -docs_required = {"README.md"} -if isinstance(docs_expected_links, list): - for link in docs_expected_links: - if not isinstance(link, str): - continue - marker = "/blob/main/" - if marker in link: - docs_required.add(link.split(marker, 1)[1]) - -canonical_required_paths = set(docs_required) | { - "samples/PortableConsumer/PortableConsumer.csproj", - "tests/PackageBacked.Tests/PackageBacked.Tests.csproj", -} -for base in scan_paths: - if not base.exists(): - continue - files = [base] if base.is_file() else sorted(p for p in base.rglob("*") if p.is_file()) - for path in files: - if path.suffix.lower() not in {".md", ".csproj", ".cs", ".vb", ".txt", ".json", ".yml", ".yaml"}: - continue - rpath = rel(path) - checked_paths.append(rpath) - text = path.read_text(encoding="utf-8", errors="ignore") - if package_id not in text and rpath in canonical_required_paths: - add_violation("canonical_reference", package_id, "missing", rpath, "Canonical package reference missing in required install/consumer file") - -# Deprecated IDs must not appear in install snippets except migration guide. 
-install_targets = set(docs_required) -migration_doc_rel = "docs/guides/004_GUIDE_MIGRATE_LEGACY_NUGET.MD" -for rpath in sorted(install_targets): - path = repo_root / rpath - if not path.exists(): - continue - text = path.read_text(encoding="utf-8", errors="ignore") - lines = text.splitlines() - for dep in deprecated_package_ids if isinstance(deprecated_package_ids, list) else []: - if not isinstance(dep, str) or not dep: - continue - in_install_snippet = False - for line in lines: - if dep in line and ("dotnet add package" in line or "PackageReference" in line): - in_install_snippet = True - break - if in_install_snippet: - hit = { - "id": dep, - "evidence": file_line_hit(path, dep), - } - deprecated_hits.append(hit) - if rpath != migration_doc_rel: - add_violation("deprecated_id_in_install_docs", "absent", dep, hit["evidence"], "Deprecated package id appears in install docs") - -file_counts = { - "checked_paths": len(sorted(set(checked_paths))), - "violations": len(violations), - "deprecated_hits": len(deprecated_hits), -} -status = "pass" if len(violations) == 0 else "fail" - -report = { - "schema_version": 1, - "check_id": "naming-snt", - "status": status, - "canonical": { - "canonical_name": canonical_name, - "package_id": package_id, - "root_namespace": root_namespace, - "assembly_name": assembly_name, - "repo_identity": repo_identity, - "repo_slug": repo_slug, - "repo_display_name": repo_display_name, - "repository_url": repository_url, - "deprecated_package_ids": deprecated_package_ids, - "ssot_file": rel(ssot_path), - }, - "violations": violations, - "deprecated_hits": deprecated_hits, - "file_counts": file_counts, - "checked_paths": sorted(set(checked_paths)), - "mismatches": violations, -} - -summary = { - "schema_version": 1, - "check_id": "naming-snt", - "status": status, - "rule_violations": [ - { - "rule_id": "CI-NAMING-001", - "severity": "fail", - "message": v["message"], - "evidence_paths": [v["evidence"]], - } - for v in violations - ], - "evidence_paths": sorted({v["evidence"] for v in violations}), - "artifacts": [ - "artifacts/naming_snt_report.json", - "artifacts/naming_snt_report.tsv", - out_path.resolve().relative_to(repo_root).as_posix(), - ], -} - -report_json_path.write_text(json.dumps(report, indent=2, ensure_ascii=True) + "\n", encoding="utf-8") -out_path.write_text(json.dumps(summary, indent=2, ensure_ascii=True) + "\n", encoding="utf-8") - -rows = ["scope\texpected\tactual\tevidence\tmessage"] -for v in violations: - rows.append(f"{v['scope']}\t{v['expected']}\t{v['actual']}\t{v['evidence']}\t{v['message']}") -for d in deprecated_hits: - rows.append(f"deprecated_hit\t{d['id']}\t{d['id']}\t{d['evidence']}\tdeprecated id hit") -report_tsv_path.write_text("\n".join(rows) + "\n", encoding="utf-8") - -print(json.dumps(report, indent=2, ensure_ascii=True)) -if status != "pass": - sys.exit(1) -PY +python3 "${ROOT_DIR}/tools/ci/bin/check_naming_snt.py" "${ROOT_DIR}" "${SSOT_PATH}" "${OUT_PATH}" diff --git a/tools/ci/check-versioning-svt.sh b/tools/ci/check-versioning-svt.sh index 32b74b7b..ddfb732b 100755 --- a/tools/ci/check-versioning-svt.sh +++ b/tools/ci/check-versioning-svt.sh @@ -40,281 +40,4 @@ OUT_PATH="${OUT_PATH:-${ROOT_DIR}/artifacts/ci/versioning-svt/versioning-svt-sum mkdir -p "${ROOT_DIR}/artifacts" "$(dirname -- "${OUT_PATH}")" -python3 - "${ROOT_DIR}" "${NAMING_SSOT}" "${VERSIONING_SSOT}" "${OUT_PATH}" <<'PY' -import json -import os -import re -import subprocess -import sys -import zipfile -import xml.etree.ElementTree as ET -from pathlib import Path 
- -repo_root = Path(sys.argv[1]).resolve() -naming_ssot = Path(sys.argv[2]).resolve() -versioning_ssot = Path(sys.argv[3]).resolve() -out_path = Path(sys.argv[4]).resolve() -report_path = repo_root / "artifacts" / "versioning_report.json" - -violations = [] -checks = [] -require_release_tag = os.environ.get("REQUIRE_RELEASE_TAG", "0") == "1" -expected_release_tag = os.environ.get("EXPECTED_RELEASE_TAG", "").strip() -release_tag_regex = re.compile(r"^v[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$") - - -def semver_core(version: str) -> str: - m = re.match(r"^([0-9]+\.[0-9]+\.[0-9]+)(?:-[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?$", version or "") - return m.group(1) if m else "" - - -def rel(p: Path) -> str: - return p.resolve().relative_to(repo_root).as_posix() - - -def fail(scope: str, expected: str, actual: str, evidence: str, message: str) -> None: - violations.append({ - "scope": scope, - "expected": expected, - "actual": actual, - "evidence": evidence, - "message": message, - }) - - -def check(scope: str, expected: str, actual: str, evidence: str) -> None: - status = "pass" if expected == actual else "fail" - checks.append({ - "scope": scope, - "status": status, - "expected": expected, - "actual": actual, - "evidence": evidence, - }) - if status == "fail": - fail(scope, expected, actual, evidence, f"Mismatch in {scope}") - - -if not naming_ssot.exists(): - fail("ssot.naming", str(naming_ssot), "missing", str(naming_ssot), "Naming SSOT missing") -if not versioning_ssot.exists(): - fail("ssot.versioning", str(versioning_ssot), "missing", str(versioning_ssot), "Versioning SSOT missing") - -naming = {} -versioning = {} -if naming_ssot.exists(): - naming = json.loads(naming_ssot.read_text(encoding="utf-8")) -if versioning_ssot.exists(): - versioning = json.loads(versioning_ssot.read_text(encoding="utf-8")) - -canonical = str(naming.get("canonical_name", "")) -canonical_package = str(naming.get("package_id", "")) -check("identity.canonical_vs_package", canonical, canonical_package, rel(naming_ssot) if naming_ssot.exists() else str(naming_ssot)) - -project_files = versioning.get("project_files", []) -required_fields = versioning.get("require_vbproj_version_fields", []) -if not isinstance(project_files, list) or len(project_files) == 0: - fail("versioning.project_files", "non-empty list", str(project_files), rel(versioning_ssot) if versioning_ssot.exists() else str(versioning_ssot), "project_files must be configured") - -if not isinstance(required_fields, list) or len(required_fields) == 0: - fail("versioning.require_vbproj_version_fields", "non-empty list", str(required_fields), rel(versioning_ssot) if versioning_ssot.exists() else str(versioning_ssot), "require_vbproj_version_fields must be configured") - -head_tags = [] -try: - raw_tags = subprocess.check_output(["git", "-C", str(repo_root), "tag", "--points-at", "HEAD"], text=True) - for line in raw_tags.splitlines(): - t = line.strip() - if release_tag_regex.match(t): - head_tags.append(t) -except Exception as ex: - fail("git.tag_lookup", "command succeeds", "failed", ".git", f"Failed to resolve HEAD tags: {ex}") - -head_tags = sorted(set(head_tags)) -expected_version = "" -# workflow_dispatch releases provide EXPECTED_RELEASE_TAG explicitly. -# tag-push releases derive the expected version from the exact release tag on HEAD. 
-if expected_release_tag: - if not release_tag_regex.match(expected_release_tag): - fail("svt.expected_release_tag", "vX.Y.Z[-prerelease]", expected_release_tag, "env:EXPECTED_RELEASE_TAG", "Invalid EXPECTED_RELEASE_TAG format") - else: - expected_version = expected_release_tag[1:] - checks.append({ - "scope": "svt.expected_release_tag", - "status": "pass", - "expected": expected_release_tag, - "actual": expected_release_tag, - "evidence": "env:EXPECTED_RELEASE_TAG", - }) -elif len(head_tags) == 0: - if require_release_tag: - fail("svt.head_tag", "exactly one tag vX.Y.Z[-prerelease] on HEAD", "none", ".git", "No exact release tag on HEAD") -elif len(head_tags) > 1: - fail("svt.head_tag", "exactly one tag vX.Y.Z[-prerelease] on HEAD", ",".join(head_tags), ".git", "Multiple release tags on HEAD") -else: - expected_version = head_tags[0][1:] -# --- repo SSOT version consistency (no mixed versions) --- -def read_repo_version(props_path: Path) -> str: - if not props_path.exists(): - fail("repo.props.exists", "Directory.Build.props present", "missing", str(props_path), "Directory.Build.props missing") - return "" - txt = props_path.read_text(encoding="utf-8", errors="ignore") - m = re.search(r"<RepoVersion>\s*([^<\s]+)\s*</RepoVersion>", txt) - return m.group(1).strip() if m else "" - -def check_csproj_uses_repo_version(csproj: Path, prop_name: str) -> None: - if not csproj.exists(): - fail(f"repo.csproj.exists.{csproj.name}", "present", "missing", rel(csproj), f"{csproj.name} missing") - return - txt = csproj.read_text(encoding="utf-8", errors="ignore") - # enforce: default property value must be $(RepoVersion), not a literal - pat = rf"<{re.escape(prop_name)}\s+Condition=\"'\$\({re.escape(prop_name)}\)'\s*==\s*''\">([^<]+)" - m = re.search(pat, txt) - if not m: - fail(f"repo.csproj.{csproj.name}.{prop_name}", "default property present", "missing", rel(csproj), f"Default {prop_name} missing") - return - actual = m.group(1).strip() - check(f"repo.csproj.{csproj.name}.{prop_name}", "$(RepoVersion)", actual, rel(csproj)) - -repo_props = repo_root / "Directory.Build.props" -repo_version = read_repo_version(repo_props) -if repo_version == "": - fail("repo.ssot.RepoVersion", "non-empty", "empty", rel(repo_props), "RepoVersion missing in Directory.Build.props") -else: - # basic semver guard to avoid garbage - if not re.match(r"^[0-9]+\.[0-9]+\.[0-9]+$", repo_version): - fail("repo.ssot.RepoVersion.semver", "X.Y.Z", repo_version, rel(repo_props), "RepoVersion is not semver X.Y.Z") - # if tag defines the release version, RepoVersion must match exactly - if expected_version: - expected_core_version = semver_core(expected_version) - if expected_core_version == "": - fail("repo.ssot.RepoVersion.tag.semver", "X.Y.Z[-prerelease]", expected_version, "env:EXPECTED_RELEASE_TAG", "Expected release version is not semver-compatible") - elif "-" in expected_version: - check("repo.ssot.RepoVersion.core", expected_core_version, repo_version, rel(repo_props)) - else: - check("repo.ssot.RepoVersion", expected_version, repo_version, rel(repo_props)) - -check_csproj_uses_repo_version(repo_root / "samples" / "PortableConsumer" / "PortableConsumer.csproj", "PortableConsumerPackageVersion") -check_csproj_uses_repo_version(repo_root / "tests" / "PackageBacked.Tests" / "PackageBacked.Tests.csproj", "PackageBackedVersion") -# --- end repo SSOT version consistency --- - -vbproj_version = "" -vbproj_package_version = "" -project_path = None -if isinstance(project_files, list) and len(project_files) > 0: - project_path = (repo_root / 
str(project_files[0])).resolve() - if not project_path.exists(): - fail("vbproj.exists", str(project_path), "missing", str(project_path), "Configured vbproj missing") - else: - tree = ET.parse(project_path) - root = tree.getroot() - for elem in root.iter(): - tag = elem.tag.split("}")[-1] - if tag == "Version" and elem.text: - vbproj_version = elem.text.strip() - if tag == "PackageVersion" and elem.text: - vbproj_package_version = elem.text.strip() - - if "Version" in required_fields and vbproj_version == "": - fail("vbproj.Version", "non-empty", "empty", rel(project_path), "Version field missing in vbproj") - if "PackageVersion" in required_fields and vbproj_package_version == "": - fail("vbproj.PackageVersion", "non-empty", "empty", rel(project_path), "PackageVersion field missing in vbproj") - if vbproj_version and vbproj_package_version: - check("vbproj.Version_vs_PackageVersion", vbproj_version, vbproj_package_version, rel(project_path)) - -if expected_version and project_path is not None and project_path.exists(): - expected_core_version = semver_core(expected_version) - if expected_core_version == "": - fail("svt.expected_version.semver", "X.Y.Z[-prerelease]", expected_version, "env:EXPECTED_RELEASE_TAG", "Derived expected version is not semver-compatible") - elif "-" in expected_version: - check("svt.tag_core_vs_vbproj.Version", expected_core_version, vbproj_version, rel(project_path)) - check("svt.tag_core_vs_vbproj.PackageVersion", expected_core_version, vbproj_package_version, rel(project_path)) - else: - check("svt.tag_vs_vbproj.Version", expected_version, vbproj_version, rel(project_path)) - check("svt.tag_vs_vbproj.PackageVersion", expected_version, vbproj_package_version, rel(project_path)) - -nupkg_dir = repo_root / "artifacts" / "nuget" -nupkg_files = sorted([p for p in nupkg_dir.glob("*.nupkg") if not p.name.endswith(".snupkg")]) -if len(nupkg_files) == 0: - if require_release_tag: - fail("nupkg.exists", "at least one nupkg in artifacts/nuget", "none", rel(nupkg_dir), "No nupkg found for SVT verification") - else: - checks.append({ - "scope": "nupkg.exists", - "status": "pass", - "expected": "pre-pack check allows missing nupkg", - "actual": "none", - "evidence": rel(nupkg_dir), - }) -else: - records = [] - for nupkg in nupkg_files: - try: - with zipfile.ZipFile(nupkg, "r") as zf: - nuspec_names = sorted([n for n in zf.namelist() if n.endswith(".nuspec")]) - if not nuspec_names: - fail("nupkg.nuspec", "present", "missing", rel(nupkg), "nuspec missing in nupkg") - continue - nuspec_content = zf.read(nuspec_names[0]).decode("utf-8", errors="ignore") - mid = re.search(r"<id>([^<]+)</id>", nuspec_content) - mver = re.search(r"<version>([^<]+)</version>", nuspec_content) - nupkg_id = mid.group(1).strip() if mid else "" - nupkg_ver = mver.group(1).strip() if mver else "" - records.append((nupkg, nupkg_id, nupkg_ver)) - except Exception as ex: - fail("nupkg.read", "readable", "failed", rel(nupkg), f"Failed to inspect nupkg: {ex}") - - canonical_records = [r for r in records if r[1] == canonical_package] if canonical_package else records - if not canonical_records: - fail("nupkg.canonical", canonical_package or "canonical package", "missing", rel(nupkg_dir), "No canonical nupkg found in artifacts/nuget") - else: - # Deterministic pick: highest semantic core version, then lexicographic filename. 
- def sem_key(v: str): - m = re.match(r"^([0-9]+)\.([0-9]+)\.([0-9]+)", v) - if not m: - return (-1, -1, -1, v) - return (int(m.group(1)), int(m.group(2)), int(m.group(3)), v) - - chosen = sorted(canonical_records, key=lambda r: (sem_key(r[2]), r[0].name), reverse=True)[0] - chosen_path, chosen_id, chosen_ver = chosen - check("nupkg.id", canonical_package, chosen_id, rel(chosen_path)) - if expected_version: - check("svt.tag_vs_nupkg.version", expected_version, chosen_ver, rel(chosen_path)) - elif vbproj_package_version: - check("svt.vbproj_vs_nupkg.version", vbproj_package_version, chosen_ver, rel(chosen_path)) - -status = "pass" if len(violations) == 0 else "fail" -report = { - "schema_version": 1, - "check_id": "versioning-svt", - "status": status, - "expected_version": expected_version, - "canonical_package_id": canonical_package, - "checks": checks, - "violations": violations, - "mismatches": violations, -} -summary = { - "schema_version": 1, - "check_id": "versioning-svt", - "status": status, - "rule_violations": [ - { - "rule_id": "CI-VERSION-001", - "severity": "fail", - "message": v["message"], - "evidence_paths": [v["evidence"]], - } - for v in violations - ], - "evidence_paths": sorted({v["evidence"] for v in violations}), - "artifacts": [ - "artifacts/versioning_report.json", - out_path.resolve().relative_to(repo_root).as_posix(), - ], -} -report_path.write_text(json.dumps(report, indent=2, ensure_ascii=True) + "\n", encoding="utf-8") -out_path.write_text(json.dumps(summary, indent=2, ensure_ascii=True) + "\n", encoding="utf-8") -print(json.dumps(report, indent=2, ensure_ascii=True)) -if status != "pass": - sys.exit(1) -PY +python3 "${ROOT_DIR}/tools/ci/bin/check_versioning_svt.py" "${ROOT_DIR}" "${NAMING_SSOT}" "${VERSIONING_SSOT}" "${OUT_PATH}" diff --git a/tools/ci/nuget-migrate-legacy-package.sh b/tools/ci/nuget-migrate-legacy-package.sh index e5374599..70df3101 100755 --- a/tools/ci/nuget-migrate-legacy-package.sh +++ b/tools/ci/nuget-migrate-legacy-package.sh @@ -31,19 +31,7 @@ if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then fi read_ssot() { - python3 - "$SSOT_FILE" <<'PY' -import json -import sys -from pathlib import Path -obj = json.loads(Path(sys.argv[1]).read_text(encoding='utf-8')) -canon = str(obj.get('package_id', '')) -dep = obj.get('deprecated_package_ids', []) -if not isinstance(dep, list): - dep = [] -print(canon) -for item in dep: - print(str(item)) -PY + python3 "${REPO_ROOT}/tools/ci/bin/nuget_migration_helpers.py" read-ssot --ssot "$SSOT_FILE" } mapfile -t ssot_lines < <(read_ssot) @@ -98,15 +86,7 @@ for LEGACY_PACKAGE_ID in "${DEPRECATED_IDS[@]}"; do exit 1 } - mapfile -t versions < <(VERSIONS_JSON="${versions_json}" python3 - <<'PY' -import json -import os -obj = json.loads(os.environ.get('VERSIONS_JSON', '{}')) -for v in obj.get('versions', []): - if isinstance(v, str) and v.strip(): - print(v.strip()) -PY -) + mapfile -t versions < <(python3 "${REPO_ROOT}/tools/ci/bin/nuget_migration_helpers.py" extract-versions --versions-json "${versions_json}") if [[ "${#versions[@]}" -eq 0 ]]; then echo "FAIL: no versions found for '${LEGACY_PACKAGE_ID}'" | tee -a "${ARTIFACT_PLAN}" >&2 diff --git a/tools/ci/policies/data/dependency_canary.json b/tools/ci/policies/data/dependency_canary.json new file mode 100644 index 00000000..f850d36f --- /dev/null +++ b/tools/ci/policies/data/dependency_canary.json @@ -0,0 +1,19 @@ +{ + "dependencies": [ + { + "name": "SharpCompress", + "risk": "archive-parser", + "test_filter": 
"SharpCompress|ArchiveInternalsNestedBranchUnitTests|CoreAndArchiveInternalsFailClosedUnitTests|ArchiveSharpCompressCompatUnitTests" + }, + { + "name": "Microsoft.IO.RecyclableMemoryStream", + "risk": "memory-stream-core", + "test_filter": "ArchiveStreamEngineUnitTests|ArchiveManagedBackendUnitTests|ArchiveProcessingFacadeUnitTests|FileMaterializerUnitTests" + }, + { + "name": "System.Text.Json", + "risk": "json-input-parser", + "test_filter": "FileTypeOptionsFacadeUnitTests|FileTypeDetectorAdditionalUnitTests|FileTypeDetectorPrivateBranchUnitTests|FixtureManifestCatalogUnitTests" + } + ] +} diff --git a/tools/ci/publish_nuget_local.sh b/tools/ci/publish_nuget_local.sh index afe9023e..68c933a8 100755 --- a/tools/ci/publish_nuget_local.sh +++ b/tools/ci/publish_nuget_local.sh @@ -36,23 +36,7 @@ fi if [[ -z "${NUGET_API_KEY}" ]]; then NUGET_API_KEY="$( - python3 - <<'PY' || true -import subprocess -import os -try: - user = os.environ.get("USER", "") - result = subprocess.run( - ["security", "find-generic-password", "-a", user, "-s", "NUGET_API_KEY", "-w"], - check=False, - capture_output=True, - text=True, - timeout=5, - ) - if result.returncode == 0: - print(result.stdout.strip()) -except Exception: - pass -PY + python3 "${REPO_ROOT}/tools/ci/bin/keychain_get_secret.py" --service "NUGET_API_KEY" || true )" fi [[ -n "${NUGET_API_KEY}" ]] || fail "No NuGet API key found (env NUGET_API_KEY or macOS keychain service 'NUGET_API_KEY')." diff --git a/tools/ci/release/assert_naming_summary_pass.sh b/tools/ci/release/assert_naming_summary_pass.sh index ffdc216f..3dd1e932 100755 --- a/tools/ci/release/assert_naming_summary_pass.sh +++ b/tools/ci/release/assert_naming_summary_pass.sh @@ -3,14 +3,7 @@ set -euo pipefail summary_path="${1:-artifacts/nuget/naming-snt-summary.json}" [[ -f "${summary_path}" ]] || { echo "Missing naming summary: ${summary_path}" >&2; exit 1; } -status="$(python3 - "${summary_path}" <<'PY' -import json -import sys -from pathlib import Path -obj = json.loads(Path(sys.argv[1]).read_text(encoding='utf-8')) -print(obj.get('status', '')) -PY -)" +status="$(python3 tools/ci/release/read_summary_status.py --summary "${summary_path}")" if [[ "${status}" != "pass" ]]; then echo "Naming summary status is '${status}' (expected 'pass')." 
>&2 exit 1 diff --git a/tools/ci/release/read_summary_status.py b/tools/ci/release/read_summary_status.py new file mode 100755 index 00000000..2a6c3ba1 --- /dev/null +++ b/tools/ci/release/read_summary_status.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import sys +from pathlib import Path + + +def main() -> int: + parser = argparse.ArgumentParser() + parser.add_argument("--summary", required=True) + args = parser.parse_args() + try: + obj = json.loads(Path(args.summary).read_text(encoding="utf-8")) + except Exception as ex: + print(f"ERROR: {ex}", file=sys.stderr) + return 1 + print(obj.get("status", "")) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tools/ci/release/resolve_workflow_run_release_meta.sh b/tools/ci/release/resolve_workflow_run_release_meta.sh index 6d485f36..1b594133 100755 --- a/tools/ci/release/resolve_workflow_run_release_meta.sh +++ b/tools/ci/release/resolve_workflow_run_release_meta.sh @@ -19,14 +19,10 @@ fi release_version="${release_tag#v}" verify_log_path="artifacts/ci/nuget-online-convergence/verify.log" -package_id="$(python3 - <<'PY' -import json -from pathlib import Path -p = Path("tools/ci/policies/data/naming.json") -obj = json.loads(p.read_text(encoding="utf-8")) -print(obj.get("package_id", "Tomtastisch.FileClassifier")) -PY -)" +package_id="$(python3 tools/ci/bin/read_json_field.py --file tools/ci/policies/data/naming.json --field package_id)" +if [[ -z "${package_id}" ]]; then + package_id="Tomtastisch.FileClassifier" +fi mkdir -p artifacts/ci/nuget-online-convergence { diff --git a/tools/ci/verify_nuget_release.sh b/tools/ci/verify_nuget_release.sh index 70d7d68e..e4cec676 100755 --- a/tools/ci/verify_nuget_release.sh +++ b/tools/ci/verify_nuget_release.sh @@ -126,19 +126,7 @@ resolve_nupkg_path() { derive_from_filename() { local filename filename="$(basename "${NUPKG_PATH}")" - python3 - "$filename" <<'PY' -import re -import sys - -name = sys.argv[1] -m = re.match(r'^(?P<id>.+)\.(?P<ver>\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?)\.nupkg$', name) -if not m: - print() - print() - sys.exit(0) -print(m.group("id")) -print(m.group("ver")) -PY + python3 tools/ci/bin/verify_nuget_release_helpers.py derive-filename --filename "$filename" } derive_from_nuspec() { @@ -146,34 +134,7 @@ derive_from_nuspec() { nuspec_xml="$(unzip -p "${NUPKG_PATH}" '*.nuspec' 2>/dev/null)" || fail "Unable to read .nuspec from ${NUPKG_PATH}" local parsed - parsed="$(NUSPEC_XML="${nuspec_xml}" python3 - <<'PY' -import re -import sys -import xml.etree.ElementTree as ET -import os - -text = os.environ.get("NUSPEC_XML", "") -try: - root = ET.fromstring(text) -except ET.ParseError: - def by_regex(tag): - m = re.search(rf'<{tag}>\s*([^<]+?)\s*</{tag}>', text, flags=re.IGNORECASE) - return m.group(1).strip() if m else "" - print(by_regex("id")) - print(by_regex("version")) - sys.exit(0) - -def find_first(node, tag_name): - for elem in node.iter(): - local = elem.tag.rsplit('}', 1)[-1] - if local == tag_name and elem.text: - return elem.text.strip() - return "" - -print(find_first(root, "id")) -print(find_first(root, "version")) -PY -)" + parsed="$(python3 tools/ci/bin/verify_nuget_release_helpers.py derive-nuspec --nuspec-xml "${nuspec_xml}")" printf '%s\n' "${parsed}" } @@ -186,42 +147,7 @@ query_search() { response="$(curl -fsS --compressed --max-time "${TIMEOUT_SECONDS}" "${SEARCH_URL}")" || return 1 local out - out="$(SEARCH_RESPONSE="${response}" python3 - "$PKG_ID" "$PKG_VER" <<'PY' -import json -import 
sys -import os - -pkg = sys.argv[1].lower() -ver = sys.argv[2] -data = json.loads(os.environ.get("SEARCH_RESPONSE", "{}")) -registration = "" -has_id = False -has_ver = False - -for item in data.get("data", []): - item_id = str(item.get("id", "")) - if item_id.lower() != pkg: - continue - has_id = True - for v in item.get("versions", []): - if isinstance(v, dict) and str(v.get("version", "")) == ver: - has_ver = True - break - if item.get("registration"): - registration = str(item["registration"]) - -if not has_id: - print("missing_id", file=sys.stderr) - sys.exit(2) -if not has_ver: - print("missing_version", file=sys.stderr) - sys.exit(3) -if not registration: - print("missing_registration", file=sys.stderr) - sys.exit(4) -print(registration) -PY -)" || return 1 + out="$(python3 tools/ci/bin/verify_nuget_release_helpers.py query-search --response-json "${response}" --pkg-id "$PKG_ID" --pkg-ver "$PKG_VER")" || return 1 REGISTRATION_URL="${out}" SEARCH_OK="ok" @@ -238,33 +164,7 @@ query_registration() { local response response="$(curl -fsS --compressed --max-time "${TIMEOUT_SECONDS}" "${REGISTRATION_URL}")" || return 1 - REGISTRATION_RESPONSE="${response}" python3 - "$PKG_VER" <<'PY' >/dev/null || return 1 -import json -import sys -import os - -target = sys.argv[1].lower() -obj = json.loads(os.environ.get("REGISTRATION_RESPONSE", "{}")) -found = False - -def walk(node): - global found - if found: - return - if isinstance(node, dict): - for k, v in node.items(): - if k.lower() == "version" and isinstance(v, str) and v.lower() == target: - found = True - return - walk(v) - elif isinstance(node, list): - for item in node: - walk(item) - -walk(obj) -if not found: - sys.exit(2) -PY + python3 tools/ci/bin/verify_nuget_release_helpers.py registration-contains --response-json "${response}" --pkg-ver "$PKG_VER" >/dev/null || return 1 REGISTRATION_OK="ok" return 0 @@ -304,26 +204,7 @@ query_v2_download() { } emit_summary_json() { - python3 - <<'PY' -import json -import os - -print(json.dumps({ - "id": os.environ.get("PKG_ID", ""), - "version": os.environ.get("PKG_VER", ""), - "expected": os.environ.get("EXPECTED_VERSION", ""), - "verify_online": os.environ.get("VERIFY_ONLINE", ""), - "require_search": os.environ.get("REQUIRE_SEARCH", ""), - "require_registration": os.environ.get("REQUIRE_REGISTRATION", ""), - "require_flatcontainer": os.environ.get("REQUIRE_FLATCONTAINER", ""), - "require_v2_download": os.environ.get("REQUIRE_V2_DOWNLOAD", ""), - "registration": os.environ.get("REGISTRATION_URL", ""), - "search": os.environ.get("SEARCH_OK", "skipped"), - "registration_check": os.environ.get("REGISTRATION_OK", "skipped"), - "flatcontainer": os.environ.get("FLATCONTAINER_OK", "skipped"), - "v2_download": os.environ.get("V2_DOWNLOAD_OK", "skipped") -}, separators=(",", ":"))) -PY + python3 tools/ci/bin/verify_nuget_release_helpers.py emit-summary } main() { diff --git a/tools/test-bdd-readable.sh b/tools/test-bdd-readable.sh index ddce4e56..267739b1 100644 --- a/tools/test-bdd-readable.sh +++ b/tools/test-bdd-readable.sh @@ -108,126 +108,6 @@ mkdir -p "${coverage_dir}" fi } >"${coverage_summary}" -python3 - "$trx_file" <<'PY' | tee "${readable_report}" -import re -import sys -import xml.etree.ElementTree as ET - -BLUE = "\033[94m" -WHITE = "\033[97m" -GREEN = "\033[32m" -RED = "\033[31m" -RESET = "\033[0m" -DIM = "\033[2m" - -CHECK = "✔" -CROSS = "✘" - -trx_path = sys.argv[1] -root = ET.parse(trx_path).getroot() -ns = {"t": root.tag.split("}")[0].strip("{")} if root.tag.startswith("{") else {} - -def 
findall(path): - return root.findall(path, ns) if ns else root.findall(path) - -def find(node, path): - return node.find(path, ns) if ns else node.find(path) - -def strip_param_suffix(text: str) -> str: - value = text.strip() - # Drop trailing "(...)" blocks used by data-driven test display names. - while True: - updated = re.sub(r"\s*\([^()]*\)\s*$", "", value).strip() - if updated == value: - return value - value = updated - -def humanize_identifier(text: str) -> str: - value = strip_param_suffix(text) - value = value.replace("_", " ") - value = re.sub(r"([a-z0-9])([A-Z])", r"\1 \2", value) - value = re.sub(r"([A-Z]+)([A-Z][a-z])", r"\1 \2", value) - value = re.sub(r"\s+", " ", value).strip() - return value - -def normalize_title(test_name: str, scenario: str | None) -> str: - if scenario: - return strip_param_suffix(scenario) - - raw = strip_param_suffix(test_name) - # xUnit names are usually Namespace.Class.Method(...). Keep only method part. - if "." in raw: - raw = raw.rsplit(".", 1)[-1] - return humanize_identifier(raw) - -def iter_step_lines(stdout: str): - if not stdout: - return [] - lines = [] - for raw in stdout.splitlines(): - line = raw.strip() - if not line: - continue - if line.startswith("[BDD]"): - continue - if line.startswith("-> done:"): - continue - if line.startswith("--- table step argument ---"): - continue - if line.startswith("|"): - continue - if line.startswith("Standardausgabemeldungen:"): - continue - if re.match(r"^(Angenommen|Wenn|Dann|Und|Aber)\b", line): - lines.append(line) - # dedupe preserving order - deduped = [] - seen = set() - for line in lines: - if line not in seen: - deduped.append(line) - seen.add(line) - return deduped - -results = [] -for node in findall(".//t:UnitTestResult" if ns else ".//UnitTestResult"): - outcome = (node.attrib.get("outcome") or "").strip() - test_name = (node.attrib.get("testName") or "").strip() - output = find(node, "t:Output" if ns else "Output") - stdout = "" - if output is not None: - std_node = find(output, "t:StdOut" if ns else "StdOut") - if std_node is not None and std_node.text: - stdout = std_node.text - - scenario = None - if stdout: - for line in stdout.splitlines(): - l = line.strip() - m = re.match(r"^\[BDD\]\s*Szenario startet:\s*(.+)$", l) - if m: - scenario = m.group(1).strip() - break - - title = normalize_title(test_name, scenario) - steps = iter_step_lines(stdout) - results.append((title, outcome, steps)) - -for title, outcome, steps in results: - passed = outcome.lower() == "passed" - icon = CHECK if passed else CROSS - icon_color = GREEN if passed else RED - end_word = "FINISHED" if passed else "FAILED" - - if not steps: - steps = ["Test erfolgreich abgeschlossen" if passed else "Test fehlgeschlagen"] - - print(f"{DIM}────────────────────────────────────────────────────────────────{RESET}") - print(f"{BLUE}{title}{RESET}") - for s in steps: - print(f"{icon_color}{icon}{RESET} {WHITE}{s}{RESET}") - print(f"{icon_color}{end_word}{RESET}") - print("") -PY +python3 "${ROOT_DIR}/tools/ci/bin/bdd_readable_from_trx.py" "${trx_file}" | tee "${readable_report}" exit "${test_exit}" diff --git a/tools/versioning/count_prefixed_labels.py b/tools/versioning/count_prefixed_labels.py new file mode 100755 index 00000000..ec66f105 --- /dev/null +++ b/tools/versioning/count_prefixed_labels.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import json +import sys + + +def main() -> int: + parser = argparse.ArgumentParser() + parser.add_argument("--labels-json", 
required=True) + parser.add_argument("--prefix", required=True) + args = parser.parse_args() + + try: + labels = json.loads(args.labels_json) + except Exception as ex: + print(f"ERROR: {ex}", file=sys.stderr) + return 1 + + count = sum(1 for label in labels if isinstance(label, str) and label.startswith(args.prefix)) + print(count) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tools/versioning/run-versioning-policy.sh b/tools/versioning/run-versioning-policy.sh index 4cc97757..33f3e7d7 100755 --- a/tools/versioning/run-versioning-policy.sh +++ b/tools/versioning/run-versioning-policy.sh @@ -18,7 +18,7 @@ files_json="$(python3 tools/ci/bin/github_api.py pr-files --repo "${REPO}" --pr labels_json="[]" for _ in {1..12}; do labels_json="$(python3 tools/ci/bin/github_api.py issue-labels --repo "${REPO}" --issue "${PR_NUMBER}")" - count="$(python3 -c 'import json,sys; labels=json.loads(sys.argv[1]); print(sum(1 for l in labels if isinstance(l, str) and l.startswith("versioning:")))' "${labels_json}")" + count="$(python3 tools/versioning/count_prefixed_labels.py --labels-json "${labels_json}" --prefix "versioning:")" if [[ "${count}" -eq 1 ]]; then break fi From 9aec47e1e024fa86ad9da65e85e0a9f1d978c699 Mon Sep 17 00:00:00 2001 From: GitHub Copilot Agent Date: Thu, 19 Feb 2026 17:20:38 +0100 Subject: [PATCH 2/9] chore(ci): re-trigger preflight governance From ba7b6cc9c3266ab548d50098130eafd1904dc3ad Mon Sep 17 00:00:00 2001 From: GitHub Copilot Agent Date: Thu, 19 Feb 2026 17:22:31 +0100 Subject: [PATCH 3/9] chore(ci): add helper comment for synchronization run --- tools/ci/release/read_summary_status.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/ci/release/read_summary_status.py b/tools/ci/release/read_summary_status.py index 2a6c3ba1..2bd87e60 100755 --- a/tools/ci/release/read_summary_status.py +++ b/tools/ci/release/read_summary_status.py @@ -8,6 +8,7 @@ def main() -> int: + # Small dedicated helper to keep bash wrappers free of inline Python blocks. 
parser = argparse.ArgumentParser() parser.add_argument("--summary", required=True) args = parser.parse_args() From 921695dd0e6508566c4cfae7d20a2712fb8c4c79 Mon Sep 17 00:00:00 2001 From: GitHub Copilot Agent Date: Thu, 19 Feb 2026 17:28:11 +0100 Subject: [PATCH 4/9] fix(ci): stabilize qodana reuse workflow --- .github/workflows/dependency-canary.yml | 6 +++--- .../{_qodana-contract.yml => qodana-contract-core.yml} | 0 .github/workflows/qodana.yml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) rename .github/workflows/{_qodana-contract.yml => qodana-contract-core.yml} (100%) diff --git a/.github/workflows/dependency-canary.yml b/.github/workflows/dependency-canary.yml index 55109195..39f07f26 100644 --- a/.github/workflows/dependency-canary.yml +++ b/.github/workflows/dependency-canary.yml @@ -20,7 +20,7 @@ jobs: canary-sharpcompress: if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == 'SharpCompress' - uses: ./.github/workflows/_qodana-contract.yml + uses: ./.github/workflows/qodana-contract-core.yml with: pre_command: bash -euo pipefail tools/ci/bin/dependency-canary.sh "SharpCompress" "${{ inputs.version || 'latest' }}" artifact_name: canary-SharpCompress @@ -29,7 +29,7 @@ jobs: canary-recyclable: if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == 'Microsoft.IO.RecyclableMemoryStream' - uses: ./.github/workflows/_qodana-contract.yml + uses: ./.github/workflows/qodana-contract-core.yml with: pre_command: bash -euo pipefail tools/ci/bin/dependency-canary.sh "Microsoft.IO.RecyclableMemoryStream" "${{ inputs.version || 'latest' }}" artifact_name: canary-Microsoft.IO.RecyclableMemoryStream @@ -38,7 +38,7 @@ jobs: canary-system-text-json: if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == 'System.Text.Json' - uses: ./.github/workflows/_qodana-contract.yml + uses: ./.github/workflows/qodana-contract-core.yml with: pre_command: bash -euo pipefail tools/ci/bin/dependency-canary.sh "System.Text.Json" "${{ inputs.version || 'latest' }}" artifact_name: canary-System.Text.Json diff --git a/.github/workflows/_qodana-contract.yml b/.github/workflows/qodana-contract-core.yml similarity index 100% rename from .github/workflows/_qodana-contract.yml rename to .github/workflows/qodana-contract-core.yml diff --git a/.github/workflows/qodana.yml b/.github/workflows/qodana.yml index b01302b1..0704da86 100644 --- a/.github/workflows/qodana.yml +++ b/.github/workflows/qodana.yml @@ -12,7 +12,7 @@ permissions: jobs: qodana: - uses: ./.github/workflows/_qodana-contract.yml + uses: ./.github/workflows/qodana-contract-core.yml with: artifact_name: ci-qodana upload_sarif: true From da89cd05bf3ed9080ae99313ea2892bcaa4d51ed Mon Sep 17 00:00:00 2001 From: GitHub Copilot Agent Date: Thu, 19 Feb 2026 17:30:18 +0100 Subject: [PATCH 5/9] fix(ci): resolve qodana startup failure with a direct workflow --- .github/workflows/dependency-canary.yml | 88 +++++++++++++++------- .github/workflows/qodana-contract-core.yml | 84 --------------------- .github/workflows/qodana.yml | 62 +++++++++++++-- 3 files changed, 119 insertions(+), 115 deletions(-) delete mode 100644 .github/workflows/qodana-contract-core.yml diff --git a/.github/workflows/dependency-canary.yml b/.github/workflows/dependency-canary.yml index 39f07f26..d16d7b92 100644 --- a/.github/workflows/dependency-canary.yml +++ b/.github/workflows/dependency-canary.yml @@ -18,29 +18,65 @@ permissions: contents: 
read jobs: - canary-sharpcompress: - if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == 'SharpCompress' - uses: ./.github/workflows/qodana-contract-core.yml - with: - pre_command: bash -euo pipefail tools/ci/bin/dependency-canary.sh "SharpCompress" "${{ inputs.version || 'latest' }}" - artifact_name: canary-SharpCompress - upload_sarif: false - secrets: inherit - - canary-recyclable: - if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == 'Microsoft.IO.RecyclableMemoryStream' - uses: ./.github/workflows/qodana-contract-core.yml - with: - pre_command: bash -euo pipefail tools/ci/bin/dependency-canary.sh "Microsoft.IO.RecyclableMemoryStream" "${{ inputs.version || 'latest' }}" - artifact_name: canary-Microsoft.IO.RecyclableMemoryStream - upload_sarif: false - secrets: inherit - - canary-system-text-json: - if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == 'System.Text.Json' - uses: ./.github/workflows/qodana-contract-core.yml - with: - pre_command: bash -euo pipefail tools/ci/bin/dependency-canary.sh "System.Text.Json" "${{ inputs.version || 'latest' }}" - artifact_name: canary-System.Text.Json - upload_sarif: false - secrets: inherit + canary: + if: github.event_name != 'pull_request' + strategy: + fail-fast: false + matrix: + dependency: + - SharpCompress + - Microsoft.IO.RecyclableMemoryStream + - System.Text.Json + runs-on: ubuntu-latest + env: + QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }} + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Setup .NET + uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4 + with: + dotnet-version: | + 8.0.x + 10.0.102 + + - name: Run dependency canary update + focused tests + if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == matrix.dependency + shell: bash + run: | + bash -euo pipefail tools/ci/bin/dependency-canary.sh "${{ matrix.dependency }}" "${{ inputs.version || 'latest' }}" + + - name: Assert QODANA_TOKEN present + if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == matrix.dependency + shell: bash + run: | + test -n "${QODANA_TOKEN:-}" || (echo "FAIL: QODANA_TOKEN missing" >&2; exit 1) + echo "OK: QODANA_TOKEN present" + + - name: Run Qodana + if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == matrix.dependency + uses: JetBrains/qodana-action@42dad391966aca8ca344ca2340a7f43a5507e9b2 # v2025.3.1 + with: + args: --linter=jetbrains/qodana-dotnet:2025.3 + results-dir: artifacts/ci/qodana + upload-result: false + use-caches: false + + - name: Run Qodana contract validator + if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == matrix.dependency + shell: bash + env: + CI_DEFER_ARTIFACT_LINK_RESOLUTION: "1" + run: bash -euo pipefail tools/ci/bin/run.sh qodana + + - name: Upload canary artifacts + if: always() && (github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == matrix.dependency) + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + with: + name: canary-${{ matrix.dependency }} + path: | + artifacts/ci/qodana/ + if-no-files-found: error diff --git a/.github/workflows/qodana-contract-core.yml b/.github/workflows/qodana-contract-core.yml deleted file mode 
100644 index 17f90f3f..00000000 --- a/.github/workflows/qodana-contract-core.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: qodana-contract-core - -on: - workflow_call: - inputs: - pre_command: - description: "Optional shell command executed before Qodana." - required: false - type: string - default: "" - artifact_name: - description: "Artifact name for uploaded Qodana evidence." - required: false - type: string - default: "ci-qodana" - upload_sarif: - description: "Upload SARIF to code scanning on non-PR events." - required: false - type: boolean - default: true - -permissions: - contents: read - -jobs: - qodana: - if: github.event_name != 'pull_request' || (github.event.pull_request.head.repo.fork == false && github.actor != 'dependabot[bot]') - runs-on: ubuntu-latest - permissions: - contents: read - security-events: write - env: - QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }} - PRE_COMMAND: ${{ inputs.pre_command }} - steps: - - name: Checkout - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - with: - fetch-depth: 0 - - - name: Setup .NET - uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4 - with: - dotnet-version: "10.0.102" - - - name: Run pre command - if: env.PRE_COMMAND != '' - shell: bash - run: bash -euo pipefail -c "${PRE_COMMAND}" - - - name: Assert QODANA_TOKEN present - shell: bash - run: | - test -n "${QODANA_TOKEN:-}" || (echo "FAIL: QODANA_TOKEN missing" >&2; exit 1) - echo "OK: QODANA_TOKEN present" - - - name: Run Qodana - uses: JetBrains/qodana-action@42dad391966aca8ca344ca2340a7f43a5507e9b2 # v2025.3.1 - with: - args: --linter=jetbrains/qodana-dotnet:2025.3 - results-dir: artifacts/ci/qodana - upload-result: false - use-caches: false - - - name: Run Entry Check - if: always() - shell: bash - env: - CI_DEFER_ARTIFACT_LINK_RESOLUTION: "1" - run: bash tools/ci/bin/run.sh qodana - - - name: Upload SARIF To Code Scanning - if: inputs.upload_sarif && github.event_name != 'pull_request' - uses: github/codeql-action/upload-sarif@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3 - with: - sarif_file: artifacts/ci/qodana/qodana.upload.sarif.json - - - name: Upload Artifact - if: always() - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - with: - name: ${{ inputs.artifact_name }} - path: artifacts/ci/qodana/ - if-no-files-found: error diff --git a/.github/workflows/qodana.yml b/.github/workflows/qodana.yml index 0704da86..40659c60 100644 --- a/.github/workflows/qodana.yml +++ b/.github/workflows/qodana.yml @@ -12,8 +12,60 @@ permissions: jobs: qodana: - uses: ./.github/workflows/qodana-contract-core.yml - with: - artifact_name: ci-qodana - upload_sarif: true - secrets: inherit + # In untrusted PR contexts (forks, dependabot), repository secrets are unavailable. + if: github.event_name != 'pull_request' || (github.event.pull_request.head.repo.fork == false && github.actor != 'dependabot[bot]') + runs-on: ubuntu-latest + permissions: + contents: read + security-events: write + env: + QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }} + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Assert QODANA_TOKEN present + # Fail-closed in trusted CI contexts where Qodana is expected to run. 
+ shell: bash + run: | + test -n "${QODANA_TOKEN:-}" || (echo "FAIL: QODANA_TOKEN missing" >&2; exit 1) + echo "OK: QODANA_TOKEN present" + + - name: Setup .NET + uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4 + with: + dotnet-version: "10.0.102" + + - name: Run Qodana + uses: JetBrains/qodana-action@42dad391966aca8ca344ca2340a7f43a5507e9b2 # v2025.3.1 + with: + # gh-actions input is a single string; use --flag=value form to avoid passing a single argv containing a space. + args: --linter=jetbrains/qodana-dotnet:2025.3 + results-dir: artifacts/ci/qodana + upload-result: false + use-caches: false + + - name: Run Entry Check + if: always() + shell: bash + env: + # The job uploads the qodana artifact after this step; defer artifact-link rendering to avoid false-negative + # "artifact_not_found" failures when earlier steps fail. + CI_DEFER_ARTIFACT_LINK_RESOLUTION: "1" + run: bash tools/ci/bin/run.sh qodana + + - name: Upload SARIF To Code Scanning + if: github.event_name != 'pull_request' + uses: github/codeql-action/upload-sarif@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3 + with: + sarif_file: artifacts/ci/qodana/qodana.upload.sarif.json + + - name: Upload Artifact + if: always() + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + with: + name: ci-qodana + path: artifacts/ci/qodana/ + if-no-files-found: error From 6a2cd04fe0ccabe75dc7a9971059728a4dd4736e Mon Sep 17 00:00:00 2001 From: GitHub Copilot Agent Date: Thu, 19 Feb 2026 17:41:13 +0100 Subject: [PATCH 6/9] fix(ci): fail-closed canary inputs and robust helper paths --- .github/workflows/dependency-canary.yml | 32 ++++++++++++++++++++----- tools/ci/verify_nuget_release.sh | 13 ++++++---- 2 files changed, 34 insertions(+), 11 deletions(-) diff --git a/.github/workflows/dependency-canary.yml b/.github/workflows/dependency-canary.yml index d16d7b92..d2a32d1e 100644 --- a/.github/workflows/dependency-canary.yml +++ b/.github/workflows/dependency-canary.yml @@ -18,7 +18,27 @@ permissions: contents: read jobs: + validate-dispatch-inputs: + if: github.event_name == 'workflow_dispatch' + runs-on: ubuntu-latest + steps: + - name: Validate dependency input (fail-closed) + shell: bash + run: | + dep="${{ github.event.inputs.dependency || 'all' }}" + case "${dep}" in + all|SharpCompress|Microsoft.IO.RecyclableMemoryStream|System.Text.Json) + echo "OK: dependency input='${dep}'" + ;; + *) + echo "FAIL: unsupported dependency input='${dep}'" >&2 + echo "Allowed: all, SharpCompress, Microsoft.IO.RecyclableMemoryStream, System.Text.Json" >&2 + exit 1 + ;; + esac + canary: + needs: [validate-dispatch-inputs] if: github.event_name != 'pull_request' strategy: fail-fast: false @@ -44,20 +64,20 @@ jobs: 10.0.102 - name: Run dependency canary update + focused tests - if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == matrix.dependency + if: github.event_name != 'workflow_dispatch' || github.event.inputs.dependency == 'all' || github.event.inputs.dependency == matrix.dependency shell: bash run: | - bash -euo pipefail tools/ci/bin/dependency-canary.sh "${{ matrix.dependency }}" "${{ inputs.version || 'latest' }}" + bash -euo pipefail tools/ci/bin/dependency-canary.sh "${{ matrix.dependency }}" "${{ github.event.inputs.version || 'latest' }}" - name: Assert QODANA_TOKEN present - if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == matrix.dependency + if: github.event_name != 'workflow_dispatch' || 
github.event.inputs.dependency == 'all' || github.event.inputs.dependency == matrix.dependency shell: bash run: | test -n "${QODANA_TOKEN:-}" || (echo "FAIL: QODANA_TOKEN missing" >&2; exit 1) echo "OK: QODANA_TOKEN present" - name: Run Qodana - if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == matrix.dependency + if: github.event_name != 'workflow_dispatch' || github.event.inputs.dependency == 'all' || github.event.inputs.dependency == matrix.dependency uses: JetBrains/qodana-action@42dad391966aca8ca344ca2340a7f43a5507e9b2 # v2025.3.1 with: args: --linter=jetbrains/qodana-dotnet:2025.3 @@ -66,14 +86,14 @@ jobs: use-caches: false - name: Run Qodana contract validator - if: github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == matrix.dependency + if: github.event_name != 'workflow_dispatch' || github.event.inputs.dependency == 'all' || github.event.inputs.dependency == matrix.dependency shell: bash env: CI_DEFER_ARTIFACT_LINK_RESOLUTION: "1" run: bash -euo pipefail tools/ci/bin/run.sh qodana - name: Upload canary artifacts - if: always() && (github.event_name != 'workflow_dispatch' || inputs.dependency == 'all' || inputs.dependency == matrix.dependency) + if: always() && (github.event_name != 'workflow_dispatch' || github.event.inputs.dependency == 'all' || github.event.inputs.dependency == matrix.dependency) uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 with: name: canary-${{ matrix.dependency }} diff --git a/tools/ci/verify_nuget_release.sh b/tools/ci/verify_nuget_release.sh index e4cec676..ebe4a469 100755 --- a/tools/ci/verify_nuget_release.sh +++ b/tools/ci/verify_nuget_release.sh @@ -3,6 +3,9 @@ set -euo pipefail IFS=$'\n\t' LC_ALL=C +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +HELPER_PY="${SCRIPT_DIR}/bin/verify_nuget_release_helpers.py" + NUPKG_PATH="${NUPKG_PATH:-}" NUPKG_DIR="${NUPKG_DIR:-artifacts/nuget}" PKG_ID="${PKG_ID:-}" @@ -126,7 +129,7 @@ resolve_nupkg_path() { derive_from_filename() { local filename filename="$(basename "${NUPKG_PATH}")" - python3 tools/ci/bin/verify_nuget_release_helpers.py derive-filename --filename "$filename" + python3 "${HELPER_PY}" derive-filename --filename "$filename" } derive_from_nuspec() { @@ -134,7 +137,7 @@ derive_from_nuspec() { nuspec_xml="$(unzip -p "${NUPKG_PATH}" '*.nuspec' 2>/dev/null)" || fail "Unable to read .nuspec from ${NUPKG_PATH}" local parsed - parsed="$(python3 tools/ci/bin/verify_nuget_release_helpers.py derive-nuspec --nuspec-xml "${nuspec_xml}")" + parsed="$(python3 "${HELPER_PY}" derive-nuspec --nuspec-xml "${nuspec_xml}")" printf '%s\n' "${parsed}" } @@ -147,7 +150,7 @@ query_search() { response="$(curl -fsS --compressed --max-time "${TIMEOUT_SECONDS}" "${SEARCH_URL}")" || return 1 local out - out="$(python3 tools/ci/bin/verify_nuget_release_helpers.py query-search --response-json "${response}" --pkg-id "$PKG_ID" --pkg-ver "$PKG_VER")" || return 1 + out="$(python3 "${HELPER_PY}" query-search --response-json "${response}" --pkg-id "$PKG_ID" --pkg-ver "$PKG_VER")" || return 1 REGISTRATION_URL="${out}" SEARCH_OK="ok" @@ -164,7 +167,7 @@ query_registration() { local response response="$(curl -fsS --compressed --max-time "${TIMEOUT_SECONDS}" "${REGISTRATION_URL}")" || return 1 - python3 tools/ci/bin/verify_nuget_release_helpers.py registration-contains --response-json "${response}" --pkg-ver "$PKG_VER" >/dev/null || return 1 + python3 "${HELPER_PY}" registration-contains --response-json 
"${response}" --pkg-ver "$PKG_VER" >/dev/null || return 1 REGISTRATION_OK="ok" return 0 @@ -204,7 +207,7 @@ query_v2_download() { } emit_summary_json() { - python3 tools/ci/bin/verify_nuget_release_helpers.py emit-summary + python3 "${HELPER_PY}" emit-summary } main() { From 9a51c0f1675348869e64200e229d0fc65bbb4ae4 Mon Sep 17 00:00:00 2001 From: GitHub Copilot Agent Date: Thu, 19 Feb 2026 18:50:32 +0100 Subject: [PATCH 7/9] fix(ci): preflight-konforme canary-input-validierung --- .github/workflows/dependency-canary.yml | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/.github/workflows/dependency-canary.yml b/.github/workflows/dependency-canary.yml index d2a32d1e..6def3888 100644 --- a/.github/workflows/dependency-canary.yml +++ b/.github/workflows/dependency-canary.yml @@ -26,16 +26,10 @@ jobs: shell: bash run: | dep="${{ github.event.inputs.dependency || 'all' }}" - case "${dep}" in - all|SharpCompress|Microsoft.IO.RecyclableMemoryStream|System.Text.Json) - echo "OK: dependency input='${dep}'" - ;; - *) - echo "FAIL: unsupported dependency input='${dep}'" >&2 - echo "Allowed: all, SharpCompress, Microsoft.IO.RecyclableMemoryStream, System.Text.Json" >&2 - exit 1 - ;; - esac + [[ "${dep}" == "all" || "${dep}" == "SharpCompress" || "${dep}" == "Microsoft.IO.RecyclableMemoryStream" || "${dep}" == "System.Text.Json" ]] && { echo "OK: dependency input='${dep}'"; exit 0; } + echo "FAIL: unsupported dependency input='${dep}'" >&2 + echo "Allowed: all, SharpCompress, Microsoft.IO.RecyclableMemoryStream, System.Text.Json" >&2 + exit 1 canary: needs: [validate-dispatch-inputs] From 8f438364ff4f393dca998ae756dc153ea5d56c1e Mon Sep 17 00:00:00 2001 From: GitHub Copilot Agent Date: Thu, 19 Feb 2026 18:53:16 +0100 Subject: [PATCH 8/9] fix(ci): canary-validierung yaml-sicher und preflight-konform --- .github/workflows/dependency-canary.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/dependency-canary.yml b/.github/workflows/dependency-canary.yml index 6def3888..1656f320 100644 --- a/.github/workflows/dependency-canary.yml +++ b/.github/workflows/dependency-canary.yml @@ -24,12 +24,8 @@ jobs: steps: - name: Validate dependency input (fail-closed) shell: bash - run: | - dep="${{ github.event.inputs.dependency || 'all' }}" - [[ "${dep}" == "all" || "${dep}" == "SharpCompress" || "${dep}" == "Microsoft.IO.RecyclableMemoryStream" || "${dep}" == "System.Text.Json" ]] && { echo "OK: dependency input='${dep}'"; exit 0; } - echo "FAIL: unsupported dependency input='${dep}'" >&2 - echo "Allowed: all, SharpCompress, Microsoft.IO.RecyclableMemoryStream, System.Text.Json" >&2 - exit 1 + run: >- + dep="${{ github.event.inputs.dependency || 'all' }}"; [[ "${dep}" == "all" || "${dep}" == "SharpCompress" || "${dep}" == "Microsoft.IO.RecyclableMemoryStream" || "${dep}" == "System.Text.Json" ]] && { echo "OK: dependency input='${dep}'"; exit 0; }; echo "FAIL: unsupported dependency input='${dep}'" >&2; echo "Allowed: all, SharpCompress, Microsoft.IO.RecyclableMemoryStream, System.Text.Json" >&2; exit 1 canary: needs: [validate-dispatch-inputs] From e4c53d3c9cac62a1bc2edf1fafea65d5f860e4cf Mon Sep 17 00:00:00 2001 From: GitHub Copilot Agent Date: Thu, 19 Feb 2026 19:00:11 +0100 Subject: [PATCH 9/9] fix(ci): validate-job immer ausfuehren fuer schedule-needs --- .github/workflows/dependency-canary.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/dependency-canary.yml 
b/.github/workflows/dependency-canary.yml index 1656f320..082b656b 100644 --- a/.github/workflows/dependency-canary.yml +++ b/.github/workflows/dependency-canary.yml @@ -19,10 +19,10 @@ permissions: jobs: validate-dispatch-inputs: - if: github.event_name == 'workflow_dispatch' runs-on: ubuntu-latest steps: - name: Validate dependency input (fail-closed) + if: github.event_name == 'workflow_dispatch' shell: bash run: >- dep="${{ github.event.inputs.dependency || 'all' }}"; [[ "${dep}" == "all" || "${dep}" == "SharpCompress" || "${dep}" == "Microsoft.IO.RecyclableMemoryStream" || "${dep}" == "System.Text.Json" ]] && { echo "OK: dependency input='${dep}'"; exit 0; }; echo "FAIL: unsupported dependency input='${dep}'" >&2; echo "Allowed: all, SharpCompress, Microsoft.IO.RecyclableMemoryStream, System.Text.Json" >&2; exit 1
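
Why PATCH 9/9 moves the guard from the job to the step, shown as a minimal sketch (the gate/probe job names and echo commands below are illustrative, not taken from this repository): GitHub Actions gives every dependent job an implicit if: success() check against the jobs in its needs list, and a skipped job does not count as a success. A job-level if: github.event_name == 'workflow_dispatch' therefore skips validate-dispatch-inputs on schedule events, which silently skips every job that needs it. Guarding only the step keeps the gate job green on schedule runs while still failing closed on workflow_dispatch:

    # Hypothetical reduction of the pattern the final patch converges on:
    jobs:
      gate:
        runs-on: ubuntu-latest
        steps:
          - name: Validate input
            # Guard the step, not the job; a skipped gate job would skip probe too.
            if: github.event_name == 'workflow_dispatch'
            run: echo "validating '${{ github.event.inputs.dependency || 'all' }}'"
      probe:
        needs: [gate]  # satisfied on schedule runs because gate always executes
        runs-on: ubuntu-latest
        steps:
          - run: echo "run canary"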