Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
92 changes: 92 additions & 0 deletions .github/workflows/dependency-canary.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
name: dependency-canary

on:
  schedule:
    # Weekly probe: Mondays at 05:00 UTC.
    - cron: "0 5 * * 1"
  workflow_dispatch:
    inputs:
      dependency:
        description: "Dependency to probe (all or exact package id)"
        required: false
        default: "all"
      version:
        description: "Target version or 'latest'"
        required: false
        default: "latest"

permissions:
  contents: read

jobs:
  validate-dispatch-inputs:
    runs-on: ubuntu-latest
    steps:
      - name: Validate dependency input (fail-closed)
        if: github.event_name == 'workflow_dispatch'
        shell: bash
        env:
          # SECURITY: pass the untrusted dispatch input through an environment
          # variable instead of interpolating `${{ }}` directly into the shell
          # script, which would allow command injection.
          DEP_INPUT: ${{ github.event.inputs.dependency || 'all' }}
        run: |
          dep="${DEP_INPUT}"
          if [[ "${dep}" == "all" || "${dep}" == "SharpCompress" || "${dep}" == "Microsoft.IO.RecyclableMemoryStream" || "${dep}" == "System.Text.Json" ]]; then
            echo "OK: dependency input='${dep}'"
            exit 0
          fi
          echo "FAIL: unsupported dependency input='${dep}'" >&2
          echo "Allowed: all, SharpCompress, Microsoft.IO.RecyclableMemoryStream, System.Text.Json" >&2
          exit 1

  canary:
    needs: [validate-dispatch-inputs]
    if: github.event_name != 'pull_request'
    strategy:
      fail-fast: false
      matrix:
        dependency:
          - SharpCompress
          - Microsoft.IO.RecyclableMemoryStream
          - System.Text.Json
    runs-on: ubuntu-latest
    env:
      QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }}
    steps:
      - name: Checkout
        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4
        with:
          dotnet-version: |
            8.0.x
            10.0.102

      - name: Run dependency canary update + focused tests
        if: github.event_name != 'workflow_dispatch' || github.event.inputs.dependency == 'all' || github.event.inputs.dependency == matrix.dependency
        shell: bash
        env:
          # SECURITY: untrusted free-text input; never interpolate into `run:`.
          TARGET_VERSION: ${{ github.event.inputs.version || 'latest' }}
        run: |
          # matrix.dependency is safe to interpolate (fixed allow-list above).
          bash -euo pipefail tools/ci/bin/dependency-canary.sh "${{ matrix.dependency }}" "${TARGET_VERSION}"

      - name: Assert QODANA_TOKEN present
        if: github.event_name != 'workflow_dispatch' || github.event.inputs.dependency == 'all' || github.event.inputs.dependency == matrix.dependency
        shell: bash
        run: |
          test -n "${QODANA_TOKEN:-}" || (echo "FAIL: QODANA_TOKEN missing" >&2; exit 1)
          echo "OK: QODANA_TOKEN present"

      - name: Run Qodana
        if: github.event_name != 'workflow_dispatch' || github.event.inputs.dependency == 'all' || github.event.inputs.dependency == matrix.dependency
        uses: JetBrains/qodana-action@42dad391966aca8ca344ca2340a7f43a5507e9b2 # v2025.3.1
        with:
          args: --linter=jetbrains/qodana-dotnet:2025.3
          results-dir: artifacts/ci/qodana
          upload-result: false
          use-caches: false

      - name: Run Qodana contract validator
        if: github.event_name != 'workflow_dispatch' || github.event.inputs.dependency == 'all' || github.event.inputs.dependency == matrix.dependency
        shell: bash
        env:
          CI_DEFER_ARTIFACT_LINK_RESOLUTION: "1"
        run: bash -euo pipefail tools/ci/bin/run.sh qodana

      - name: Upload canary artifacts
        if: always() && (github.event_name != 'workflow_dispatch' || github.event.inputs.dependency == 'all' || github.event.inputs.dependency == matrix.dependency)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        with:
          name: canary-${{ matrix.dependency }}
          path: |
            artifacts/ci/qodana/
          if-no-files-found: error
6 changes: 6 additions & 0 deletions docs/audit/009_SUPPLY_CHAIN_BASELINE.MD
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,10 @@ Minimum an reproduzierbaren Kontrollen fuer Source-to-Package-Integritaet in die
- E4 Release/Provenance-Nachweise:
- `.github/workflows/release.yml`
- `artifacts/nuget/attestation-verify.txt` (wenn der Release-Workflow laeuft)
- E5 Dependency-Canary-Nachweise:
- `.github/workflows/dependency-canary.yml`
- Workflow-Artefakte `canary-*` mit Qodana-Contract-Output (`artifacts/ci/qodana/`)
- Laufhistorie: https://github.com/tomtastisch/FileClassifier/actions/workflows/dependency-canary.yml

## 4. Verifikationskommandos
Alle Kommandos sind fuer Ausfuehrung im Repository-Root gedacht.
Expand All @@ -45,6 +49,8 @@ NUPKG="$(find artifacts/nuget -maxdepth 1 -type f -name '*.nupkg' | head -n 1)"
test -n "$NUPKG"
dotnet nuget verify "$NUPKG"
gh attestation verify "$NUPKG" --repo tomtastisch/FileClassifier
# Optionaler Canary-Nachweis:
gh run list --workflow dependency-canary.yml --limit 5
```

## 5. Operative Kadenz
Expand Down
6 changes: 6 additions & 0 deletions docs/audit/109_SUPPLY_CHAIN_BASELINE.MD
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,10 @@ Define minimum reproducible controls for source-to-package integrity in this rep
- E4 Release/provenance evidence:
- `.github/workflows/release.yml`
- `artifacts/nuget/attestation-verify.txt` (when release workflow runs)
- E5 Dependency canary evidence:
- `.github/workflows/dependency-canary.yml`
- workflow artifacts `canary-*` with Qodana contract output (`artifacts/ci/qodana/`)
- run history: https://github.com/tomtastisch/FileClassifier/actions/workflows/dependency-canary.yml

## 4. Verification Commands
All commands are intended to run from the repository root.
Expand All @@ -45,6 +49,8 @@ NUPKG="$(find artifacts/nuget -maxdepth 1 -type f -name '*.nupkg' | head -n 1)"
test -n "$NUPKG"
dotnet nuget verify "$NUPKG"
gh attestation verify "$NUPKG" --repo tomtastisch/FileClassifier
# Optional canary evidence:
gh run list --workflow dependency-canary.yml --limit 5
```

## 5. Operational Cadence
Expand Down
11 changes: 11 additions & 0 deletions docs/versioning/001_POLICY_VERSIONING.MD
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,17 @@ Primary-Prioritaet ist fix:
## 11. Qodana
Qodana ist zusaetzliche Static-Analysis und ersetzt keine CI-Quality-Gates fuer Build/Test/Coverage/Versioning.

## 12. Dependency-Upgrade-Policy (Canary)
- Patch-/Minor-Upgrades fuer risikoreiche Dependencies werden ueber den Canary-Workflow vorab verifiziert:
- `.github/workflows/dependency-canary.yml`
- `tools/ci/bin/dependency-canary.sh`
- `tools/ci/policies/data/dependency_canary.json`
- Verbindliche Canary-Nachweise:
- fokussierte dependency-spezifische Fail-closed-/Guard-Tests erfolgreich
- Qodana-Lauf plus `run.sh qodana` Contract-Validierung erfolgreich
- Major-Upgrades (`X+1.0.0`) sind fuer alle Canary-Dependencies nicht automatisch:
- nur via dediziertem PR mit API-Impact-Review und expliziter SemVer-Entscheidung

## RoC-Bezug
- [Artifact-Contract-Regel](https://github.com/tomtastisch/FileClassifier/blob/main/tools/ci/policies/rules/artifact_contract.yaml)
- [Docs-Drift-Regel](https://github.com/tomtastisch/FileClassifier/blob/main/tools/ci/policies/rules/docs_drift.yaml)
Expand Down
11 changes: 11 additions & 0 deletions docs/versioning/101_POLICY_VERSIONING.MD
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,17 @@ Primary priority is fixed:
## 11. Qodana
Qodana is additional static analysis and does not replace CI quality gates for build/test/coverage/versioning.

## 12. Dependency Upgrade Policy (Canary)
- Patch/minor upgrades for risky dependencies are pre-verified via the canary workflow:
- `.github/workflows/dependency-canary.yml`
- `tools/ci/bin/dependency-canary.sh`
- `tools/ci/policies/data/dependency_canary.json`
- Mandatory canary evidence:
- focused dependency-specific fail-closed/guard tests pass
- Qodana run plus `run.sh qodana` contract validation pass
- Major upgrades (`X+1.0.0`) are never automatic for any canary dependency:
- only via dedicated PR with API impact review and explicit SemVer decision

## RoC References
- [Artifact-Contract-Regel](https://github.com/tomtastisch/FileClassifier/blob/main/tools/ci/policies/rules/artifact_contract.yaml)
- [Docs-Drift-Regel](https://github.com/tomtastisch/FileClassifier/blob/main/tools/ci/policies/rules/docs_drift.yaml)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
using FileTypeDetectionLib.Tests.Support;
using Tomtastisch.FileClassifier;

namespace FileTypeDetectionLib.Tests.Unit;

/// <summary>
/// Contract guards for the SharpCompress compatibility layer used by archive detection.
/// </summary>
public sealed class ArchiveSharpCompressCompatUnitTests
{
    [Fact]
    public void OpenArchive_ReturnsNull_ForNonArchivePayload()
    {
        // Arbitrary bytes that match no known archive signature.
        using var nonArchivePayload = new MemoryStream(new byte[] { 0x01, 0x02, 0x03 }, false);

        var opened = ArchiveSharpCompressCompat.OpenArchive(nonArchivePayload);

        Assert.Null(opened);
    }

    [Fact]
    public void OpenArchive_ReturnsArchive_ForTarPayload()
    {
        var tarBytes = ArchivePayloadFactory.CreateTarWithSingleEntry("note.txt", "ok");

        using var tarStream = new MemoryStream(tarBytes, false);
        using var opened = ArchiveSharpCompressCompat.OpenArchive(tarStream);

        Assert.NotNull(opened);
    }

    [Fact]
    public void OpenArchiveForContainer_ReturnsArchive_ForGZipPayload()
    {
        var gzipBytes = ArchivePayloadFactory.CreateGZipWithSingleEntry("payload.bin", new byte[] { 0x11, 0x22, 0x33 });

        using var gzipStream = new MemoryStream(gzipBytes, false);
        using var opened = ArchiveSharpCompressCompat.OpenArchiveForContainer(gzipStream, ArchiveContainerType.GZip);

        Assert.NotNull(opened);
    }

    [Fact]
    public void HasGZipMagic_ReturnsTrue_ForGZipHeader()
    {
        // 0x1F 0x8B is the GZip magic; 0x08 the deflate compression method byte.
        using var headerStream = new MemoryStream(new byte[] { 0x1F, 0x8B, 0x08 }, false);

        Assert.True(ArchiveSharpCompressCompat.HasGZipMagic(headerStream));
    }

    [Fact]
    public void HasGZipMagic_ReturnsFalse_ForNonSeekableStream()
    {
        // The magic-byte probe must fail closed when it cannot rewind the stream.
        using var nonSeekable = new NonSeekableStream(new byte[] { 0x1F, 0x8B, 0x08 });

        Assert.False(ArchiveSharpCompressCompat.HasGZipMagic(nonSeekable));
    }

    /// <summary>Read-only memory stream that reports itself as non-seekable.</summary>
    private sealed class NonSeekableStream : MemoryStream
    {
        internal NonSeekableStream(byte[] buffer) : base(buffer, false)
        {
        }

        public override bool CanSeek => false;
    }
}
1 change: 1 addition & 0 deletions tests/FileTypeDetectionLib.Tests/Unit/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

## Testabdeckung

| Testdatei | Fokus |

Check notice on line 9 in tests/FileTypeDetectionLib.Tests/Unit/README.md

View workflow job for this annotation

GitHub Actions / qodana

Incorrect table formatting

Table is not correctly formatted
|----------------------------------------------------------|-------------------------------------------------------------------------------|
| `FileTypeRegistryUnitTests.cs` | deterministisches Typ-/Alias-Mapping |
| `HeaderCoveragePolicyUnitTests.cs` | Header-/Content-Coverage-Policy |
Expand Down Expand Up @@ -51,6 +51,7 @@
| `ArchiveTypeResolverAdditionalUnitTests.cs` | Stream/Bytes Branches in ArchiveTypeResolver |
| `ArchiveTypeResolverExceptionUnitTests.cs` | Exception-Pfade in ArchiveTypeResolver |
| `SharpCompressArchiveBackendUnitTests.cs` | Branches fuer SharpCompress-Backend |
| `ArchiveSharpCompressCompatUnitTests.cs` | Contract-Guards fuer SharpCompress-Kompat-Schicht |
| `SharpCompressEntryModelUnitTests.cs` | Null-Entry Defaults im SharpCompressEntryModel |
| `SharpCompressEntryModelNonNullUnitTests.cs` | Real-Entry Pfade im SharpCompressEntryModel |
| `FileTypeDetectorAdditionalUnitTests.cs` | LoadOptions/ReadFileSafe/Detect Branches |
Expand Down
134 changes: 134 additions & 0 deletions tools/ci/bin/bdd_readable_from_trx.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
#!/usr/bin/env python3
from __future__ import annotations

import re
import sys
import xml.etree.ElementTree as ET

# ANSI escape sequences used to colorize the console report.
BLUE = "\033[94m"   # scenario/test title
WHITE = "\033[97m"  # step text
GREEN = "\033[32m"  # passing icon and final status word
RED = "\033[31m"    # failing icon and final status word
RESET = "\033[0m"   # reset all styling
DIM = "\033[2m"     # dimmed separator rule

# Icons printed in front of each rendered step line.
CHECK = "✔"
CROSS = "✘"


def strip_param_suffix(text: str) -> str:
    """Strip every trailing parenthesized suffix, e.g. ``"Name (a) (b)" -> "Name"``.

    Parameterized test names carry their argument list in trailing parentheses;
    repeat the removal until the string stabilizes so stacked suffixes go too.
    """
    current = text.strip()
    while (trimmed := re.sub(r"\s*\([^()]*\)\s*$", "", current).strip()) != current:
        current = trimmed
    return current


def humanize_identifier(text: str) -> str:
    """Turn an identifier such as ``My_TestCase`` into readable words.

    Underscores become spaces, camelCase and ACRONYMCase boundaries are split,
    and runs of whitespace are collapsed to single spaces.
    """
    words = strip_param_suffix(text).replace("_", " ")
    words = re.sub(r"([a-z0-9])([A-Z])", r"\1 \2", words)   # fooBar -> foo Bar
    words = re.sub(r"([A-Z]+)([A-Z][a-z])", r"\1 \2", words)  # XMLNode -> XML Node
    return re.sub(r"\s+", " ", words).strip()


def normalize_title(test_name: str, scenario: str | None) -> str:
    """Choose the display title for a test result.

    Prefer the BDD scenario name when one was announced; otherwise fall back
    to the test name with its namespace qualifier dropped and humanized.
    """
    if scenario:
        return strip_param_suffix(scenario)
    bare = strip_param_suffix(test_name)
    # rsplit is a no-op when no "." is present, so keep only the last segment.
    return humanize_identifier(bare.rsplit(".", 1)[-1])


def iter_step_lines(stdout: str) -> list[str]:
    """Extract unique German Gherkin step lines from captured test stdout.

    Only lines starting with a step keyword (Angenommen/Wenn/Dann/Und/Aber)
    are kept; runner noise and table rows are skipped. Duplicates are removed
    while preserving first-seen order.
    """
    if not stdout:
        return []

    noise_prefixes = (
        "[BDD]",
        "-> done:",
        "--- table step argument ---",
        "|",
        "Standardausgabemeldungen:",
    )
    step_keyword = re.compile(r"^(Angenommen|Wenn|Dann|Und|Aber)\b")

    steps: list[str] = []
    for raw in stdout.splitlines():
        candidate = raw.strip()
        if not candidate or candidate.startswith(noise_prefixes):
            continue
        if step_keyword.match(candidate):
            steps.append(candidate)

    # dict.fromkeys keeps insertion order, giving an order-preserving dedup.
    return list(dict.fromkeys(steps))


def main() -> int:
    """Render a TRX file's unit-test results as readable BDD-style console output.

    Expects exactly one CLI argument: the path to a ``.trx`` file. Prints one
    colorized section per test result. Returns 0 on success, 2 on usage error.
    """
    if len(sys.argv) != 2:
        print("Usage: bdd_readable_from_trx.py <trx_path>", file=sys.stderr)
        return 2

    root = ET.parse(sys.argv[1]).getroot()
    # TRX files usually declare an XML namespace; derive it from the root tag
    # so element lookups work both with and without one.
    ns = {"t": root.tag.split("}")[0].strip("{")} if root.tag.startswith("{") else {}

    def resolve(path: str) -> str:
        # Drop the "t:" prefix when the document carries no namespace.
        return path if ns else path.replace("t:", "")

    parsed: list[tuple[str, str, list[str]]] = []
    for result in root.findall(resolve(".//t:UnitTestResult"), ns):
        outcome = (result.attrib.get("outcome") or "").strip()
        test_name = (result.attrib.get("testName") or "").strip()

        stdout = ""
        output_node = result.find(resolve("t:Output"), ns)
        if output_node is not None:
            stdout_node = output_node.find(resolve("t:StdOut"), ns)
            if stdout_node is not None and stdout_node.text:
                stdout = stdout_node.text

        # Prefer the scenario name announced by the BDD runner over the raw
        # xUnit test name.
        scenario = None
        for raw_line in stdout.splitlines():
            announced = re.match(r"^\[BDD\]\s*Szenario startet:\s*(.+)$", raw_line.strip())
            if announced:
                scenario = announced.group(1).strip()
                break

        parsed.append((normalize_title(test_name, scenario), outcome, iter_step_lines(stdout)))

    for title, outcome, steps in parsed:
        passed = outcome.lower() == "passed"
        icon, icon_color = (CHECK, GREEN) if passed else (CROSS, RED)
        end_word = "FINISHED" if passed else "FAILED"

        if not steps:
            # Fall back to a generic status line when no steps were captured.
            steps = ["Test erfolgreich abgeschlossen" if passed else "Test fehlgeschlagen"]

        print(f"{DIM}────────────────────────────────────────────────────────────────{RESET}")
        print(f"{BLUE}{title}{RESET}")
        for step in steps:
            print(f"{icon_color}{icon}{RESET} {WHITE}{step}{RESET}")
        print(f"{icon_color}{end_word}{RESET}")
        print("")

    return 0


if __name__ == "__main__":
    # Propagate main()'s status code to the shell.
    sys.exit(main())
Loading
Loading