diff --git a/.coverage b/.coverage deleted file mode 100644 index 38a4eab..0000000 Binary files a/.coverage and /dev/null differ diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index abf0db7..8b5c60e 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -1,10 +1,37 @@ Copilot instructions for Organizational Workflows Purpose -TBD +A collection of reusable GitHub Actions workflows with Python automation backends. +Each workflow domain is a self-contained package under `src/`. The workflows are +designed to be called via `workflow_call` from other repositories. Structure -TBD +```text +src/ +├── core/ # Shared foundation (GitHub API, config, helpers) +│ ├── github/ # GitHub CLI wrappers (issues, projects) +│ │ ├── client.py # run_gh / run_cmd subprocess helpers +│ │ ├── issues.py # Issue CRUD (create, edit, comment, labels) +│ │ └── projects.py # Projects V2 GraphQL (priority sync) +│ ├── config.py # Logging setup, RUNNER_DEBUG parsing +│ ├── helpers.py # Pure utilities (sha256, iso_date, normalize_path) +│ ├── models.py # Shared data models (Issue) +│ ├── priority.py # Severity-to-priority mapping +│ └── rendering.py # Generic Markdown template renderer +│ +├── security/ # Security workflow domain +│ ├── main.py # Pipeline orchestrator (check → collect → promote) +│ ├── check_labels.py # Verify required labels exist +│ ├── collect_alert.py # Fetch code-scanning alerts → JSON +│ ├── promote_alerts.py # Create/update Issues from alerts +│ ├── send_to_teams.py # Send Adaptive Card to Teams webhook +│ ├── constants.py # Labels, event types, metadata types +│ ├── alerts/ # Alert domain (parsing, models) +│ ├── issues/ # Issue management (sync, builder, secmeta) +│ └── notifications/ # Teams webhook notifications +│ +tests/ # Mirrors src/ structure +``` Python style - Python 3.14 @@ -19,8 +46,8 @@ Python style Patterns - Classes with `__init__` cannot throw exceptions - Use private methods (`_method_name`) for 
internal class helpers -- All info logs must start with "Security workflow -" prefix -- Never disable pylint behaviour in the code +- All logs must start with "<domain> -" prefix (e.g., "Security -") +- Never disable pylint behavior in the code Testing - Mirror src structure: `src/security/module.py` -> `tests/security/test_module.py` diff --git a/.github/workflows/aquasec-branch-comparison.yml b/.github/workflows/aquasec-branch-comparison.yml deleted file mode 100644 index 1e560a3..0000000 --- a/.github/workflows/aquasec-branch-comparison.yml +++ /dev/null @@ -1,77 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -name: AquaSec Branch Comparison - -on: - pull_request: - types: [ opened, synchronize, reopened ] - -concurrency: - group: aquasec-branch-comparison-${{ github.event.pull_request.number }} - cancel-in-progress: true - -permissions: - contents: read - pull-requests: write - -jobs: - branch-comparison: - name: AquaSec Branch Comparison - if: ${{ !github.event.pull_request.head.repo.fork }} - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - with: - persist-credentials: false - fetch-depth: 0 - - - name: Compare branches - id: aquasec - uses: AbsaOSS/aquasec-scan-results@15ee405515a000288b4ae9cdcb9943ea974f74b7 - with: - aqua-key: ${{ secrets.AQUA_KEY }} - aqua-secret: ${{ secrets.AQUA_SECRET }} - group-id: ${{ secrets.AQUA_GROUP_ID }} - repository-id: ${{ secrets.AQUA_REPOSITORY_ID }} - dev-branch-comparison: 'true' - - - name: Find existing PR comment - if: always() && steps.aquasec.outputs.comparison-summary-file != '' - uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad - id: find-comment - with: - issue-number: ${{ github.event.pull_request.number }} - comment-author: 'github-actions[bot]' - body-includes: '' - - - name: Post or update PR comment - if: always() && steps.aquasec.outputs.comparison-summary-file != '' - uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 - with: - issue-number: ${{ github.event.pull_request.number }} - comment-id: ${{ steps.find-comment.outputs.comment-id }} - edit-mode: replace - body-path: ${{ steps.aquasec.outputs.comparison-summary-file }} - - - name: Upload comparison summary as artifact - if: always() && steps.aquasec.outputs.comparison-summary-file != '' - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f - with: - name: aquasec-comparison-summary-pr-${{ github.event.pull_request.number }} - path: ${{ steps.aquasec.outputs.comparison-summary-file }} - retention-days: 7 
diff --git a/.github/workflows/aquasec-scan.yml b/.github/workflows/aquasec-scan.yml index a32dc53..dc96f28 100644 --- a/.github/workflows/aquasec-scan.yml +++ b/.github/workflows/aquasec-scan.yml @@ -14,12 +14,9 @@ # limitations under the License. # -# Reusable workflow – Aquasec Scan + Security Alerts to Issues. -# -# Called from application repositories via workflow_call. -# The caller triggers on schedule / workflow_dispatch and passes the required secrets. +# SECURITY reusable workflow – AquaSec Scan + Security Alerts to Issues. -name: Aquasec Scan +name: AquaSec Scan on: workflow_call: @@ -29,16 +26,17 @@ on: required: false type: boolean default: false + severity-priority-map: description: > Comma-separated severity=priority pairs that map alert severities to - priority values on the GitHub Project (e.g. - 'Critical=Blocker,High=Urgent,Medium=Normal'). Only listed severities - get a priority; unlisted ones are left empty. When not set, priority + priority values on the GitHub Project (e.g.'Critical=Blocker,High=Urgent,Medium=Normal'). + Only listed severities get a priority. Unlisted ones are left empty. When not set, priority is skipped entirely. required: false type: string default: '' + project-number: description: > GitHub Projects V2 number (org-level) where a Priority single-select @@ -47,6 +45,7 @@ on: required: false type: number default: 0 + project-org: description: > GitHub organisation that owns the Projects V2 board. Use when the @@ -55,6 +54,7 @@ on: required: false type: string default: '' + secrets: AQUA_KEY: required: true @@ -66,28 +66,20 @@ on: required: true TEAMS_WEBHOOK_URL: required: false - GH_PROJECT_ONLY_TOKEN: - description: > - Classic PAT with 'project' scope on an account that is a member of the - org that owns the ProjectV2 board. Required only when the project lives - in a different organisation than the calling repository. When omitted, - github.token is used (works only for same-org projects). 
- required: false permissions: contents: read actions: read issues: write security-events: write - repository-projects: write jobs: aquasec-scan: - name: Aquasec Scan + name: AquaSec Scan runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd with: persist-credentials: false fetch-depth: 0 @@ -102,8 +94,8 @@ jobs: repository-id: ${{ secrets.AQUA_REPOSITORY_ID }} verbose-logging: ${{ inputs.verbose-logging }} - - name: Upload Scan Results to GitHub Security - uses: github/codeql-action/upload-sarif@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 + - name: Upload scan results to GitHub Security and quality + uses: github/codeql-action/upload-sarif@7434149006143a4d75b82a2f411ef15b03ccc2d7 with: sarif_file: ${{ steps.aquasec.outputs.nightscan-sarif-file }} category: aquasec @@ -113,8 +105,8 @@ jobs: needs: aquasec-scan runs-on: ubuntu-latest steps: - - name: Checkout security scripts - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 + - name: Checkout code + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd with: repository: AbsaOSS/organizational-workflows ref: master @@ -122,7 +114,7 @@ jobs: persist-credentials: false - name: Set up Python - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 with: python-version: '3.14' cache: 'pip' @@ -131,13 +123,13 @@ jobs: - name: Install dependencies run: pip install -r org-workflows/requirements.txt - - name: Run alert-to-issue sync + - name: Create issues from security alerts env: + PYTHONPATH: org-workflows/src GH_TOKEN: ${{ github.token }} - GH_PROJECT_ONLY_TOKEN: ${{ secrets.GH_PROJECT_ONLY_TOKEN }} TEAMS_WEBHOOK_URL: ${{ secrets.TEAMS_WEBHOOK_URL }} SEVERITY_PRIORITY_MAP: ${{ inputs.severity-priority-map }} PROJECT_NUMBER: ${{ inputs.project-number }} PROJECT_ORG: ${{ inputs.project-org 
}} run: | - python3 org-workflows/src/security/sync_security_alerts.py + python3 org-workflows/src/security/main.py diff --git a/.github/workflows/remove-adept-to-close-on-issue-close.yml b/.github/workflows/remove-adept-to-close-on-issue-close.yml deleted file mode 100644 index 88acb4d..0000000 --- a/.github/workflows/remove-adept-to-close-on-issue-close.yml +++ /dev/null @@ -1,73 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Reusable workflow – Remove sec:adept-to-close label when an issue is closed. -# -# Called from application repositories via workflow_call. -# The caller must trigger on `issues: [closed]`. -# Note: for `workflow_call`, the called workflow receives the same event payload as the caller, -# so `context.payload` (aka `github.event`) is populated without needing to "forward" it via inputs. - -name: Remove sec:adept-to-close on close - -on: - workflow_call: - -permissions: - issues: write - -jobs: - cleanup-label: - runs-on: ubuntu-latest - steps: - - name: Remove label when conditions match - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd - with: - script: | - const issue = context.payload.issue; - - // Safety: ignore PRs (they can appear as issues in GitHub UI) - if (issue.pull_request) { - core.info('Skipping: payload refers to a pull request, not an issue.'); - return; - } - - const labels = (issue.labels ?? []) - .map(l => (typeof l === 'string' ? 
l : l?.name)) - .filter(Boolean); - - const hasScopeSecurity = labels.includes('scope:security'); - const hasTechDebt = labels.includes('type:tech-debt'); - const hasAdeptToClose = labels.includes('sec:adept-to-close'); - - if (!hasScopeSecurity || !hasTechDebt) { - core.info( - `Skipping: required labels missing (scope:security=${hasScopeSecurity}, type:tech-debt=${hasTechDebt}).` - ); - return; - } - - if (!hasAdeptToClose) { - core.info('No-op: label sec:adept-to-close is not present on the issue.'); - return; - } - - await github.rest.issues.removeLabel({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: issue.number, - name: 'sec:adept-to-close', - }); diff --git a/README.md b/README.md index 19fd0c0..ffbf386 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ docs/ # per-solution documentation ## Shared workflows Application repositories adopt a solution by adding a short **caller workflow** that delegates to the reusable workflow in this repo. -Ready-to-copy example caller workflows are located in [`docs/security/example_workflows/`](docs/security/example_workflows/). +Ready-to-copy example caller workflows are located in [`docs/security/example_workflows/`](docs/security/example_workflow/). See each solution's documentation for details and required secrets. ## Next diff --git a/docs/security/example_workflows/aquasec-night-scan.yml b/docs/security/aquasec-night-scan-example.yml similarity index 66% rename from docs/security/example_workflows/aquasec-night-scan.yml rename to docs/security/aquasec-night-scan-example.yml index 9763091..aa17056 100644 --- a/docs/security/example_workflows/aquasec-night-scan.yml +++ b/docs/security/aquasec-night-scan-example.yml @@ -14,12 +14,9 @@ # limitations under the License. # -# Example caller workflow – add this to your application repository as -# .github/workflows/aquasec-night-scan.yml -# -# It delegates to the reusable workflow in the organizational-workflows repo. 
+# Example caller workflow – Add this to your repository as .github/workflows/aquasec-night-scan.yml -name: Aquasec Night Scan +name: AquaSec Night Scan on: schedule: @@ -35,21 +32,17 @@ permissions: actions: read issues: write security-events: write - repository-projects: write jobs: scan: uses: AbsaOSS/organizational-workflows/.github/workflows/aquasec-scan.yml@c1fa5b54ff24fea071415da89abc4f0506344f01 with: severity-priority-map: 'Critical=Blocker,High=Urgent,Medium=Normal,Low=Minor' - project-number: 42 # Replace with your org's GitHub Project number for priority tracking - project-org: 'my-org' # Replace with the org that owns the project (if different from repo org) + project-number: 42 + project-org: 'my-org' secrets: AQUA_KEY: ${{ secrets.AQUA_KEY }} AQUA_SECRET: ${{ secrets.AQUA_SECRET }} AQUA_GROUP_ID: ${{ secrets.AQUA_GROUP_ID }} AQUA_REPOSITORY_ID: ${{ secrets.AQUA_REPOSITORY_ID }} TEAMS_WEBHOOK_URL: ${{ secrets.TEAMS_WEBHOOK_URL }} - # Required only when project-org differs from this repository's org. - # See docs/security/security.md – "Cross-org project token" for how to create it. 
- GH_PROJECT_ONLY_TOKEN: ${{ secrets.GH_PROJECT_ONLY_TOKEN }} diff --git a/docs/security/security.md b/docs/security/security.md index 4e32d9c..9e56441 100644 --- a/docs/security/security.md +++ b/docs/security/security.md @@ -166,7 +166,7 @@ The caller needs the following **repository secrets** configured: | `TEAMS_WEBHOOK_URL` | no | Teams Incoming Webhook URL for new/reopened issue alerts | | `GH_PROJECT_ONLY_TOKEN` | no (required for cross-org projects) | Classic PAT with `project` scope on an account that is a member of the org owning the ProjectV2 board – see [Cross-org project token](#cross-org-project-token) | -Example caller (already available in [aquasec-night-scan.yml](/docs/security/example_workflows/aquasec-night-scan.yml)): +Example caller (already available in [aquasec-night-scan.yml](/docs/security/aquasec-night-scan-example.yml)): ```yaml name: Aquasec Night Scan @@ -246,7 +246,7 @@ The reusable workflow forwards it to the Python script as the `GH_PROJECT_ONLY_T #### Remove sec:adept-to-close on close -Example caller (already available in [remove-adept-to-close-on-issue-close.yml](/docs/security/example_workflows/remove-adept-to-close-on-issue-close.yml)): +Example caller (already available in [remove-adept-to-close-on-issue-close.yml](/docs/security/example_workflow/remove-resolved-finding-label.yml)): ```yaml name: Remove sec:adept-to-close on close @@ -260,7 +260,7 @@ permissions: jobs: remove-label: - uses: AbsaOSS/organizational-workflows/.github/workflows/remove-adept-to-close-on-issue-close.yml@master + uses: AbsaOSS/organizational-workflows/.github/workflows/remove-resolved-finding-label.yml@master ``` > **Note:** The calling repository must grant the permissions the reusable workflow needs (listed in each workflow file). For cross-organization calls the reusable workflow repository must be set to "Accessible from repositories in the organization" under **Settings → Actions → General**. 
diff --git a/pyproject.toml b/pyproject.toml index b366200..ffde4ee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,6 @@ [tool.pytest.ini_options] -pythonpath = ["src", "src/security"] +pythonpath = ["src"] +addopts = ["--import-mode=importlib"] [tool.black] line-length = 120 @@ -7,7 +8,9 @@ target-version = ['py314'] force-exclude = '''test''' [tool.coverage.run] -omit = ["tests/*"] +omit = [ + "tests/*", +] [tool.mypy] check_untyped_defs = true diff --git a/tests/security/__init__.py b/src/core/__init__.py similarity index 100% rename from tests/security/__init__.py rename to src/core/__init__.py diff --git a/src/shared/logging_config.py b/src/core/config.py similarity index 70% rename from src/shared/logging_config.py rename to src/core/config.py index 42cfa3d..ed4af68 100644 --- a/src/shared/logging_config.py +++ b/src/core/config.py @@ -14,12 +14,23 @@ # limitations under the License. # -"""Centralised logging configuration for the security tooling.""" +"""Runtime configuration – GitHub Actions environment detection and logging setup.""" import logging +import os import sys +def parse_runner_debug() -> bool: + """Return ``True`` when the GitHub Actions ``RUNNER_DEBUG`` env var is ``'1'``.""" + raw = os.getenv("RUNNER_DEBUG") + if raw is None or raw == "": + return False + if raw not in {"0", "1"}: + raise SystemExit("ERROR: RUNNER_DEBUG must be '0' or '1' when set") + return raw == "1" + + def setup_logging(verbose: bool = False) -> None: """Configure the root logger (DEBUG when *verbose*, else INFO).""" level = logging.DEBUG if verbose else logging.INFO diff --git a/tests/security/utils/__init__.py b/src/core/github/__init__.py similarity index 100% rename from tests/security/utils/__init__.py rename to src/core/github/__init__.py diff --git a/src/core/github/client.py b/src/core/github/client.py new file mode 100644 index 0000000..0227f61 --- /dev/null +++ b/src/core/github/client.py @@ -0,0 +1,43 @@ +# +# Copyright 2026 ABSA Group Limited +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""Subprocess wrappers for the ``gh`` CLI.""" + +import logging +import subprocess + + +def run_cmd( + cmd: list[str], + *, + capture_output: bool = True, +) -> subprocess.CompletedProcess: + """Run *cmd* as a subprocess and return the completed process.""" + return subprocess.run(cmd, check=False, capture_output=capture_output, text=True) + + +def run_gh( + args: list[str], + *, + capture_output: bool = True, +) -> subprocess.CompletedProcess: + """Run a ``gh`` CLI command and return the completed process.""" + cmd = ["gh"] + args + try: + return run_cmd(cmd, capture_output=capture_output) + except FileNotFoundError as exc: + logging.error("gh CLI not found. 
Install and authenticate gh.") + raise SystemExit(1) from exc diff --git a/src/shared/github_issues.py b/src/core/github/issues.py similarity index 99% rename from src/shared/github_issues.py rename to src/core/github/issues.py index 8d31966..cefffc3 100644 --- a/src/shared/github_issues.py +++ b/src/core/github/issues.py @@ -25,8 +25,8 @@ import subprocess import time -from .common import run_gh -from .models import Issue +from .client import run_gh +from ..models import Issue _NOT_FOUND_MARKERS = ( "HTTP 404", diff --git a/src/shared/github_projects.py b/src/core/github/projects.py similarity index 96% rename from src/shared/github_projects.py rename to src/core/github/projects.py index 8b7a462..6d73c9c 100644 --- a/src/shared/github_projects.py +++ b/src/core/github/projects.py @@ -21,13 +21,11 @@ import json import logging -import os -from collections.abc import Mapping from dataclasses import dataclass from typing import Any -from .common import run_gh -from .priority import resolve_priority +from .client import run_gh +from ..priority import resolve_priority # --------------------------------------------------------------------------- # Data structures @@ -52,21 +50,11 @@ class ProjectPriorityField: def _run_graphql(query: str, variables: dict[str, Any] | None = None) -> dict[str, Any] | None: - """Execute a GraphQL query via ``gh api graphql`` and return parsed JSON. - - When ``GH_PROJECT_ONLY_TOKEN`` is set in the environment the GraphQL call is made - with that token instead of the default ``GH_TOKEN``. This allows cross-org - project access while the rest of the pipeline continues to use the scoped - ``github.token``. 
- """ + """Execute a GraphQL query via ``gh api graphql`` and return parsed JSON.""" args = ["api", "graphql", "-f", f"query={query}"] for k, v in (variables or {}).items(): args += ["-F", f"{k}={v}"] - env: Mapping[str, str] | None = None - project_token = os.environ.get("GH_PROJECT_ONLY_TOKEN", "") - if project_token: - env = {**os.environ, "GH_TOKEN": project_token} - res = run_gh(args, env=env) + res = run_gh(args) if res.returncode != 0: logging.warning(f"GraphQL call failed: {res.stderr}") return None diff --git a/src/shared/common.py b/src/core/helpers.py similarity index 51% rename from src/shared/common.py rename to src/core/helpers.py index 6f175f6..af42f6e 100644 --- a/src/shared/common.py +++ b/src/core/helpers.py @@ -14,29 +14,13 @@ # limitations under the License. # -"""Low-level utilities – date helpers, hashing, -path normalisation, and subprocess wrappers for the ``gh`` CLI. -""" +"""Pure utility functions – date helpers, hashing, and path normalisation.""" import hashlib -import logging -import os import re -import subprocess -from collections.abc import Mapping from datetime import datetime, timezone -def parse_runner_debug() -> bool: - """Return ``True`` when the GitHub Actions ``RUNNER_DEBUG`` env var is ``'1'``.""" - raw = os.getenv("RUNNER_DEBUG") - if raw is None or raw == "": - return False - if raw not in {"0", "1"}: - raise SystemExit("ERROR: RUNNER_DEBUG must be '0' or '1' when set") - return raw == "1" - - def utc_today() -> str: """Return today's date in UTC as an ISO-8601 string (``YYYY-MM-DD``).""" return datetime.now(timezone.utc).date().isoformat() @@ -57,7 +41,7 @@ def sha256_hex(text: str) -> str: def normalize_path(path: str | None) -> str: - """Normalise a file path to forward-slash, no leading ``./`` or ``/``.""" + """Normalize a file path to forward-slash, no leading ``./`` or ``/``.""" if not path: return "" p = path.replace("\\", "/").strip() @@ -66,28 +50,3 @@ def normalize_path(path: str | None) -> str: p = p.lstrip("/") 
p = re.sub(r"/+", "/", p) return p - - -def run_cmd( - cmd: list[str], - *, - capture_output: bool = True, - env: Mapping[str, str] | None = None, -) -> subprocess.CompletedProcess: - """Run *cmd* as a subprocess and return the completed process.""" - return subprocess.run(cmd, check=False, capture_output=capture_output, text=True, env=env) - - -def run_gh( - args: list[str], - *, - capture_output: bool = True, - env: Mapping[str, str] | None = None, -) -> subprocess.CompletedProcess: - """Run a ``gh`` CLI command and return the completed process.""" - cmd = ["gh"] + args - try: - return run_cmd(cmd, capture_output=capture_output, env=env) - except FileNotFoundError as exc: - logging.error("gh CLI not found. Install and authenticate gh.") - raise SystemExit(1) from exc diff --git a/src/shared/models.py b/src/core/models.py similarity index 100% rename from src/shared/models.py rename to src/core/models.py diff --git a/src/shared/priority.py b/src/core/priority.py similarity index 100% rename from src/shared/priority.py rename to src/core/priority.py diff --git a/src/shared/templates.py b/src/core/rendering.py similarity index 100% rename from src/shared/templates.py rename to src/core/rendering.py diff --git a/src/shared/__init__.py b/src/security/__init__.py similarity index 90% rename from src/shared/__init__.py rename to src/security/__init__.py index 32f59a7..ebfbdd3 100644 --- a/src/shared/__init__.py +++ b/src/security/__init__.py @@ -13,5 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -"""Shared utilities reusable across workflow solutions.""" diff --git a/src/security/utils/__init__.py b/src/security/alerts/__init__.py similarity index 88% rename from src/security/utils/__init__.py rename to src/security/alerts/__init__.py index b1d4a5a..ebfbdd3 100644 --- a/src/security/utils/__init__.py +++ b/src/security/alerts/__init__.py @@ -13,5 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -"""Security-specific utilities for the alert-to-issue promotion pipeline.""" diff --git a/src/security/utils/models.py b/src/security/alerts/models.py similarity index 67% rename from src/security/utils/models.py rename to src/security/alerts/models.py index 3ba4bc0..b1a074f 100644 --- a/src/security/utils/models.py +++ b/src/security/alerts/models.py @@ -14,15 +14,12 @@ # limitations under the License. # -"""Security-specific data models.""" +"""Alert-specific data models.""" from dataclasses import dataclass, field from typing import Any -from shared.github_projects import ProjectPrioritySync -from shared.models import Issue - -from .constants import NOT_AVAILABLE +from security.constants import NOT_AVAILABLE @dataclass @@ -37,6 +34,7 @@ class AlertMetadata: alert_url: str = "" rule_id: str = "" rule_name: str = "" + rule_description: str = "" severity: str = "" confidence: str = "" tags: list[str] = field(default_factory=list) @@ -54,6 +52,7 @@ class AlertMetadata: def __post_init__(self) -> None: self.rule_id = (self.rule_id or "").strip() self.rule_name = (self.rule_name or "").strip() + self.rule_description = (self.rule_description or "").strip() self.severity = (self.severity or "").strip() or "unknown" self.state = (self.state or "").lower().strip() self.tool = (self.tool or "").strip() @@ -153,98 +152,3 @@ class LoadedAlerts: repo_full: str open_by_number: dict[int, Alert] - - -@dataclass -class IssueIndex: - """In-memory indexes for fast issue lookup by fingerprint and rule_id.""" - - 
by_fingerprint: dict[str, Issue] - parent_by_rule_id: dict[str, Issue] - - -@dataclass -class NotifiedIssue: - """Tracks a new or reopened child issue for Teams notification.""" - - repo: str - issue_number: int - severity: str - category: str - state: str # "new" or "reopen" - tool: str - - -@dataclass -class SeverityChange: - """Records a parent issue whose severity changed between syncs.""" - - repo: str - issue_number: int - rule_id: str - old_severity: str - new_severity: str - - -# Ordered from lowest to highest so we can compute direction. -SEVERITY_ORDER: dict[str, int] = { - "unknown": 0, - "low": 1, - "medium": 2, - "high": 3, - "critical": 4, -} - - -def severity_direction(old: str, new: str) -> str: - """Return an emoji+label describing the direction of a severity change.""" - old_rank = SEVERITY_ORDER.get(old.lower(), -1) - new_rank = SEVERITY_ORDER.get(new.lower(), -1) - if new_rank > old_rank: - return "⬆️ escalated" - if new_rank < old_rank: - return "⬇️ de-escalated" - return "↔️ unchanged" - - -@dataclass -class SyncResult: - """Aggregated output of a full sync run.""" - - notifications: list[NotifiedIssue] - severity_changes: list[SeverityChange] - - -@dataclass -class AlertContext: - """Per-alert data extracted in ``ensure_issue`` and passed to child handlers.""" - - alert: Alert - alert_number: int - fingerprint: str - occurrence_fp: str - repo: str - first_seen: str - last_seen: str - tool: str - rule_id: str - rule_name: str - severity: str - cve: str - path: str - start_line: int | None - end_line: int | None - commit_sha: str - - -@dataclass -class SyncContext: - """Shared orchestration state for the sync run.""" - - issues: dict[int, Issue] - index: IssueIndex - dry_run: bool - notifications: list[NotifiedIssue] | None - severity_priority_map: dict[str, str] - priority_sync: ProjectPrioritySync | None - parent_sub_issues_cache: dict[int, set[int]] = field(default_factory=dict) diff --git a/src/security/utils/alert_parser.py 
b/src/security/alerts/parser.py similarity index 97% rename from src/security/utils/alert_parser.py rename to src/security/alerts/parser.py index f593296..254c051 100644 --- a/src/security/utils/alert_parser.py +++ b/src/security/alerts/parser.py @@ -24,8 +24,8 @@ import os from enum import StrEnum -from shared.common import sha256_hex -from .models import Alert, AlertDetails, AlertMetadata, LoadedAlerts, RuleDetails +from core.helpers import sha256_hex +from security.alerts.models import Alert, AlertDetails, AlertMetadata, LoadedAlerts, RuleDetails class AlertMessageKey(StrEnum): diff --git a/src/security/check_labels.py b/src/security/check_labels.py index 82f317c..9427281 100644 --- a/src/security/check_labels.py +++ b/src/security/check_labels.py @@ -20,16 +20,10 @@ import argparse import json import logging -import os -import sys -_repo_root = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")) -if _repo_root not in sys.path: - sys.path.insert(0, _repo_root) - -from shared.common import run_gh -from shared.logging_config import setup_logging -from utils.constants import ( +from core.config import setup_logging +from core.github.client import run_gh +from security.constants import ( LABEL_EPIC, LABEL_SCOPE_SECURITY, LABEL_SEC_ADEPT_TO_CLOSE, diff --git a/src/security/collect_alert.py b/src/security/collect_alert.py index 63d7f75..110fec3 100644 --- a/src/security/collect_alert.py +++ b/src/security/collect_alert.py @@ -23,15 +23,10 @@ import os import re import shutil -import sys from datetime import datetime, timezone -_repo_root = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")) -if _repo_root not in sys.path: - sys.path.insert(0, _repo_root) - -from shared.common import parse_runner_debug, run_gh -from shared.logging_config import setup_logging +from core.config import parse_runner_debug, setup_logging +from core.github.client import run_gh logger = logging.getLogger(__name__) @@ -132,6 +127,7 @@ 
def _normalise_alert(alert: dict) -> dict: "alert_url": alert.get("html_url"), "rule_id": rule.get("id"), "rule_name": rule.get("name"), + "rule_description": rule.get("description"), "severity": rule.get("security_severity_level"), "confidence": rule.get("severity"), "tags": rule.get("tags") or [], diff --git a/src/security/utils/constants.py b/src/security/constants.py similarity index 94% rename from src/security/utils/constants.py rename to src/security/constants.py index ed32aed..ae85d38 100644 --- a/src/security/utils/constants.py +++ b/src/security/constants.py @@ -21,9 +21,6 @@ LABEL_EPIC = "epic" LABEL_SEC_ADEPT_TO_CLOSE = "sec:adept-to-close" -SEC_EVENT_OPEN = "open" -SEC_EVENT_REOPEN = "reopen" - SECMETA_TYPE_PARENT = "parent" SECMETA_TYPE_CHILD = "child" diff --git a/src/security/derive_team_security_metrics.py b/src/security/derive_team_security_metrics.py deleted file mode 100644 index 8316e66..0000000 --- a/src/security/derive_team_security_metrics.py +++ /dev/null @@ -1,208 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# pylint: skip-file -""" -Derive team security metrics from issue snapshots (Issues-only). - -Resurfacing definition (B): -- A fingerprint is considered 'resurfaced' when its occurrence_count transitions - from 0 in the previous snapshot to >0 in the current snapshot. 
- -Inputs: -- data/issues_snapshot.json (required) -- data/issues_snapshot.prev.json (optional; if missing, resurfacing cannot be computed) - -Outputs: -- reports/metrics.json -- reports/summary.md (appends derived metrics) -""" - -import json -import logging -import os -from datetime import datetime -from typing import Any, Dict, List, Optional - -SNAPSHOT_CUR = os.environ.get("SNAPSHOT_CURRENT", "data/issues_snapshot.json") -SNAPSHOT_PREV = os.environ.get("SNAPSHOT_PREVIOUS", "data/issues_snapshot.prev.json") - -OUT_METRICS_JSON = os.environ.get("OUT_METRICS_JSON", "reports/metrics.json") -OUT_SUMMARY_MD = os.environ.get("OUT_SUMMARY_MD", "reports/summary.md") - - -def require_env(key: str) -> str: - """Return the value of environment variable *key*, or exit.""" - try: - return os.environ[key] - except KeyError as exc: - raise SystemExit(f"Missing required environment variable: {key}") from exc - - -TEAM_SLUG = require_env("GITHUB_TEAM_SLUG") - - -def _safe_int(v: Any, default: int = 0) -> int: - """Coerce *v* to ``int``, returning *default* on failure.""" - try: - if v is None: - return default - if isinstance(v, int): - return v - s = str(v).strip() - if s == "": - return default - return int(float(s)) - except Exception: - return default - - -def _load_json(path: str) -> Optional[Any]: - """Load and return JSON from *path*, or ``None`` if the file is missing.""" - if not os.path.exists(path): - return None - with open(path, "r", encoding="utf-8") as f: - return json.load(f) - - -def _index_by_fingerprint(snapshot: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]: - """Index snapshot items by their secmeta fingerprint.""" - idx: Dict[str, Dict[str, Any]] = {} - for item in snapshot: - fp = (item.get("secmeta") or {}).get("fingerprint") - if not fp: - # If secmeta is missing or malformed, it cannot participate in fingerprint-level stats. 
- continue - idx[fp] = item - return idx - - -def _severity_from_labels(labels: List[str]) -> str: - """Extract the severity token from ``sec:sev/`` labels.""" - for l in labels: - if l.startswith("sec:sev/"): - return l.split("/", 1)[1] - return "unknown" - - -def main() -> None: - """Derive and write team security metrics from issue snapshots.""" - # TODO decide about changes related to this script - logging.warning( - "This script is deprecated and may be removed in the future. Please refer to the updated documentation for deriving security metrics." - ) - return - - cur = _load_json(SNAPSHOT_CUR) - if cur is None: - raise SystemExit(f"Missing current snapshot: {SNAPSHOT_CUR}") - - if not isinstance(cur, list): - raise SystemExit(f"Current snapshot is not a list: {SNAPSHOT_CUR}") - - prev = _load_json(SNAPSHOT_PREV) - - cur_idx = _index_by_fingerprint(cur) - prev_idx = _index_by_fingerprint(prev) if isinstance(prev, list) else {} - - # Basic counts - total = len(cur) - by_sev: Dict[str, int] = {} - postponed = 0 - needs_review = 0 - - for item in cur: - labels = item.get("labels") or [] - sev = _severity_from_labels(labels) - by_sev[sev] = by_sev.get(sev, 0) + 1 - if "sec:state/postponed" in labels: - postponed += 1 - if "sec:state/needs-review" in labels: - needs_review += 1 - - # Resurfacing (B): prev occurrence_count == 0 and current > 0 - resurfaced: List[Dict[str, Any]] = [] - if prev_idx: - for fp, cur_item in cur_idx.items(): - cur_occ = _safe_int((cur_item.get("secmeta") or {}).get("occurrence_count"), 0) - prev_item = prev_idx.get(fp) - prev_occ = _safe_int(((prev_item or {}).get("secmeta") or {}).get("occurrence_count"), 0) - if prev_item is not None and prev_occ == 0 and cur_occ > 0: - resurfaced.append( - { - "fingerprint": fp, - "repo": cur_item.get("repo"), - "issue_number": cur_item.get("issue_number"), - "title": cur_item.get("title"), - "severity": _severity_from_labels(cur_item.get("labels") or []), - "prev_occurrence_count": prev_occ, - 
"current_occurrence_count": cur_occ, - } - ) - - metrics = { - "team": TEAM_SLUG, - "generated_at_utc": datetime.utcnow().isoformat() + "Z", - "snapshot_current": SNAPSHOT_CUR, - "snapshot_previous": SNAPSHOT_PREV if prev_idx else None, - "counts": { - "total_security_issues": total, - "postponed": postponed, - "needs_review": needs_review, - "by_severity": dict(sorted(by_sev.items())), - }, - "resurfaced": { - "definition": "B: fingerprint occurrence_count from 0 (previous snapshot) to >0 (current snapshot)", - "count": len(resurfaced), - "items": resurfaced, - }, - } - - os.makedirs(os.path.dirname(OUT_METRICS_JSON), exist_ok=True) - os.makedirs(os.path.dirname(OUT_SUMMARY_MD), exist_ok=True) - - with open(OUT_METRICS_JSON, "w", encoding="utf-8") as f: - json.dump(metrics, f, indent=2) - - # Append to (or create) summary.md - summary_lines: List[str] = [] - summary_lines.append(f"\n## Derived metrics\n") - summary_lines.append(f"Generated at: {metrics['generated_at_utc']}\n") - if metrics["snapshot_previous"] is None: - summary_lines.append("- Resurfacing: not computed (no previous snapshot found)\n") - else: - summary_lines.append(f"- Resurfaced fingerprints (definition B): {metrics['resurfaced']['count']}\n") - if resurfaced: - summary_lines.append("\n### Resurfaced items\n") - for r in resurfaced[:50]: - summary_lines.append( - f"- {r['severity']} {r['repo']}#{r['issue_number']} (occ {r['prev_occurrence_count']} -> {r['current_occurrence_count']}): {r['title']}\n" - ) - if len(resurfaced) > 50: - summary_lines.append(f"- ... and {len(resurfaced) - 50} more\n") - - # Ensure summary exists; if not, create a minimal header. 
- if not os.path.exists(OUT_SUMMARY_MD): - with open(OUT_SUMMARY_MD, "w", encoding="utf-8") as f: - f.write(f"# Security summary for team `{TEAM_SLUG}`\n\n") - - with open(OUT_SUMMARY_MD, "a", encoding="utf-8") as f: - f.writelines(summary_lines) - - -if __name__ == "__main__": - main() diff --git a/src/security/extract_team_security_stats.py b/src/security/extract_team_security_stats.py deleted file mode 100644 index da1b6ac..0000000 --- a/src/security/extract_team_security_stats.py +++ /dev/null @@ -1,206 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# pylint: skip-file -""" -Extract security statistics per GitHub team. 
- -Model: -- Source of truth: GitHub Issues -- Scope: repositories owned by a given GitHub team -- Unit: one Issue = one logical vulnerability (fingerprint) - -Outputs: -- data/issues_snapshot.json -- data/events_flat.csv -- reports/summary.md -""" - -import csv -import json -import logging -import os -import re -from datetime import datetime -from github import Github - - -# -------------------- -# Configuration -# -------------------- -def require_env(key: str) -> str: - """Return the value of environment variable *key*, or exit.""" - try: - return os.environ[key] - except KeyError as exc: - raise SystemExit(f"Missing required environment variable: {key}") from exc - - -GITHUB_TOKEN = require_env("GITHUB_TOKEN") -ORG = require_env("GITHUB_ORG") -TEAM_SLUG = require_env("GITHUB_TEAM_SLUG") - -OUT_DATA = "data" -OUT_REPORTS = "reports" - -SEC_LABEL_PREFIX = "sec:" - -SEC_EVENT_RE = re.compile(r"\[sec-event\](.*?)\[/sec-event\]", re.S) -SECMETA_RE = re.compile(r"```secmeta(.*?)```", re.S) - -# -------------------- -# Helpers -# -------------------- - - -def ensure_dirs(): - """Create output directories if they don't exist.""" - os.makedirs(OUT_DATA, exist_ok=True) - os.makedirs(OUT_REPORTS, exist_ok=True) - - -def parse_kv_block(block: str) -> dict: - """Parse a ``key=value``-per-line block into a dict.""" - data = {} - for line in block.splitlines(): - line = line.strip() - if not line or "=" not in line: - continue - k, v = line.split("=", 1) - data[k.strip()] = v.strip() - return data - - -def parse_secmeta(body: str) -> dict: - """Extract the secmeta key-value block from an issue body.""" - match = SECMETA_RE.search(body or "") - if not match: - return {} - return parse_kv_block(match.group(1)) - - -def parse_events(comments): - """Extract ``[sec-event]`` blocks from issue comments.""" - events = [] - for c in comments: - for raw in SEC_EVENT_RE.findall(c.body or ""): - evt = parse_kv_block(raw) - evt["timestamp"] = c.created_at.isoformat() - events.append(evt) 
- return events - - -def issue_has_sec_label(issue): - """Return ``True`` if *issue* carries any ``sec:`` prefixed label.""" - return any(l.name.startswith(SEC_LABEL_PREFIX) for l in issue.labels) - - -# -------------------- -# Main extraction -# -------------------- - - -def main(): - """Extract security statistics from GitHub Issues for the configured team.""" - # TODO decide about changes related to this script - logging.warning( - "This script is deprecated and may be removed in the future. Please refer to the updated documentation for deriving security metrics." - ) - return - - ensure_dirs() - - gh = Github(GITHUB_TOKEN) - org = gh.get_organization(ORG) - team = org.get_team_by_slug(TEAM_SLUG) - - repos = list(team.get_repos()) - - snapshot = [] - flat_events = [] - - for repo in repos: - issues = repo.get_issues(state="all") - for issue in issues: - # Skip PRs that may be returned by the issues API - if getattr(issue, "pull_request", None): - continue - - if not issue_has_sec_label(issue): - continue - - secmeta = parse_secmeta(issue.body or "") - events = parse_events(issue.get_comments()) - - snapshot.append( - { - "repo": repo.full_name, - "issue_number": issue.number, - "title": issue.title, - "state": issue.state, - "labels": [l.name for l in issue.labels], - "secmeta": secmeta, - "created_at": issue.created_at.isoformat(), - "updated_at": issue.updated_at.isoformat(), - "event_count": len(events), - } - ) - - for e in events: - fp = secmeta.get("fingerprint") if secmeta else None - if not fp: - continue # ignore events without a fingerprint - flat_events.append( - { - "repo": repo.full_name, - "issue_number": issue.number, - "fingerprint": fp, - "action": e.get("action"), - "reason": e.get("reason"), - "timestamp": e.get("timestamp"), - } - ) - - # Write snapshot - with open(os.path.join(OUT_DATA, "issues_snapshot.json"), "w") as f: - json.dump(snapshot, f, indent=2) - - # Write flat events - with open(os.path.join(OUT_DATA, "events_flat.csv"), "w", 
newline="") as f: - writer = csv.DictWriter(f, fieldnames=["repo", "issue_number", "fingerprint", "action", "reason", "timestamp"]) - writer.writeheader() - writer.writerows(flat_events) - - # Summary report - total = len(snapshot) - by_sev = {} - - for item in snapshot: - sev = next((l for l in item["labels"] if l.startswith("sec:sev/")), "sec:sev/unknown") - by_sev[sev] = by_sev.get(sev, 0) + 1 - - with open(os.path.join(OUT_REPORTS, "summary.md"), "w") as f: - f.write(f"# Security summary for team `{TEAM_SLUG}`\n\n") - f.write(f"Generated at: {datetime.utcnow().isoformat()} UTC\n\n") - f.write(f"## Total security issues: {total}\n\n") - f.write("## By severity\n\n") - for sev, cnt in sorted(by_sev.items()): - f.write(f"- {sev}: {cnt}\n") - - -if __name__ == "__main__": - main() diff --git a/docs/security/example_workflows/remove-adept-to-close-on-issue-close.yml b/src/security/issues/__init__.py similarity index 53% rename from docs/security/example_workflows/remove-adept-to-close-on-issue-close.yml rename to src/security/issues/__init__.py index 60e5998..ebfbdd3 100644 --- a/docs/security/example_workflows/remove-adept-to-close-on-issue-close.yml +++ b/src/security/issues/__init__.py @@ -13,22 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# Example caller workflow – add this to your application repository as -# .github/workflows/remove-adept-to-close-on-issue-close.yml -# -# It delegates to the reusable workflow in the organizational-workflows repo. -# Adjust and to match your organization. 
- -name: Remove sec:adept-to-close on close - -on: - issues: - types: [closed] - -permissions: - issues: write - -jobs: - remove-label: - uses: AbsaOSS/organizational-workflows/.github/workflows/remove-adept-to-close-on-issue-close.yml@master diff --git a/src/security/utils/issue_builder.py b/src/security/issues/builder.py similarity index 85% rename from src/security/utils/issue_builder.py rename to src/security/issues/builder.py index 38fad8c..29ac998 100644 --- a/src/security/utils/issue_builder.py +++ b/src/security/issues/builder.py @@ -18,17 +18,17 @@ from typing import Any -from shared.common import iso_date -from shared.templates import render_markdown_template +from core.helpers import iso_date +from core.rendering import render_markdown_template -from .constants import NOT_AVAILABLE, SECMETA_TYPE_PARENT -from .models import Alert -from .secmeta import render_secmeta -from .templates import CHILD_BODY_TEMPLATE, PARENT_BODY_TEMPLATE +from security.constants import NOT_AVAILABLE, SECMETA_TYPE_PARENT +from security.alerts.models import Alert +from security.issues.secmeta import render_secmeta +from security.issues.templates import CHILD_BODY_TEMPLATE, PARENT_BODY_TEMPLATE def _synthesize_references(alert: Alert) -> str: - """Build a markdown bullet list from metadata URLs when rule_details.references is absent.""" + """Build a Markdown bullet list from metadata URLs when rule_details.references is absent.""" lines = [] if alert.metadata.help_uri: lines.append(f"- {alert.metadata.help_uri}") @@ -38,7 +38,7 @@ def _synthesize_references(alert: Alert) -> str: def _synthesize_owasp(alert: Alert) -> str: - """Build a markdown bullet list from OWASP-related tags when rule_details.owasp is absent.""" + """Build a Markdown bullet list from OWASP-related tags when rule_details.owasp is absent.""" lines = [f"- {tag}" for tag in alert.metadata.tags if "owasp" in tag.lower()] return "\n".join(lines) if lines else NOT_AVAILABLE @@ -87,7 +87,7 @@ def 
build_parent_template_values(alert: Alert, *, rule_id: str, severity: str) - return { "category": alert.metadata.rule_name or NOT_AVAILABLE, "avd_id": alert.alert_details.vulnerability or rule_id, - "title": rule_id, + "title": alert.metadata.rule_description or rule_id, "severity": severity, "published_date": iso_date(alert.rule_details.published_date or NOT_AVAILABLE), "package_name": alert.rule_details.package_name, @@ -103,16 +103,10 @@ def build_parent_issue_body(alert: Alert) -> str: repo_full = alert.repo secmeta: dict[str, str] = { - "schema": "1", "type": SECMETA_TYPE_PARENT, "repo": repo_full, - "source": "code_scanning", - "tool": alert.metadata.tool, - "severity": severity, "rule_id": rule_id, - "first_seen": iso_date(alert.metadata.created_at), - "last_seen": iso_date(alert.metadata.updated_at), - "postponed_until": "", + "severity": severity, } values = build_parent_template_values(alert, rule_id=rule_id, severity=severity) @@ -120,10 +114,15 @@ def build_parent_issue_body(alert: Alert) -> str: return render_secmeta(secmeta) + "\n\n" + human_body -def build_issue_title(rule_name: str | None, rule_id: str, fingerprint: str) -> str: +def build_issue_title( + rule_description: str | None, + rule_name: str | None, + rule_id: str, + fingerprint: str, +) -> str: """Build the title string for a child issue.""" prefix = fingerprint[:8] if fingerprint else NOT_AVAILABLE - summary = (rule_name or rule_id or "Security finding").strip() or "Security finding" + summary = (rule_description or rule_name or rule_id or "Security finding").strip() or "Security finding" return f"[SEC][FP={prefix}] {summary}" @@ -136,7 +135,7 @@ def build_child_issue_body(alert: Alert) -> str: vulnerability = alert.alert_details.vulnerability avd_id = vulnerability if vulnerability.startswith("AVD-") else NOT_AVAILABLE - title = alert.metadata.rule_id + title = alert.metadata.rule_description or alert.metadata.rule_id scm_file = alert.alert_details.scm_file start_line = 
alert.metadata.start_line @@ -169,7 +168,6 @@ def build_child_issue_body(alert: Alert) -> str: "installed_version": alert.alert_details.installed_version, "fixed_version": alert.rule_details.fixed_version, "reachable": alert.alert_details.reachable, - "scan_date": iso_date(alert.alert_details.scan_date or alert.metadata.updated_at or NOT_AVAILABLE), "first_seen": iso_date(alert.alert_details.first_seen or alert.metadata.created_at or NOT_AVAILABLE), } return render_markdown_template(CHILD_BODY_TEMPLATE, values).strip() + "\n" diff --git a/src/security/issues/models.py b/src/security/issues/models.py new file mode 100644 index 0000000..3bdaa9b --- /dev/null +++ b/src/security/issues/models.py @@ -0,0 +1,117 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +"""Issue-related and sync-orchestration data models.""" + +from dataclasses import dataclass, field + +from core.github.projects import ProjectPrioritySync +from core.models import Issue + +from security.alerts.models import Alert + + +@dataclass +class IssueIndex: + """In-memory indexes for fast issue lookup by fingerprint and rule_id.""" + + by_fingerprint: dict[str, Issue] + parent_by_rule_id: dict[str, Issue] + + +@dataclass +class NotifiedIssue: + """Tracks a new or reopened child issue for Teams notification.""" + + repo: str + issue_number: int + severity: str + category: str + state: str # "new" or "reopen" + tool: str + + +@dataclass +class SeverityChange: + """Records a parent issue whose severity changed between syncs.""" + + repo: str + issue_number: int + rule_id: str + old_severity: str + new_severity: str + + +# Ordered from lowest to highest so we can compute direction. +SEVERITY_ORDER: dict[str, int] = { + "unknown": 0, + "low": 1, + "medium": 2, + "high": 3, + "critical": 4, +} + + +def severity_direction(old: str, new: str) -> str: + """Return an emoji+label describing the direction of a severity change.""" + old_rank = SEVERITY_ORDER.get(old.lower(), -1) + new_rank = SEVERITY_ORDER.get(new.lower(), -1) + if new_rank > old_rank: + return "⬆️ escalated" + if new_rank < old_rank: + return "⬇️ de-escalated" + return "↔️ unchanged" + + +@dataclass +class SyncResult: + """Aggregated output of a full sync run.""" + + notifications: list[NotifiedIssue] + severity_changes: list[SeverityChange] + + +@dataclass +class AlertContext: + """Per-alert data extracted in ``ensure_issue`` and passed to child handlers.""" + + alert: Alert + alert_number: int + fingerprint: str + repo: str + tool: str + rule_id: str + rule_name: str + rule_description: str + severity: str + cve: str + path: str + start_line: int | None + end_line: int | None + commit_sha: str + + +@dataclass +class SyncContext: + """Shared orchestration state for the sync run.""" + + issues: 
dict[int, Issue] + index: IssueIndex + dry_run: bool + notifications: list[NotifiedIssue] | None + severity_priority_map: dict[str, str] + priority_sync: ProjectPrioritySync | None + parent_sub_issues_cache: dict[int, set[int]] = field(default_factory=dict) diff --git a/src/security/utils/secmeta.py b/src/security/issues/secmeta.py similarity index 93% rename from src/security/utils/secmeta.py rename to src/security/issues/secmeta.py index fc1342b..f816ecc 100644 --- a/src/security/utils/secmeta.py +++ b/src/security/issues/secmeta.py @@ -79,22 +79,12 @@ def render_kv_lines( def render_secmeta(secmeta: dict[str, str]) -> str: """Render a secmeta dict as a hidden HTML-comment block for issue bodies.""" preferred_order = [ - "schema", + "type", "fingerprint", "repo", - "source", - "tool", - "severity", - "cve", - "category", "rule_id", - "first_seen", - "last_seen", - "last_seen_commit", - "postponed_until", + "severity", "gh_alert_numbers", - "occurrence_count", - "last_occurrence_fp", ] lines = render_kv_lines(secmeta, preferred_order) return "" diff --git a/src/security/utils/issue_sync.py b/src/security/issues/sync.py similarity index 80% rename from src/security/utils/issue_sync.py rename to src/security/issues/sync.py index e222546..4822b9a 100644 --- a/src/security/utils/issue_sync.py +++ b/src/security/issues/sync.py @@ -19,39 +19,36 @@ and labels orphaned issues for closure. This is the main business-logic module that ties together all other -``utils.*`` modules. +``issues.*`` modules. 
""" import logging -from shared.common import iso_date, normalize_path, utc_today -from shared.github_issues import ( +from core.helpers import normalize_path +from core.github.issues import ( gh_issue_add_labels, gh_issue_add_sub_issue_by_number, - gh_issue_comment, gh_issue_create, gh_issue_edit_body, gh_issue_edit_state, gh_issue_edit_title, gh_issue_get_sub_issue_numbers, ) -from shared.github_projects import ProjectPrioritySync, gh_project_get_priority_field -from shared.models import Issue -from shared.templates import render_markdown_template +from core.github.projects import ProjectPrioritySync, gh_project_get_priority_field +from core.models import Issue +from core.rendering import render_markdown_template -from .alert_parser import compute_occurrence_fp -from .constants import ( +from security.alerts.models import Alert +from security.constants import ( LABEL_EPIC, LABEL_SCOPE_SECURITY, LABEL_SEC_ADEPT_TO_CLOSE, LABEL_TYPE_TECH_DEBT, NOT_AVAILABLE, - SEC_EVENT_OPEN, - SEC_EVENT_REOPEN, SECMETA_TYPE_CHILD, SECMETA_TYPE_PARENT, ) -from .issue_builder import ( +from .builder import ( build_child_issue_body, build_issue_title, build_parent_issue_body, @@ -59,8 +56,7 @@ build_parent_template_values, classify_category, ) -from .models import Alert, AlertContext, IssueIndex, NotifiedIssue, SeverityChange, SyncContext, SyncResult -from .sec_events import render_sec_event, strip_sec_events_from_body +from .models import AlertContext, IssueIndex, NotifiedIssue, SeverityChange, SyncContext, SyncResult from .secmeta import json_list, load_secmeta, parse_json_list, render_secmeta from .templates import PARENT_BODY_TEMPLATE @@ -126,30 +122,11 @@ def maybe_reopen_parent_issue( context, child_issue_number or "", ) - logging.info( - "DRY-RUN: would comment parent reopen sec-event on issue #%d (rule_id=%s)", - parent_issue.number, - rule_id, - ) parent_issue.state = "open" return if gh_issue_edit_state(repo, parent_issue.number, "open"): parent_issue.state = "open" - 
gh_issue_comment( - repo, - parent_issue.number, - render_sec_event( - { - "action": SEC_EVENT_REOPEN, - "seen_at": utc_today(), - "source": "code_scanning", - "rule_id": rule_id, - "context": context, - "child_issue": str(child_issue_number) if child_issue_number else "", - } - ), - ) def _close_resolved_parent_issues( @@ -231,11 +208,7 @@ def ensure_parent_issue( existing = find_parent_issue(index, rule_id=rule_id) if existing is not None: # Keep parent issues aligned to the template as alerts evolve. - existing_secmeta = load_secmeta(existing.body) or {"schema": "1"} - existing_first = existing_secmeta.get("first_seen") or iso_date(alert.metadata.created_at) - existing_last = existing_secmeta.get("last_seen") or iso_date(alert.metadata.updated_at) - first_seen_final = min(existing_first, iso_date(alert.metadata.created_at)) - last_seen_final = max(existing_last, iso_date(alert.metadata.updated_at)) + existing_secmeta = load_secmeta(existing.body) or {} existing_severity = str(existing_secmeta.get("severity") or "unknown") existing_severity_cmp = existing_severity.lower() @@ -252,7 +225,7 @@ def ensure_parent_issue( ) if dry_run: logging.info( - "DRY-RUN: severity change on parent #%d (rule_id=%s): %s \u2192 %s", + "DRY-RUN: severity change on parent #%d (rule_id=%s): %s - %s", existing.number, rule_id, existing_severity_cmp, @@ -265,16 +238,10 @@ def ensure_parent_issue( existing_secmeta.update( { - "schema": existing_secmeta.get("schema") or "1", "type": SECMETA_TYPE_PARENT, "repo": repo_full, - "source": existing_secmeta.get("source") or "code_scanning", - "tool": alert.metadata.tool or existing_secmeta.get("tool") or "", "severity": severity_stored, "rule_id": rule_id, - "first_seen": first_seen_final, - "last_seen": last_seen_final, - "postponed_until": existing_secmeta.get("postponed_until", ""), } ) @@ -287,7 +254,6 @@ def ensure_parent_issue( ).strip() + "\n" ) - rebuilt = strip_sec_events_from_body(rebuilt) # Snapshot the original body on first encounter 
so we can # defer the API call until all alerts have been processed. @@ -329,24 +295,6 @@ def ensure_parent_issue( if num is None: return None - # Parent lifecycle event (human visible): opened/created. - if dry_run: - logging.info("DRY-RUN: would comment parent open sec-event on issue #%d (rule_id=%s)", num, rule_id) - else: - gh_issue_comment( - repo_full, - num, - render_sec_event( - { - "action": SEC_EVENT_OPEN, - "seen_at": iso_date(alert.metadata.created_at), - "source": "code_scanning", - "rule_id": rule_id, - "severity": alert.metadata.severity, - } - ), - ) - created = Issue(number=num, state="open", title=title, body=body) issues[num] = created index.parent_by_rule_id[rule_id] = created @@ -396,29 +344,17 @@ def _handle_new_child_issue( """Create a new child issue for an alert that has no matching issue yet.""" category = classify_category(ctx.alert) secmeta: dict[str, str] = { - "schema": "1", "type": SECMETA_TYPE_CHILD, "fingerprint": ctx.fingerprint, "repo": ctx.repo, - "source": "code_scanning", - "tool": ctx.tool, - "severity": ctx.severity, - "category": category, "rule_id": ctx.rule_id, - "first_seen": ctx.first_seen, - "last_seen": ctx.last_seen, - "last_seen_commit": ctx.commit_sha, - "postponed_until": "", + "severity": ctx.severity, "gh_alert_numbers": json_list([str(ctx.alert_number)]), - "occurrence_count": "1", - "last_occurrence_fp": ctx.occurrence_fp, } - if ctx.cve: - secmeta["cve"] = ctx.cve human_body = build_child_issue_body(ctx.alert) body = render_secmeta(secmeta) + "\n\n" + human_body - title = build_issue_title(ctx.rule_name, ctx.rule_id, ctx.fingerprint) + title = build_issue_title(ctx.rule_description, ctx.rule_name, ctx.rule_id, ctx.fingerprint) if sync.dry_run: labels = [LABEL_SCOPE_SECURITY, LABEL_TYPE_TECH_DEBT] @@ -428,7 +364,7 @@ def _handle_new_child_issue( logging.info( "DRY-RUN: create child alert=%d rule_id=%s sev=%s" " fp=%s tool=%s commit=%s loc=%s title=%r labels=[%s]" - " | secmeta:first_seen=%s last_seen=%s 
occurrence_count=1 gh_alert_numbers=[%d]", + " gh_alert_numbers=[%d]", ctx.alert_number, ctx.rule_id, ctx.severity, @@ -438,8 +374,6 @@ def _handle_new_child_issue( loc, title, ",".join(labels), - ctx.first_seen, - ctx.last_seen, ctx.alert_number, ) if parent_issue is None and ctx.rule_id: @@ -503,24 +437,6 @@ def _handle_new_child_issue( logging.info("Add sub-issue link parent=#%d child=#%d (alert %d)", parent_issue.number, num, ctx.alert_number) gh_issue_add_sub_issue_by_number(ctx.repo, parent_issue.number, num) - gh_issue_comment( - ctx.repo, - num, - render_sec_event( - { - "action": SEC_EVENT_OPEN, - "seen_at": ctx.first_seen, - "source": "code_scanning", - "gh_alert_number": str(ctx.alert_number), - "occurrence_fp": str(ctx.occurrence_fp), - "commit_sha": str(ctx.commit_sha), - "path": str(ctx.path), - "start_line": str(ctx.start_line or ""), - "end_line": str(ctx.end_line or ""), - } - ), - ) - if sync.priority_sync is not None: sync.priority_sync.enqueue(ctx.repo, num, ctx.severity, sync.severity_priority_map) @@ -577,12 +493,9 @@ def _merge_child_secmeta( *, ctx: AlertContext, issue: Issue, -) -> tuple[dict[str, str], bool]: - """Merge incoming alert data into the child issue's secmeta. - - Returns ``(updated_secmeta, new_occurrence)``. 
- """ - secmeta = load_secmeta(issue.body) or {"schema": "1"} +) -> dict[str, str]: + """Merge incoming alert data into the child issue's secmeta.""" + secmeta = load_secmeta(issue.body) or {} secmeta.pop("alert_hash", None) existing_alerts = parse_json_list(secmeta.get("gh_alert_numbers")) @@ -591,40 +504,18 @@ def _merge_child_secmeta( if str(ctx.alert_number) not in existing_alerts: existing_alerts.append(str(ctx.alert_number)) - last_occ_fp = secmeta.get("last_occurrence_fp", "") - occurrence_count = int(secmeta.get("occurrence_count") or "0" or 0) - new_occurrence = bool(ctx.occurrence_fp and ctx.occurrence_fp != last_occ_fp) - if occurrence_count <= 0: - occurrence_count = 1 - if new_occurrence: - occurrence_count += 1 - - existing_first = secmeta.get("first_seen") or ctx.first_seen - existing_last = secmeta.get("last_seen") or ctx.last_seen - first_seen_final = min(existing_first, ctx.first_seen) - last_seen_final = max(existing_last, ctx.last_seen) - secmeta.update( { + "type": SECMETA_TYPE_CHILD, "fingerprint": ctx.fingerprint, "repo": ctx.repo, - "source": secmeta.get("source") or "code_scanning", - "tool": ctx.tool or secmeta.get("tool", ""), - "severity": ctx.severity, - "category": classify_category(ctx.alert) or secmeta.get("category", ""), "rule_id": ctx.rule_id or secmeta.get("rule_id", ""), - "first_seen": first_seen_final, - "last_seen": last_seen_final, - "last_seen_commit": ctx.commit_sha or secmeta.get("last_seen_commit", ""), + "severity": ctx.severity, "gh_alert_numbers": json_list(existing_alerts), - "occurrence_count": str(occurrence_count), - "last_occurrence_fp": ctx.occurrence_fp or last_occ_fp, } ) - if ctx.cve: - secmeta["cve"] = ctx.cve - return secmeta, new_occurrence + return secmeta def _rebuild_and_apply_child_body( @@ -637,7 +528,6 @@ def _rebuild_and_apply_child_body( """Render a fresh child body from *secmeta* + template and apply if changed.""" human_body = build_child_issue_body(ctx.alert) new_body = render_secmeta(secmeta) + 
"\n\n" + human_body - new_body = strip_sec_events_from_body(new_body) if new_body != issue.body: if sync.dry_run: @@ -651,32 +541,6 @@ def _rebuild_and_apply_child_body( issue.body = new_body -def _comment_child_event( - *, - ctx: AlertContext, - sync: SyncContext, - issue: Issue, - reopened: bool, -) -> None: - """Post a reopen sec-event comment on the child issue.""" - if reopened: - if sync.dry_run: - logging.info("DRY-RUN: would comment reopen event on issue #%d (alert %d)", issue.number, ctx.alert_number) - else: - gh_issue_comment( - ctx.repo, - issue.number, - render_sec_event( - { - "action": SEC_EVENT_REOPEN, - "seen_at": utc_today(), - "source": "code_scanning", - "gh_alert_number": str(ctx.alert_number), - } - ), - ) - - def _sync_child_title_and_labels( *, ctx: AlertContext, @@ -684,7 +548,7 @@ def _sync_child_title_and_labels( issue: Issue, ) -> None: """Fix title drift and ensure required labels and priority on the child issue.""" - expected_title = build_issue_title(ctx.rule_name, ctx.rule_id, ctx.fingerprint) + expected_title = build_issue_title(ctx.rule_description, ctx.rule_name, ctx.rule_id, ctx.fingerprint) if expected_title != (issue.title or ""): if sync.dry_run: logging.info( @@ -757,10 +621,9 @@ def _handle_existing_child_issue( if parent_issue is None and ctx.rule_id: parent_issue = find_parent_issue(sync.index, rule_id=ctx.rule_id) - reopened = _maybe_reopen_child(ctx=ctx, sync=sync, issue=issue, parent_issue=parent_issue) - secmeta, _ = _merge_child_secmeta(ctx=ctx, issue=issue) + _maybe_reopen_child(ctx=ctx, sync=sync, issue=issue, parent_issue=parent_issue) + secmeta = _merge_child_secmeta(ctx=ctx, issue=issue) _rebuild_and_apply_child_body(ctx=ctx, sync=sync, issue=issue, secmeta=secmeta) - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=reopened) _sync_child_title_and_labels(ctx=ctx, sync=sync, issue=issue) if parent_issue is not None: @@ -800,11 +663,7 @@ def ensure_issue( "Ensure the collector/scanner includes an 
'Alert hash: ...' line." ) - occurrence_fp = compute_occurrence_fp(commit_sha, path, start_line, end_line) - repo_full = alert.repo - first_seen = iso_date(alert.metadata.created_at) - last_seen = iso_date(alert.metadata.updated_at) parent_issue = ensure_parent_issue( alert, @@ -825,13 +684,11 @@ def ensure_issue( alert=alert, alert_number=alert_number, fingerprint=fingerprint, - occurrence_fp=occurrence_fp, repo=repo_full, - first_seen=first_seen, - last_seen=last_seen, tool=alert.metadata.tool, rule_id=rule_id, rule_name=alert.metadata.rule_name, + rule_description=alert.metadata.rule_description, severity=alert.metadata.severity, cve=cve, path=path, diff --git a/src/security/utils/templates.py b/src/security/issues/templates.py similarity index 98% rename from src/security/utils/templates.py rename to src/security/issues/templates.py index f90aa2d..d2d7ede 100644 --- a/src/security/utils/templates.py +++ b/src/security/issues/templates.py @@ -81,6 +81,5 @@ ## Detection Timeline -- **Scan date:** {{ scan_date }} - **First seen:** {{ first_seen }} """ diff --git a/src/security/sync_security_alerts.py b/src/security/main.py similarity index 89% rename from src/security/sync_security_alerts.py rename to src/security/main.py index 6441285..c46d6ab 100644 --- a/src/security/sync_security_alerts.py +++ b/src/security/main.py @@ -15,30 +15,17 @@ # limitations under the License. # -"""Orchestrator that runs the full security-alert sync pipeline. - -Steps ------ -1. **check_labels** – verify required labels exist in the repository. -2. **collect_alert** – fetch code-scanning alerts and write a normalised JSON file. -3. **promote_alerts** – create / update GitHub Issues from the collected alerts. 
-""" +"""Orchestrator that runs the full Security pipeline: GH sec-Issues creation.""" import argparse import logging import os -import sys - -_repo_root = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")) -if _repo_root not in sys.path: - sys.path.insert(0, _repo_root) -from shared.common import parse_runner_debug -from shared.logging_config import setup_logging +from core.config import parse_runner_debug, setup_logging -from check_labels import check_labels -from collect_alert import main as collect_alert_main -from promote_alerts import main as promote_alerts_main +from security.check_labels import check_labels +from security.collect_alert import main as collect_alert_main +from security.promote_alerts import main as promote_alerts_main logger = logging.getLogger(__name__) diff --git a/src/security/notifications/__init__.py b/src/security/notifications/__init__.py new file mode 100644 index 0000000..ebfbdd3 --- /dev/null +++ b/src/security/notifications/__init__.py @@ -0,0 +1,15 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/src/security/utils/teams.py b/src/security/notifications/teams.py similarity index 91% rename from src/security/utils/teams.py rename to src/security/notifications/teams.py index 8773054..01a1362 100644 --- a/src/security/utils/teams.py +++ b/src/security/notifications/teams.py @@ -15,7 +15,7 @@ # """Teams webhook notification – builds the notification payload and invokes -``send_to_teams.py`` for new / reopened issues. +``send_notifications.py`` for new / reopened issues. """ import logging @@ -24,7 +24,7 @@ import sys import tempfile -from .models import NotifiedIssue, SeverityChange, severity_direction +from security.issues.models import NotifiedIssue, SeverityChange, severity_direction def build_teams_notification_body(notifications: list[NotifiedIssue]) -> str: @@ -70,7 +70,7 @@ def _post_to_teams( label: str, dry_run: bool = False, ) -> None: - """Write *body* to a temp file and invoke send_to_teams.py.""" + """Write *body* to a temp file and invoke send_notifications.py.""" if dry_run: if webhook_url: logging.info(f"DRY-RUN: {label} webhook configured; no delivery will occur") @@ -80,10 +80,10 @@ def _post_to_teams( ) script_dir = os.path.dirname(os.path.abspath(__file__)) - send_script = os.path.join(os.path.dirname(script_dir), "send_to_teams.py") + send_script = os.path.join(os.path.dirname(script_dir), "send_notifications.py") if not os.path.exists(send_script): - logging.warning(f"send_to_teams.py not found at {send_script} – skipping {label.lower()}") + logging.warning(f"send_notifications.py not found at {send_script} – skipping {label.lower()}") return body_file: str | None = None @@ -116,7 +116,7 @@ def _post_to_teams( logging.warning(f"{label} failed: {result.stderr}") else: if dry_run: - logging.info(f"DRY-RUN: send_to_teams.py {label.lower()} output:") + logging.info(f"DRY-RUN: send_notifications.py {label.lower()} output:") logging.info(result.stdout) else: logging.info(f"{label} sent successfully") @@ -134,7 +134,7 @@ def 
notify_teams( *, dry_run: bool = False, ) -> None: - """Send a Teams message about new / reopened issues via send_to_teams.py.""" + """Send a Teams message about new / reopened issues via send_notifications.py.""" if not notifications: logging.info("No new or reopened issues – skipping Teams notification") return @@ -156,7 +156,7 @@ def notify_teams_severity_changes( *, dry_run: bool = False, ) -> None: - """Send a Teams message about parent severity changes via send_to_teams.py.""" + """Send a Teams message about parent severity changes via send_notifications.py.""" if not changes: logging.debug("No severity changes – skipping Teams severity-change notification") return diff --git a/src/security/promote_alerts.py b/src/security/promote_alerts.py index 98db6df..ed4437f 100644 --- a/src/security/promote_alerts.py +++ b/src/security/promote_alerts.py @@ -35,9 +35,9 @@ `python3 promote_alerts.py --file alerts.json --verbose --dry-run` Implementation: -- Shared, cross-solution logic lives in the ``shared`` package +- Core, cross-solution logic lives in the ``core`` package (common helpers, GitHub wrappers, priority mapping, template renderer). -- Security-specific orchestration and domain logic lives in ``utils``. +- Security-specific orchestration and domain logic lives in ``alerts``, ``issues``, ``notifications``. - This file remains the CLI entry-point only: argument parsing → wiring → main(). """ @@ -45,22 +45,15 @@ import logging import os import shutil -import sys - -# Ensure the repo root is on sys.path so `shared.*` is importable. 
-_repo_root = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")) -if _repo_root not in sys.path: - sys.path.insert(0, _repo_root) - -from shared.common import parse_runner_debug -from shared.github_issues import gh_issue_list_by_label -from shared.priority import parse_severity_priority_map - -from utils.alert_parser import load_open_alerts_from_file -from utils.constants import LABEL_SCOPE_SECURITY -from utils.issue_sync import sync_alerts_and_issues -from shared.logging_config import setup_logging -from utils.teams import notify_teams, notify_teams_severity_changes + +from core.config import parse_runner_debug, setup_logging +from core.github.issues import gh_issue_list_by_label +from core.priority import parse_severity_priority_map + +from security.alerts.parser import load_open_alerts_from_file +from security.constants import LABEL_SCOPE_SECURITY +from security.issues.sync import sync_alerts_and_issues +from security.notifications.teams import notify_teams, notify_teams_severity_changes def parse_args(argv: list[str] | None = None) -> argparse.Namespace: diff --git a/src/security/send_to_teams.py b/src/security/send_notifications.py similarity index 91% rename from src/security/send_to_teams.py rename to src/security/send_notifications.py index df5a369..7b18969 100644 --- a/src/security/send_to_teams.py +++ b/src/security/send_notifications.py @@ -43,16 +43,16 @@ Usage examples -------------- # Body from a file -python3 send_to_teams.py --body-file reports/summary.md --title "Security Report" +python3 send_notifications.py --body-file reports/summary.md --title "Security Report" # Body from a CLI argument -python3 send_to_teams.py --body "All checks **passed** ✅" +python3 send_notifications.py --body "All checks **passed** ✅" # Body from stdin (pipe) -cat reports/summary.md | python3 send_to_teams.py --title "Daily digest" +cat reports/summary.md | python3 send_notifications.py --title "Daily digest" # Dry-run (print the payload 
without sending) -python3 send_to_teams.py --body-file reports/summary.md --dry-run +python3 send_notifications.py --body-file reports/summary.md --dry-run """ import argparse @@ -64,16 +64,7 @@ import requests -# Ensure the repo root is on sys.path so `shared.*` is importable. -_script_dir = os.path.dirname(os.path.abspath(__file__)) -if _script_dir not in sys.path: - sys.path.insert(0, _script_dir) -_repo_root = os.path.normpath(os.path.join(_script_dir, "..")) -if _repo_root not in sys.path: - sys.path.insert(0, _repo_root) - -from shared.common import parse_runner_debug -from shared.logging_config import setup_logging +from core.config import parse_runner_debug, setup_logging def _text_block(text: str, **kwargs: Any) -> Dict[str, Any]: diff --git a/src/security/utils/sec_events.py b/src/security/utils/sec_events.py deleted file mode 100644 index e0a018e..0000000 --- a/src/security/utils/sec_events.py +++ /dev/null @@ -1,72 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -"""``[sec-event]`` comment blocks – parsing, rendering, and stripping -structured lifecycle-event blocks from issue bodies. 
-""" - -import re - -from .secmeta import render_kv_lines - - -def parse_sec_event_fields(raw: str) -> dict[str, str]: - """Parse ``key=value`` lines from a raw sec-event block.""" - fields: dict[str, str] = {} - for line in (raw or "").splitlines(): - s = line.strip() - if not s or "=" not in s: - continue - k, v = s.split("=", 1) - fields[k.strip()] = v.strip() - return fields - - -def render_sec_event(fields: dict[str, str]) -> str: - """Render a structured ``[sec-event]`` comment block from *fields*.""" - preferred_order = [ - "action", - "seen_at", - "source", - "gh_alert_number", - "occurrence_fp", - "commit_sha", - "path", - "start_line", - "end_line", - ] - lines = ["[sec-event]"] - lines.extend(render_kv_lines(fields, preferred_order, skip_empty=True)) - lines.append("[/sec-event]") - return "\n".join(lines) + "\n" - - -def strip_sec_events_from_body(body: str) -> str: - """Remove any legacy sec-event content from an issue body. - - - Drops a dedicated '## Security Events' section if present (from previous versions). - - Removes any inline [sec-event] blocks. - """ - - text = body or "" - # Drop everything from the header onward (the section was intended to be last). - m = re.search(r"\n##\s+Security\s+Events\s*\n", text, flags=re.IGNORECASE) - if m: - text = text[: m.start()].rstrip() + "\n" - # Remove any inline blocks. 
- text = re.compile(r"\[sec-event\]\s*(.*?)\s*\[/sec-event\]", re.S).sub("", text) - text = re.sub(r"\n{3,}", "\n\n", text).strip() + "\n" - return text diff --git a/tests/security/test_github_issues.py b/tests/core/github/test_issues.py similarity index 75% rename from tests/security/test_github_issues.py rename to tests/core/github/test_issues.py index ec880d6..d03f7dc 100644 --- a/tests/security/test_github_issues.py +++ b/tests/core/github/test_issues.py @@ -26,7 +26,7 @@ import pytest from pytest_mock import MockerFixture -from shared.github_issues import ( +from core.github.issues import ( _gh_with_retry, _is_not_found_error, _not_found_hint, @@ -96,8 +96,8 @@ def test_not_found_hint_empty_on_success() -> None: def test_retry_succeeds_first_attempt(mocker: MockerFixture) -> None: """No retries when the first call succeeds.""" - mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="ok")) - mock_sleep = mocker.patch("shared.github_issues.time.sleep") + mock_run = mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="ok")) + mock_sleep = mocker.patch("core.github.issues.time.sleep") result = _gh_with_retry(["some", "cmd"]) assert result.returncode == 0 mock_run.assert_called_once() @@ -106,18 +106,18 @@ def test_retry_succeeds_first_attempt(mocker: MockerFixture) -> None: def test_retry_succeeds_on_second_attempt(mocker: MockerFixture) -> None: """Retries once on 404 then succeeds.""" mock_run = mocker.patch( - "shared.github_issues.run_gh", + "core.github.issues.run_gh", side_effect=[_not_found(), _ok(stdout="42")], ) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.time.sleep") result = _gh_with_retry(["some", "cmd"], retries=3) assert result.returncode == 0 assert mock_run.call_count == 2 def test_retry_exhausts_all_attempts(mocker: MockerFixture) -> None: """Returns the last failure after all retries are consumed.""" - mock_run = mocker.patch("shared.github_issues.run_gh", 
return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mock_run = mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") result = _gh_with_retry(["some", "cmd"], retries=3) assert result.returncode != 0 # 1 initial + 3 retries = 4 total @@ -125,16 +125,16 @@ def test_retry_exhausts_all_attempts(mocker: MockerFixture) -> None: def test_retry_does_not_retry_non_404_error(mocker: MockerFixture) -> None: """Non-404 errors are not retried.""" - mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_err("server error")) - mocker.patch("shared.github_issues.time.sleep") + mock_run = mocker.patch("core.github.issues.run_gh", return_value=_err("server error")) + mocker.patch("core.github.issues.time.sleep") result = _gh_with_retry(["some", "cmd"], retries=3) assert result.returncode != 0 mock_run.assert_called_once() # no retries def test_retry_sleeps_with_exponential_backoff(mocker: MockerFixture) -> None: """Sleep duration grows as backoff_base ** attempt.""" - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mock_sleep = mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mock_sleep = mocker.patch("core.github.issues.time.sleep") _gh_with_retry(["cmd"], retries=3, backoff_base=2.0) sleep_calls = [c.args[0] for c in mock_sleep.call_args_list] # attempts 1, 2, 3 → 2**1=2, 2**2=4, 2**3=8 @@ -142,60 +142,60 @@ def test_retry_sleeps_with_exponential_backoff(mocker: MockerFixture) -> None: def test_retry_zero_retries_no_sleep(mocker: MockerFixture) -> None: """retries=0 means a single attempt with no sleep.""" - mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mock_sleep = mocker.patch("shared.github_issues.time.sleep") + mock_run = mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mock_sleep = 
mocker.patch("core.github.issues.time.sleep") _gh_with_retry(["cmd"], retries=0) mock_run.assert_called_once() mock_sleep.assert_not_called() def test_get_rest_id_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="987654\n")) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="987654\n")) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_rest_id("org/repo", 42) == 987654 def test_get_rest_id_retries_on_404(mocker: MockerFixture) -> None: mock_run = mocker.patch( - "shared.github_issues.run_gh", + "core.github.issues.run_gh", side_effect=[_not_found(), _ok(stdout="1111\n")], ) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.time.sleep") result = gh_issue_get_rest_id("org/repo", 5) assert result == 1111 assert mock_run.call_count == 2 def test_get_rest_id_returns_none_after_all_retries(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_rest_id("org/repo", 5) is None def test_get_rest_id_parse_failure(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="not-a-number")) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="not-a-number")) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_rest_id("org/repo", 1) is None def test_get_rest_id_not_found_hint_in_log(mocker: MockerFixture, caplog) -> None: """Log message includes the not-found hint for 404 errors.""" - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + 
mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") with caplog.at_level(logging.WARNING, logger="root"): gh_issue_get_rest_id("org/repo", 99) assert any("deleted or transferred" in r.message for r in caplog.records) def test_add_sub_issue_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mocker.patch("core.github.issues.run_gh", return_value=_ok()) assert gh_issue_add_sub_issue("org/repo", 10, 9999) is True def test_add_sub_issue_failure_logs_hint(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_add_sub_issue("org/repo", 10, 9999) assert result is False assert any("deleted or transferred" in r.message for r in caplog.records) def test_add_sub_issue_failure_plain_error(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_err("rate limited")) + mocker.patch("core.github.issues.run_gh", return_value=_err("rate limited")) with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_add_sub_issue("org/repo", 10, 9999) assert result is False @@ -203,57 +203,57 @@ def test_add_sub_issue_failure_plain_error(mocker: MockerFixture, caplog) -> Non def test_add_sub_issue_by_number_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", side_effect=[_ok(stdout="5555\n"), _ok()]) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", side_effect=[_ok(stdout="5555\n"), _ok()]) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_add_sub_issue_by_number("org/repo", 10, 42) is True def test_add_sub_issue_by_number_rest_id_fails(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", 
return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_add_sub_issue_by_number("org/repo", 10, 42) is False def test_get_sub_issue_numbers_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="[1, 2, 3]\n")) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="[1, 2, 3]\n")) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_sub_issue_numbers("org/repo", 10) == {1, 2, 3} def test_get_sub_issue_numbers_empty(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="[]\n")) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="[]\n")) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_sub_issue_numbers("org/repo", 10) == set() def test_get_sub_issue_numbers_not_found_error(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_get_sub_issue_numbers("org/repo", 10) assert result == set() assert any("deleted or transferred" in r.message for r in caplog.records) def test_get_sub_issue_numbers_parse_error(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="not-json")) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="not-json")) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_sub_issue_numbers("org/repo", 10) == set() def 
test_issue_comment_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok()) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_comment("org/repo", 1, "hello") is True def test_issue_comment_retries_on_404(mocker: MockerFixture) -> None: mock_run = mocker.patch( - "shared.github_issues.run_gh", + "core.github.issues.run_gh", side_effect=[_not_found(), _ok()], ) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.time.sleep") assert gh_issue_comment("org/repo", 1, "hello") is True assert mock_run.call_count == 2 def test_issue_comment_fails_after_all_retries(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_comment("org/repo", 1, "hello") assert result is False @@ -265,19 +265,19 @@ def test_issue_comment_graphql_not_found_hint(mocker: MockerFixture, caplog) -> returncode=1, stderr="GraphQL: Could not resolve to an issue or pull request with the number of 42. 
(repository.issue)", ) - mocker.patch("shared.github_issues.run_gh", return_value=graphql_err) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=graphql_err) + mocker.patch("core.github.issues.time.sleep") with caplog.at_level(logging.ERROR, logger="root"): gh_issue_comment("org/repo", 42, "body") assert any("deleted or transferred" in r.message for r in caplog.records) def test_edit_state_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mocker.patch("core.github.issues.run_gh", return_value=_ok()) assert gh_issue_edit_state("org/repo", 1, "open") is True def test_edit_state_not_found_hint(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_edit_state("org/repo", 1, "open") assert result is False @@ -289,11 +289,11 @@ def test_edit_state_invalid_state_raises() -> None: def test_edit_title_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mocker.patch("core.github.issues.run_gh", return_value=_ok()) assert gh_issue_edit_title("org/repo", 1, "New title") is True def test_edit_title_not_found_hint(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_edit_title("org/repo", 1, "New title") assert result is False @@ -301,11 +301,11 @@ def test_edit_title_not_found_hint(mocker: MockerFixture, caplog) -> None: def test_edit_body_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mocker.patch("core.github.issues.run_gh", return_value=_ok()) assert 
gh_issue_edit_body("org/repo", 1, "new body") is True def test_edit_body_not_found_hint(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_edit_body("org/repo", 1, "new body") assert result is False @@ -313,17 +313,17 @@ def test_edit_body_not_found_hint(mocker: MockerFixture, caplog) -> None: def test_add_labels_success(mocker: MockerFixture) -> None: - mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mock_run = mocker.patch("core.github.issues.run_gh", return_value=_ok()) gh_issue_add_labels("org/repo", 1, ["bug", "security"]) mock_run.assert_called_once() def test_add_labels_no_labels_skips_call(mocker: MockerFixture) -> None: - mock_run = mocker.patch("shared.github_issues.run_gh") + mock_run = mocker.patch("core.github.issues.run_gh") gh_issue_add_labels("org/repo", 1, []) mock_run.assert_not_called() def test_add_labels_not_found_hint(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) with caplog.at_level(logging.WARNING, logger="root"): gh_issue_add_labels("org/repo", 1, ["bug"]) assert any("deleted or transferred" in r.message for r in caplog.records) @@ -331,18 +331,18 @@ def test_add_labels_not_found_hint(mocker: MockerFixture, caplog) -> None: def test_create_issue_success_url(mocker: MockerFixture) -> None: mocker.patch( - "shared.github_issues.run_gh", + "core.github.issues.run_gh", return_value=_ok(stdout="https://github.com/org/repo/issues/123\n"), ) num = gh_issue_create("org/repo", "title", "body", ["label"]) assert num == 123 def test_create_issue_success_bare_number(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="issues/456")) + 
mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="issues/456")) assert gh_issue_create("org/repo", "t", "b", []) == 456 def test_create_issue_failure_returns_none(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_err("permission denied")) + mocker.patch("core.github.issues.run_gh", return_value=_err("permission denied")) assert gh_issue_create("org/repo", "t", "b", []) is None @@ -351,7 +351,7 @@ def test_list_by_label_success(mocker: MockerFixture) -> None: {"number": 1, "state": "open", "title": "T1", "body": "b1", "labels": [{"name": "bug"}]}, {"number": 2, "state": "closed", "title": "T2", "body": "b2", "labels": []}, ] - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout=json.dumps(payload))) + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout=json.dumps(payload))) issues = gh_issue_list_by_label("org/repo", "bug") assert len(issues) == 2 assert issues[1].title == "T1" @@ -359,10 +359,10 @@ def test_list_by_label_success(mocker: MockerFixture) -> None: assert issues[2].labels == [] def test_list_by_label_empty_label_returns_empty(mocker: MockerFixture) -> None: - mock_run = mocker.patch("shared.github_issues.run_gh") + mock_run = mocker.patch("core.github.issues.run_gh") assert gh_issue_list_by_label("org/repo", "") == {} mock_run.assert_not_called() def test_list_by_label_gh_failure_returns_empty(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_err("network error")) + mocker.patch("core.github.issues.run_gh", return_value=_err("network error")) assert gh_issue_list_by_label("org/repo", "bug") == {} diff --git a/tests/security/alerts/test_models.py b/tests/security/alerts/test_models.py new file mode 100644 index 0000000..cfd55c3 --- /dev/null +++ b/tests/security/alerts/test_models.py @@ -0,0 +1,54 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""Unit tests for ``security.alerts.models``.""" + +from security.alerts.models import AlertMetadata + + +# ===================================================================== +# AlertMetadata – None-safe __post_init__ +# ===================================================================== + + +def test_alert_metadata_none_fields_do_not_crash() -> None: + """AlertMetadata must not raise when nullable collector fields are None.""" + md = AlertMetadata( + severity=None, # type: ignore[arg-type] – mirrors _normalise_alert output + rule_id=None, # type: ignore[arg-type] + rule_name=None, # type: ignore[arg-type] + rule_description=None, # type: ignore[arg-type] + state=None, # type: ignore[arg-type] + tool=None, # type: ignore[arg-type] + ) + assert md.severity == "unknown" + assert md.rule_id == "" + assert md.rule_name == "" + assert md.rule_description == "" + assert md.state == "" + assert md.tool == "" + + +def test_alert_metadata_strips_whitespace() -> None: + md = AlertMetadata(severity=" high ", rule_id=" CVE-123 ", tool=" AquaSec ") + assert md.severity == "high" + assert md.rule_id == "CVE-123" + assert md.tool == "AquaSec" + + +def test_alert_metadata_state_lowercased() -> None: + md = AlertMetadata(state=" OPEN ") + assert md.state == "open" diff --git a/tests/security/utils/test_alert_parser.py b/tests/security/alerts/test_parser.py similarity index 98% rename from tests/security/utils/test_alert_parser.py rename to tests/security/alerts/test_parser.py index 08be648..9f6675a 100644 --- 
a/tests/security/utils/test_alert_parser.py +++ b/tests/security/alerts/test_parser.py @@ -22,7 +22,7 @@ import pytest -from utils.alert_parser import ( +from security.alerts.parser import ( AlertMessageKey, compute_occurrence_fp, load_open_alerts_from_file, @@ -70,7 +70,7 @@ ) _RAW_PIPELINE_MESSAGE = ( - "Artifact: .github/workflows/aquasec-night-scan.yml\n" + "Artifact: .github/workflows/aquasec-night-scan-example.yml\n" "Type: pipelineMisconfigurations\n" "Vulnerability: AVD-PIPELINE-0008\n" "Severity: MEDIUM\n" @@ -82,7 +82,7 @@ "First seen: 2026-02-09T15:51:33.454Z\n" "SCM file: https://github.com/test-org/test-repo/blob/" "64c62d98a7db5dbd80ae8b0affd531099cf54280/" - ".github/workflows/aquasec-night-scan.yml\n" + ".github/workflows/aquasec-night-scan-example.yml\n" "Start line: 21\n" "Alert hash: bed23a624d7f1f07f56a07c6349bcd8b" ) diff --git a/tests/security/conftest.py b/tests/security/conftest.py index 8fec7e3..7edd1aa 100644 --- a/tests/security/conftest.py +++ b/tests/security/conftest.py @@ -23,7 +23,7 @@ import pytest -from utils.models import Alert +from security.alerts.models import Alert # ── Raw alert payloads (read-only module-level originals) ────────────── @@ -38,6 +38,7 @@ "alert_url": "https://github.com/test-org/test-repo/security/code-scanning/303", "rule_id": "req-with-very-false-aquasec-python", "rule_name": "sast", + "rule_description": "Requests with verify=False", "severity": "high", "confidence": "error", "tags": ["HIGH", "sast", "security"], @@ -108,6 +109,7 @@ "alert_url": "https://github.com/test-org/test-repo/security/code-scanning/312", "rule_id": "CVE-2026-25755", "rule_name": "vulnerabilities", + "rule_description": "jsPDF PDF object injection", "severity": "high", "confidence": "error", "tags": ["HIGH", "security", "vulnerabilities"], @@ -179,6 +181,7 @@ "alert_url": "https://github.com/test-org/test-repo/security/code-scanning/317", "rule_id": "AVD-PIPELINE-0008", "rule_name": "pipelineMisconfigurations", + "rule_description": 
"Dependency not pinned to commit SHA", "severity": "medium", "confidence": "warning", "tags": ["MEDIUM", "pipelineMisconfigurations", "security"], @@ -189,13 +192,13 @@ "commit_sha": "d28cb4b49c437fdc4e26471ced2b128c63839d0e", "instance_url": None, "classifications": [], - "file": ".github/workflows/aquasec-night-scan.yml", + "file": ".github/workflows/aquasec-night-scan-example.yml", "start_line": 21, "end_line": None, }, "alert_details": { "alert_hash": "bed23a624d7f1f07f56a07c6349bcd8b", - "artifact": ".github/workflows/aquasec-night-scan.yml", + "artifact": ".github/workflows/aquasec-night-scan-example.yml", "type": "pipelineMisconfigurations", "vulnerability": "AVD-PIPELINE-0008", "severity": "MEDIUM", @@ -206,7 +209,7 @@ "scm_file": ( "https://github.com/test-org/test-repo/blob/" "64c62d98a7db5dbd80ae8b0affd531099cf54280/" - ".github/workflows/aquasec-night-scan.yml" + ".github/workflows/aquasec-night-scan-example.yml" ), "start_line": "21", "message": ( diff --git a/tests/security/utils/test_issue_builder.py b/tests/security/issues/test_builder.py similarity index 92% rename from tests/security/utils/test_issue_builder.py rename to tests/security/issues/test_builder.py index 2234f87..c350ab9 100644 --- a/tests/security/utils/test_issue_builder.py +++ b/tests/security/issues/test_builder.py @@ -18,7 +18,7 @@ import pytest -from utils.issue_builder import ( +from security.issues.builder import ( alert_extra_data, build_child_issue_body, build_issue_title, @@ -27,7 +27,7 @@ build_parent_template_values, classify_category, ) -from utils.models import Alert +from security.alerts.models import Alert # ===================================================================== @@ -278,19 +278,23 @@ def test_contains_confidence(vuln_alert: Alert) -> None: def test_format() -> None: fp = "a1b2c3d4e5f6" - title = build_issue_title("sast", "rule-123", fp) - assert title == "[SEC][FP=a1b2c3d4] sast" + title = build_issue_title("A description", "sast", "rule-123", fp) + assert 
title == "[SEC][FP=a1b2c3d4] A description" + +def test_fallback_to_rule_name() -> None: + title = build_issue_title(None, "sast", "rule-123", "abcdef12") + assert "sast" in title def test_fallback_to_rule_id() -> None: - title = build_issue_title(None, "rule-123", "abcdef12") + title = build_issue_title(None, None, "rule-123", "abcdef12") assert "rule-123" in title def test_fallback_to_default() -> None: - title = build_issue_title(None, "", "abcdef12") + title = build_issue_title(None, None, "", "abcdef12") assert "Security finding" in title def test_empty_fingerprint() -> None: - title = build_issue_title("sast", "rule-123", "") + title = build_issue_title("A description", "sast", "rule-123", "") assert "N/A" in title @@ -303,7 +307,7 @@ def test_empty_fingerprint() -> None: def test_sast_avd_id(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) - assert "req-with-very-false-aquasec-python" in body + assert "Requests with verify=False" in body def test_sast_alert_hash(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) @@ -311,7 +315,7 @@ def test_sast_alert_hash(sast_alert: Alert) -> None: def test_sast_title(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) - assert "sast" in body + assert "Requests with verify=False" in body def test_sast_message_present(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) @@ -333,10 +337,6 @@ def test_sast_reachable_from_msg(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) assert "False" in body -def test_sast_scan_date(sast_alert: Alert) -> None: - body = build_child_issue_body(sast_alert) - assert "2026-02-24" in body - def test_sast_first_seen(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) assert "2025-09-17" in body @@ -345,7 +345,7 @@ def test_sast_first_seen(sast_alert: Alert) -> None: def test_vuln_avd_id(vuln_alert: Alert) -> None: body = build_child_issue_body(vuln_alert) - assert "CVE-2026-25755" in 
body + assert "jsPDF PDF object injection" in body def test_vuln_installed_version(vuln_alert: Alert) -> None: body = build_child_issue_body(vuln_alert) @@ -410,21 +410,6 @@ def test_all_template_sections_rendered(vuln_alert: Alert) -> None: assert "## Detection Timeline" in body -def test_scan_date_falls_back_to_metadata_updated_at() -> None: - """When alert_details.scan_date is absent, fall back to metadata.updated_at.""" - alert = Alert.from_dict({ - "metadata": { - "rule_id": "X", - "updated_at": "2026-01-15T10:00:00Z", - "created_at": "2025-12-01T08:00:00Z", - }, - "alert_details": {}, # scan_date absent → defaults to "" - "rule_details": {}, - }) - body = build_child_issue_body(alert) - assert "2026-01-15" in body - - def test_first_seen_falls_back_to_metadata_created_at() -> None: """When alert_details.first_seen is absent, fall back to metadata.created_at.""" alert = Alert.from_dict({ @@ -440,7 +425,7 @@ def test_first_seen_falls_back_to_metadata_created_at() -> None: assert "2025-12-01" in body -def test_scan_date_and_first_seen_yield_na_when_no_fallback() -> None: +def test_first_seen_yields_na_when_no_fallback() -> None: """When neither alert_details nor metadata provide dates, render N/A.""" alert = Alert.from_dict({ "metadata": {"rule_id": "X"}, # no updated_at / created_at @@ -448,4 +433,4 @@ def test_scan_date_and_first_seen_yield_na_when_no_fallback() -> None: "rule_details": {}, }) body = build_child_issue_body(alert) - assert body.count("N/A") >= 2 + assert body.count("N/A") >= 1 diff --git a/tests/security/utils/test_models.py b/tests/security/issues/test_models.py similarity index 72% rename from tests/security/utils/test_models.py rename to tests/security/issues/test_models.py index 83abddb..9df3447 100644 --- a/tests/security/utils/test_models.py +++ b/tests/security/issues/test_models.py @@ -14,13 +14,10 @@ # limitations under the License. 
# -"""Unit tests for ``utils.models``.""" +"""Unit tests for ``security.issues.models``.""" -import pytest - -from utils.models import ( +from security.issues.models import ( AlertContext, - AlertMetadata, IssueIndex, NotifiedIssue, SEVERITY_ORDER, @@ -29,7 +26,6 @@ SyncResult, severity_direction, ) -from shared.models import Issue # ===================================================================== @@ -112,9 +108,10 @@ def test_issue_index_creation() -> None: def test_alert_context_creation() -> None: ctx = AlertContext( - alert={}, alert_number=1, fingerprint="fp", occurrence_fp="ofp", - repo="org/repo", first_seen="2026-01-01", last_seen="2026-01-02", + alert={}, alert_number=1, fingerprint="fp", + repo="org/repo", tool="AquaSec", rule_id="R1", rule_name="sast", + rule_description="Test finding description", severity="high", cve="CVE-79", path="src/f.py", start_line=10, end_line=20, commit_sha="abc123", ) @@ -128,36 +125,3 @@ def test_sync_context_creation() -> None: notifications=[], severity_priority_map={}, priority_sync=None, ) assert sc.dry_run is True - - -# ===================================================================== -# AlertMetadata – None-safe __post_init__ -# ===================================================================== - - -def test_alert_metadata_none_fields_do_not_crash() -> None: - """AlertMetadata must not raise when nullable collector fields are None.""" - md = AlertMetadata( - severity=None, # type: ignore[arg-type] – mirrors _normalise_alert output - rule_id=None, # type: ignore[arg-type] - rule_name=None, # type: ignore[arg-type] - state=None, # type: ignore[arg-type] - tool=None, # type: ignore[arg-type] - ) - assert md.severity == "unknown" - assert md.rule_id == "" - assert md.rule_name == "" - assert md.state == "" - assert md.tool == "" - - -def test_alert_metadata_strips_whitespace() -> None: - md = AlertMetadata(severity=" high ", rule_id=" CVE-123 ", tool=" AquaSec ") - assert md.severity == "high" - assert 
md.rule_id == "CVE-123" - assert md.tool == "AquaSec" - - -def test_alert_metadata_state_lowercased() -> None: - md = AlertMetadata(state=" OPEN ") - assert md.state == "open" diff --git a/tests/security/utils/test_secmeta.py b/tests/security/issues/test_secmeta.py similarity index 96% rename from tests/security/utils/test_secmeta.py rename to tests/security/issues/test_secmeta.py index fb2a7b5..1ac9017 100644 --- a/tests/security/utils/test_secmeta.py +++ b/tests/security/issues/test_secmeta.py @@ -18,7 +18,7 @@ import pytest -from utils.secmeta import ( +from security.issues.secmeta import ( json_list, load_secmeta, parse_json_list, @@ -115,10 +115,10 @@ def test_preferred_order() -> None: } rendered = render_secmeta(data) lines = rendered.strip().split("\n") - # schema should appear before fingerprint - schema_idx = next(i for i, l in enumerate(lines) if "schema=" in l) + # type should appear before fingerprint per preferred_order + type_idx = next(i for i, l in enumerate(lines) if "type=" in l) fp_idx = next(i for i, l in enumerate(lines) if "fingerprint=" in l) - assert schema_idx < fp_idx + assert type_idx < fp_idx def test_secmeta_roundtrip() -> None: """Render then parse should recover the original data.""" diff --git a/tests/security/utils/test_issue_sync.py b/tests/security/issues/test_sync.py similarity index 80% rename from tests/security/utils/test_issue_sync.py rename to tests/security/issues/test_sync.py index 81d399c..da42ac1 100644 --- a/tests/security/utils/test_issue_sync.py +++ b/tests/security/issues/test_sync.py @@ -14,7 +14,7 @@ # limitations under the License. # -"""Unit tests for ``utils.issue_sync`` – pure-logic helpers and +"""Unit tests for ``security.issues.sync`` – pure-logic helpers and orchestration functions with mocked GitHub CLI calls. 
""" @@ -23,11 +23,10 @@ import pytest from pytest_mock import MockerFixture -from shared.models import Issue -from utils.issue_sync import ( +from core.models import Issue +from security.issues.sync import ( _append_notification, _close_resolved_parent_issues, - _comment_child_event, _ensure_child_linked_to_parent, _flush_parent_body_updates, _handle_existing_child_issue, @@ -46,15 +45,15 @@ maybe_reopen_parent_issue, sync_alerts_and_issues, ) -from utils.models import ( - Alert, +from security.alerts.models import Alert +from security.issues.models import ( AlertContext, IssueIndex, NotifiedIssue, SeverityChange, SyncContext, ) -from utils.secmeta import render_secmeta +from security.issues.secmeta import render_secmeta # ===================================================================== @@ -72,13 +71,11 @@ def _make_alert_context(**overrides: Any) -> AlertContext: alert=Alert(), alert_number=1, fingerprint="fp_test_123", - occurrence_fp="occ_fp_test", repo="test-org/test-repo", - first_seen="2026-01-01", - last_seen="2026-01-02", tool="AquaSec", rule_id="CVE-2026-1234", rule_name="sast", + rule_description="Test finding description", severity="high", cve="CVE-2026-1234", path="src/main.py", @@ -227,7 +224,7 @@ def test_find_parent_not_found() -> None: def test_reopen_parent_none(mocker: MockerFixture) -> None: """No-op when parent_issue is None — no gh call is made.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_state") + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_state") maybe_reopen_parent_issue( "org/repo", None, rule_id="R1", dry_run=False, context="test", ) @@ -250,23 +247,18 @@ def test_reopen_parent_dry_run() -> None: assert parent.state == "open" def test_reopen_parent_real(mocker: MockerFixture) -> None: - """Non-dry-run reopens issue and posts sec-event comment.""" - mock_edit_state = mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) - mock_comment = 
mocker.patch("utils.issue_sync.gh_issue_comment") + """Non-dry-run reopens issue via state edit.""" + mock_edit_state = mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) parent = Issue(number=1, state="closed", title="P", body="b") maybe_reopen_parent_issue( "org/repo", parent, rule_id="R1", dry_run=False, context="reopen_child", child_issue_number=5, ) assert parent.state == "open" mock_edit_state.assert_called_once_with("org/repo", 1, "open") - mock_comment.assert_called_once() - comment_body = mock_comment.call_args[0][2] - assert "reopen" in comment_body - assert "R1" in comment_body def test_reopen_parent_gh_failure(mocker: MockerFixture) -> None: """If gh_issue_edit_state fails, state stays closed.""" - mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=False) + mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=False) parent = Issue(number=1, state="closed", title="P", body="b") maybe_reopen_parent_issue( "org/repo", parent, rule_id="R1", dry_run=False, context="test", @@ -308,47 +300,11 @@ def test_merge_new_alert_number() -> None: "type": "child", "fingerprint": "fp1", "gh_alert_numbers": '["100"]', - "occurrence_count": "1", - "last_occurrence_fp": "old_occ", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", }) - ctx = _make_alert_context(alert_number=200, fingerprint="fp1", occurrence_fp="new_occ") - secmeta, new_occurrence = _merge_child_secmeta(ctx=ctx, issue=child) + ctx = _make_alert_context(alert_number=200, fingerprint="fp1") + secmeta = _merge_child_secmeta(ctx=ctx, issue=child) assert "200" in secmeta["gh_alert_numbers"] assert "100" in secmeta["gh_alert_numbers"] - assert new_occurrence is True - assert secmeta["occurrence_count"] == "2" - -def test_merge_same_occurrence_fp() -> None: - """Same occurrence_fp means no new occurrence counted.""" - child = _issue_with_secmeta(1, { - "type": "child", - "fingerprint": "fp1", - "gh_alert_numbers": '["100"]', - "occurrence_count": 
"1", - "last_occurrence_fp": "same_occ", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", - }) - ctx = _make_alert_context(alert_number=100, fingerprint="fp1", occurrence_fp="same_occ") - secmeta, new_occurrence = _merge_child_secmeta(ctx=ctx, issue=child) - assert new_occurrence is False - assert secmeta["occurrence_count"] == "1" - -def test_merge_date_range_expansion() -> None: - """first_seen takes the min, last_seen takes the max.""" - child = _issue_with_secmeta(1, { - "type": "child", - "fingerprint": "fp1", - "first_seen": "2026-02-01", - "last_seen": "2026-02-15", - "occurrence_count": "1", - }) - ctx = _make_alert_context(first_seen="2026-01-15", last_seen="2026-03-01") - secmeta, _ = _merge_child_secmeta(ctx=ctx, issue=child) - assert secmeta["first_seen"] == "2026-01-15" - assert secmeta["last_seen"] == "2026-03-01" def test_merge_removes_alert_hash() -> None: """Legacy alert_hash key is dropped during merge.""" @@ -356,26 +312,11 @@ def test_merge_removes_alert_hash() -> None: "type": "child", "alert_hash": "old_hash", "fingerprint": "fp1", - "occurrence_count": "1", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", }) ctx = _make_alert_context(fingerprint="fp1") - secmeta, _ = _merge_child_secmeta(ctx=ctx, issue=child) + secmeta = _merge_child_secmeta(ctx=ctx, issue=child) assert "alert_hash" not in secmeta -def test_merge_zero_occurrence_count_reset() -> None: - """occurrence_count <= 0 is reset to at least 1.""" - child_secmeta_str = render_secmeta({ - "type": "child", "fingerprint": "fp1", - "occurrence_count": "0", "last_occurrence_fp": "same_fp", - "first_seen": "2026-01-01", "last_seen": "2026-01-01", - }) - child = Issue(number=1, state="open", title="T", body=child_secmeta_str + "\nBody\n") - ctx = _make_alert_context(fingerprint="fp1", occurrence_fp="same_fp") - secmeta, _ = _merge_child_secmeta(ctx=ctx, issue=child) - assert int(secmeta["occurrence_count"]) >= 1 - # 
===================================================================== # _maybe_reopen_child @@ -403,7 +344,7 @@ def test_reopen_child_dry_run() -> None: def test_reopen_child_real(mocker: MockerFixture) -> None: """Non-dry-run calls gh_issue_edit_state and appends notification.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) body = render_secmeta({"type": "child", "category": "sast"}) + "\nbody" issue = Issue(number=5, state="closed", title="T", body=body) ctx = _make_alert_context() @@ -416,7 +357,7 @@ def test_reopen_child_real(mocker: MockerFixture) -> None: def test_reopen_child_cascades_to_parent(mocker: MockerFixture) -> None: """Reopening child also reopens the closed parent (dry-run).""" - mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) + mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) body = render_secmeta({"type": "child"}) + "\nbody" issue = Issue(number=5, state="closed", title="T", body=body) parent = Issue(number=1, state="closed", title="P", body="pb") @@ -433,7 +374,7 @@ def test_reopen_child_cascades_to_parent(mocker: MockerFixture) -> None: def test_rebuild_body_changed(mocker: MockerFixture, sast_alert: Alert) -> None: """When body changes, gh_issue_edit_body is called.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_body") + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_body") issue = Issue(number=1, state="open", title="T", body="old body") ctx = _make_alert_context(alert=sast_alert) sync = _make_sync_context() @@ -443,13 +384,11 @@ def test_rebuild_body_changed(mocker: MockerFixture, sast_alert: Alert) -> None: def test_rebuild_body_unchanged(sast_alert: Alert) -> None: """When body is identical, no API call is made.""" - from utils.issue_builder import build_child_issue_body - from utils.sec_events import strip_sec_events_from_body + 
from security.issues.builder import build_child_issue_body secmeta = {"schema": "1", "type": "child", "fingerprint": "fp1"} human_body = build_child_issue_body(sast_alert) body = render_secmeta(secmeta) + "\n\n" + human_body - body = strip_sec_events_from_body(body) issue = Issue(number=1, state="open", title="T", body=body) ctx = _make_alert_context(alert=sast_alert) sync = _make_sync_context() @@ -464,54 +403,6 @@ def test_rebuild_body_dry_run(sast_alert: Alert) -> None: _rebuild_and_apply_child_body(ctx=ctx, sync=sync, issue=issue, secmeta=secmeta) -# ===================================================================== -# _comment_child_event -# ===================================================================== - - -def test_comment_reopen_event(mocker: MockerFixture) -> None: - """Posts a reopen sec-event comment when reopened=True.""" - mock_comment = mocker.patch("utils.issue_sync.gh_issue_comment") - issue = Issue(number=1, state="open", title="T", body="b") - ctx = _make_alert_context() - sync = _make_sync_context() - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=True) - mock_comment.assert_called_once() - comment_body = mock_comment.call_args[0][2] - assert "reopen" in comment_body - -def test_comment_occurrence_event_no_comment(mocker: MockerFixture) -> None: - """No sec-event comment when issue is already open (new_occurrence=True but reopened=False).""" - mock_comment = mocker.patch("utils.issue_sync.gh_issue_comment") - issue = Issue(number=1, state="open", title="T", body="b") - ctx = _make_alert_context() - sync = _make_sync_context() - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=False) - mock_comment.assert_not_called() - -def test_comment_no_event() -> None: - """No comment when neither reopened nor new_occurrence.""" - issue = Issue(number=1, state="open", title="T", body="b") - ctx = _make_alert_context() - sync = _make_sync_context() - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=False) - 
-def test_comment_reopen_dry_run() -> None: - """Dry-run mode does not call gh_issue_comment for reopen.""" - issue = Issue(number=1, state="open", title="T", body="b") - ctx = _make_alert_context() - sync = _make_sync_context(dry_run=True) - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=True) - -def test_comment_occurrence_dry_run() -> None: - """No comment in any mode when issue is already open (occurrence-only path).""" - issue = Issue(number=1, state="open", title="T", body="b") - ctx = _make_alert_context() - sync = _make_sync_context(dry_run=True) - # Dry-run should also be silent for already-open issues. - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=False) - - # ===================================================================== # _sync_child_title_and_labels # ===================================================================== @@ -519,8 +410,8 @@ def test_comment_occurrence_dry_run() -> None: def test_sync_title_drift_corrected(mocker: MockerFixture) -> None: """Title is updated when it drifts from the expected format.""" - mock_title = mocker.patch("utils.issue_sync.gh_issue_edit_title", return_value=True) - mock_labels = mocker.patch("utils.issue_sync.gh_issue_add_labels") + mock_title = mocker.patch("security.issues.sync.gh_issue_edit_title", return_value=True) + mock_labels = mocker.patch("security.issues.sync.gh_issue_add_labels") issue = Issue(number=1, state="open", title="Wrong title", body="b") ctx = _make_alert_context(rule_name="sast", rule_id="CVE-2026-1234", fingerprint="fp_test_123") sync = _make_sync_context() @@ -530,9 +421,9 @@ def test_sync_title_drift_corrected(mocker: MockerFixture) -> None: def test_sync_title_already_correct(mocker: MockerFixture) -> None: """Title is not updated when it matches the expected format.""" - mock_labels = mocker.patch("utils.issue_sync.gh_issue_add_labels") - from utils.issue_builder import build_issue_title - title = build_issue_title("sast", "CVE-2026-1234", 
"fp_test_123") + mock_labels = mocker.patch("security.issues.sync.gh_issue_add_labels") + from security.issues.builder import build_issue_title + title = build_issue_title("Test finding description", "sast", "CVE-2026-1234", "fp_test_123") issue = Issue(number=1, state="open", title=title, body="b") ctx = _make_alert_context(rule_name="sast", rule_id="CVE-2026-1234", fingerprint="fp_test_123") sync = _make_sync_context() @@ -554,8 +445,7 @@ def test_sync_title_dry_run() -> None: def test_handle_new_child_creates_issue(mocker: MockerFixture, sast_alert: Alert) -> None: """Creates a new issue and registers it in the index.""" - mock_create = mocker.patch("utils.issue_sync.gh_issue_create", return_value=42) - mocker.patch("utils.issue_sync.gh_issue_comment") + mock_create = mocker.patch("security.issues.sync.gh_issue_create", return_value=42) ctx = _make_alert_context(alert=sast_alert, rule_name="sast") issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) @@ -578,9 +468,8 @@ def test_handle_new_child_dry_run(sast_alert: Alert) -> None: def test_handle_new_child_links_to_parent(mocker: MockerFixture, sast_alert: Alert) -> None: """When a parent issue exists, the child is linked as a sub-issue.""" - mocker.patch("utils.issue_sync.gh_issue_create", return_value=42) - mock_sub = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") - mocker.patch("utils.issue_sync.gh_issue_comment") + mocker.patch("security.issues.sync.gh_issue_create", return_value=42) + mock_sub = mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number") parent = Issue(number=1, state="open", title="P", body="pb") ctx = _make_alert_context(alert=sast_alert) sync = _make_sync_context(notifications=[]) @@ -589,7 +478,7 @@ def test_handle_new_child_links_to_parent(mocker: MockerFixture, sast_alert: Ale def test_handle_new_child_create_fails(mocker: MockerFixture, sast_alert: Alert) -> None: """If gh_issue_create returns None, no crash and no 
index update.""" - mocker.patch("utils.issue_sync.gh_issue_create", return_value=None) + mocker.patch("security.issues.sync.gh_issue_create", return_value=None) ctx = _make_alert_context(alert=sast_alert) sync = _make_sync_context(notifications=[]) _handle_new_child_issue(ctx=ctx, sync=sync, parent_issue=None) @@ -603,8 +492,8 @@ def test_handle_new_child_create_fails(mocker: MockerFixture, sast_alert: Alert) def test_ensure_child_linked_already_linked(mocker: MockerFixture) -> None: """No-op when the child is already in the parent's sub-issues.""" - mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value={5}) - mock_add = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") + mocker.patch("security.issues.sync.gh_issue_get_sub_issue_numbers", return_value={5}) + mock_add = mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number") parent = Issue(number=1, state="open", title="P", body="pb") child = Issue(number=5, state="open", title="C", body="cb") ctx = _make_alert_context() @@ -615,8 +504,8 @@ def test_ensure_child_linked_already_linked(mocker: MockerFixture) -> None: def test_ensure_child_linked_missing_adds_link(mocker: MockerFixture) -> None: """Adds the sub-issue link when the child is missing from the parent.""" - mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value=set()) - mock_add = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number", return_value=True) + mocker.patch("security.issues.sync.gh_issue_get_sub_issue_numbers", return_value=set()) + mock_add = mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number", return_value=True) parent = Issue(number=1, state="open", title="P", body="pb") child = Issue(number=5, state="open", title="C", body="cb") ctx = _make_alert_context() @@ -627,8 +516,8 @@ def test_ensure_child_linked_missing_adds_link(mocker: MockerFixture) -> None: def test_ensure_child_linked_missing_dry_run(mocker: MockerFixture) -> None: """In 
dry-run mode logs intent without calling the add-sub-issue API.""" - mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value=set()) - mock_add = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") + mocker.patch("security.issues.sync.gh_issue_get_sub_issue_numbers", return_value=set()) + mock_add = mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number") parent = Issue(number=1, state="open", title="P", body="pb") child = Issue(number=5, state="open", title="C", body="cb") ctx = _make_alert_context() @@ -639,8 +528,8 @@ def test_ensure_child_linked_missing_dry_run(mocker: MockerFixture) -> None: def test_ensure_child_linked_cache_populated(mocker: MockerFixture) -> None: """gh_issue_get_sub_issue_numbers is called only once per parent (cached).""" - mock_list = mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value={5, 6}) - mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") + mock_list = mocker.patch("security.issues.sync.gh_issue_get_sub_issue_numbers", return_value={5, 6}) + mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number") parent = Issue(number=1, state="open", title="P", body="pb") child_a = Issue(number=5, state="open", title="A", body="ab") child_b = Issue(number=6, state="open", title="B", body="bb") @@ -654,8 +543,8 @@ def test_ensure_child_linked_cache_populated(mocker: MockerFixture) -> None: def test_ensure_child_linked_api_failure_no_cache_update(mocker: MockerFixture) -> None: """When the API call to add the link fails, the cache is not updated.""" - mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value=set()) - mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number", return_value=False) + mocker.patch("security.issues.sync.gh_issue_get_sub_issue_numbers", return_value=set()) + mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number", return_value=False) parent = Issue(number=1, state="open", title="P", 
body="pb") child = Issue(number=5, state="open", title="C", body="cb") ctx = _make_alert_context() @@ -671,8 +560,7 @@ def test_ensure_child_linked_api_failure_no_cache_update(mocker: MockerFixture) def test_ensure_parent_creates_new(mocker: MockerFixture, sast_alert: Alert) -> None: """Creates a parent issue when none exists for the rule_id.""" - mock_create = mocker.patch("utils.issue_sync.gh_issue_create", return_value=99) - mocker.patch("utils.issue_sync.gh_issue_comment") + mock_create = mocker.patch("security.issues.sync.gh_issue_create", return_value=99) issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) result = ensure_parent_issue(sast_alert, issues, index, dry_run=False) @@ -694,8 +582,6 @@ def test_ensure_parent_existing_returns_existing(sast_alert: Alert) -> None: "type": "parent", "rule_id": sast_alert.metadata.rule_id, "severity": "high", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", }) issues = {10: parent} index = build_issue_index(issues) @@ -709,8 +595,6 @@ def test_ensure_parent_severity_change_detected(sast_alert: Alert) -> None: "type": "parent", "rule_id": sast_alert.metadata.rule_id, "severity": "low", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", }) issues = {10: parent} index = build_issue_index(issues) @@ -729,7 +613,7 @@ def test_ensure_parent_no_rule_id() -> None: def test_ensure_parent_create_fails(mocker: MockerFixture, sast_alert: Alert) -> None: """Returns None if gh_issue_create fails.""" - mocker.patch("utils.issue_sync.gh_issue_create", return_value=None) + mocker.patch("security.issues.sync.gh_issue_create", return_value=None) issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) result = ensure_parent_issue(sast_alert, issues, index, dry_run=False) @@ -741,8 +625,6 @@ def test_ensure_parent_body_deferred(sast_alert: Alert) -> None: "type": "parent", "rule_id": sast_alert.metadata.rule_id, "severity": "high", - "first_seen": 
"2026-01-01", - "last_seen": "2026-01-01", }) original_body = parent.body issues = {10: parent} @@ -754,13 +636,11 @@ def test_ensure_parent_body_deferred(sast_alert: Alert) -> None: def test_ensure_parent_title_drift_corrected(mocker: MockerFixture, sast_alert: Alert) -> None: """Title is updated when it drifts from the expected format.""" - mock_title = mocker.patch("utils.issue_sync.gh_issue_edit_title", return_value=True) + mock_title = mocker.patch("security.issues.sync.gh_issue_edit_title", return_value=True) parent = _issue_with_secmeta(10, { "type": "parent", "rule_id": sast_alert.metadata.rule_id, "severity": "high", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", }) parent.title = "Wrong old title" issues = {10: parent} @@ -776,7 +656,7 @@ def test_ensure_parent_title_drift_corrected(mocker: MockerFixture, sast_alert: def test_flush_writes_changed_bodies(mocker: MockerFixture) -> None: """Writes body when it has changed.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_body") + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_body") issue = Issue(number=1, state="open", title="T", body="new body") bods = {1: ("org/repo", "old body")} _flush_parent_body_updates(bods, {1: issue}, dry_run=False) @@ -816,7 +696,7 @@ def test_label_orphan_no_orphans() -> None: def test_label_orphan_found(mocker: MockerFixture) -> None: """Labels child issues that have no matching alert.""" - mock_labels = mocker.patch("utils.issue_sync.gh_issue_add_labels") + mock_labels = mocker.patch("security.issues.sync.gh_issue_add_labels") child = _issue_with_secmeta(1, { "type": "child", "fingerprint": "fp_orphan", "repo": "org/repo", }) @@ -868,7 +748,7 @@ def test_label_orphan_no_repo_in_secmeta() -> None: def test_close_resolved_parent_issue(mocker: MockerFixture) -> None: """Closes an open parent when all known children are closed.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) + mock_edit = 
mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) parent = _issue_with_secmeta(10, { "type": "parent", "rule_id": "R1", "repo": "org/repo", }) @@ -889,7 +769,7 @@ def test_close_resolved_parent_issue(mocker: MockerFixture) -> None: def test_close_resolved_parent_skips_open_child(mocker: MockerFixture) -> None: """Leaves the parent open when any child is still open.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) parent = _issue_with_secmeta(10, { "type": "parent", "rule_id": "R1", "repo": "org/repo", }) @@ -976,7 +856,7 @@ def test_sync_severity_change_detected(sast_alert: Alert) -> None: """Severity change on existing parent is captured in result.""" parent = _issue_with_secmeta(10, { "type": "parent", "rule_id": sast_alert.metadata.rule_id, - "severity": "low", "first_seen": "2026-01-01", "last_seen": "2026-01-01", + "severity": "low", }) issues = {10: parent} result = sync_alerts_and_issues({303: sast_alert}, issues, dry_run=True) @@ -987,7 +867,7 @@ def test_sync_severity_change_detected(sast_alert: Alert) -> None: def test_sync_closes_parent_when_all_children_closed(mocker: MockerFixture) -> None: """Closes a parent during sync when its children are already closed.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) parent = _issue_with_secmeta(10, { "type": "parent", "rule_id": "R1", "repo": "org/repo", }) @@ -1033,7 +913,7 @@ def test_init_priority_sync_no_org_returns_none() -> None: def test_init_priority_sync_field_lookup_fails(mocker: MockerFixture) -> None: """Returns None when gh_project_get_priority_field fails.""" - mocker.patch("utils.issue_sync.gh_project_get_priority_field", return_value=None) + mocker.patch("security.issues.sync.gh_project_get_priority_field", 
return_value=None) result = _init_priority_sync( {}, severity_priority_map={"high": "Urgent"}, project_number=7, project_org="org", dry_run=False, diff --git a/tests/security/utils/test_templates.py b/tests/security/issues/test_templates.py similarity index 95% rename from tests/security/utils/test_templates.py rename to tests/security/issues/test_templates.py index cd3a423..9399980 100644 --- a/tests/security/utils/test_templates.py +++ b/tests/security/issues/test_templates.py @@ -16,8 +16,8 @@ """Unit tests for ``utils.templates``.""" -from shared.templates import render_markdown_template -from utils.templates import CHILD_BODY_TEMPLATE, PARENT_BODY_TEMPLATE +from core.rendering import render_markdown_template +from security.issues.templates import CHILD_BODY_TEMPLATE, PARENT_BODY_TEMPLATE # ===================================================================== @@ -102,7 +102,7 @@ def test_child_contains_all_placeholders() -> None: "{{ avd_id }}", "{{ alert_hash }}", "{{ title }}", "{{ message }}", "{{ repository_full_name }}", "{{ file_display }}", "{{ file_permalink }}", "{{ package_name }}", "{{ installed_version }}", "{{ fixed_version }}", - "{{ reachable }}", "{{ scan_date }}", "{{ first_seen }}", + "{{ reachable }}", "{{ first_seen }}", ] for ph in expected_placeholders: assert ph in CHILD_BODY_TEMPLATE, f"Missing placeholder: {ph}" @@ -120,7 +120,6 @@ def test_child_renders_without_error() -> None: "installed_version": "1.0", "fixed_version": "2.0", "reachable": "True", - "scan_date": "2026-01-01", "first_seen": "2026-01-01", } result = render_markdown_template(CHILD_BODY_TEMPLATE, values) diff --git a/tests/security/utils/test_teams.py b/tests/security/notifications/test_teams.py similarity index 92% rename from tests/security/utils/test_teams.py rename to tests/security/notifications/test_teams.py index 20be5a0..5de3a58 100644 --- a/tests/security/utils/test_teams.py +++ b/tests/security/notifications/test_teams.py @@ -22,8 +22,8 @@ import pytest -from 
utils.models import NotifiedIssue, SeverityChange -from utils.teams import ( +from security.issues.models import NotifiedIssue, SeverityChange +from security.notifications.teams import ( build_severity_change_body, build_teams_notification_body, notify_teams, @@ -136,8 +136,8 @@ def fake_run(cmd, **kwargs): calls.append((cmd, kwargs)) return types.SimpleNamespace(returncode=0, stdout="ok", stderr="") - monkeypatch.setattr("utils.teams.subprocess.run", fake_run) - monkeypatch.setattr("utils.teams.os.path.exists", lambda _: True) + monkeypatch.setattr("security.notifications.teams.subprocess.run", fake_run) + monkeypatch.setattr("security.notifications.teams.os.path.exists", lambda _: True) return calls @@ -166,8 +166,8 @@ def test_notify_teams_skips_when_script_not_found( caplog: pytest.LogCaptureFixture, ) -> None: calls: list = [] - monkeypatch.setattr("utils.teams.subprocess.run", lambda cmd, **kw: calls.append(cmd)) - monkeypatch.setattr("utils.teams.os.path.exists", lambda _: False) + monkeypatch.setattr("security.notifications.teams.subprocess.run", lambda cmd, **kw: calls.append(cmd)) + monkeypatch.setattr("security.notifications.teams.os.path.exists", lambda _: False) with caplog.at_level(logging.WARNING): notify_teams("https://hook", sample_notifications, dry_run=False) assert len(calls) == 0 @@ -195,8 +195,8 @@ def test_notify_teams_subprocess_failure( def fake_run(cmd, **kwargs): return types.SimpleNamespace(returncode=1, stdout="", stderr="send failed") - monkeypatch.setattr("utils.teams.subprocess.run", fake_run) - monkeypatch.setattr("utils.teams.os.path.exists", lambda _: True) + monkeypatch.setattr("security.notifications.teams.subprocess.run", fake_run) + monkeypatch.setattr("security.notifications.teams.os.path.exists", lambda _: True) with caplog.at_level(logging.WARNING): notify_teams("https://hook", sample_notifications, dry_run=False) assert any("failed" in r.message.lower() for r in caplog.records) diff --git 
a/tests/security/test_check_labels.py b/tests/security/test_check_labels.py index 19d18a6..78975bf 100644 --- a/tests/security/test_check_labels.py +++ b/tests/security/test_check_labels.py @@ -22,7 +22,7 @@ import pytest from pytest_mock import MockerFixture -from check_labels import REQUIRED_LABELS, check_labels, fetch_repo_labels, main +from security.check_labels import REQUIRED_LABELS, check_labels, fetch_repo_labels, main REPO = "my-org/my-repo" @@ -35,7 +35,7 @@ def _gh_result(labels: list[str]) -> subprocess.CompletedProcess: def test_fetch_repo_labels_returns_names(mocker: MockerFixture) -> None: - mock_gh = mocker.patch("check_labels.run_gh", return_value=_gh_result(["scope:security", "epic"])) + mock_gh = mocker.patch("security.check_labels.run_gh", return_value=_gh_result(["scope:security", "epic"])) assert fetch_repo_labels(REPO) == ["scope:security", "epic"] mock_gh.assert_called_once_with( ["label", "list", "--repo", REPO, "--json", "name", "--limit", "500"], @@ -45,19 +45,19 @@ def test_fetch_repo_labels_returns_names(mocker: MockerFixture) -> None: def test_fetch_repo_labels_skips_empty_names(mocker: MockerFixture) -> None: payload = json.dumps([{"name": "good"}, {"name": ""}, {}]) mocker.patch( - "check_labels.run_gh", + "security.check_labels.run_gh", return_value=subprocess.CompletedProcess(args=[], returncode=0, stdout=payload, stderr=""), ) assert fetch_repo_labels(REPO) == ["good"] def test_check_labels_all_present(mocker: MockerFixture) -> None: - mocker.patch("check_labels.fetch_repo_labels", return_value=list(REQUIRED_LABELS) + ["extra-label"]) + mocker.patch("security.check_labels.fetch_repo_labels", return_value=list(REQUIRED_LABELS) + ["extra-label"]) assert check_labels(REPO) == [] def test_check_labels_some_missing(mocker: MockerFixture) -> None: - mocker.patch("check_labels.fetch_repo_labels", return_value=["scope:security", "epic"]) + mocker.patch("security.check_labels.fetch_repo_labels", return_value=["scope:security", "epic"]) 
missing = check_labels(REPO) assert "type:tech-debt" in missing assert "sec:adept-to-close" in missing @@ -65,23 +65,23 @@ def test_check_labels_some_missing(mocker: MockerFixture) -> None: def test_check_labels_all_missing(mocker: MockerFixture) -> None: - mocker.patch("check_labels.fetch_repo_labels", return_value=[]) + mocker.patch("security.check_labels.fetch_repo_labels", return_value=[]) assert check_labels(REPO) == list(REQUIRED_LABELS) def test_check_labels_custom_required(mocker: MockerFixture) -> None: - mocker.patch("check_labels.fetch_repo_labels", return_value=["a"]) + mocker.patch("security.check_labels.fetch_repo_labels", return_value=["a"]) assert check_labels(REPO, required=["a", "b"]) == ["b"] def test_main_success(mocker: MockerFixture) -> None: - mock_check = mocker.patch("check_labels.check_labels", return_value=[]) + mock_check = mocker.patch("security.check_labels.check_labels", return_value=[]) assert main(["--repo", REPO]) == 0 mock_check.assert_called_once_with(REPO) def test_main_failure(mocker: MockerFixture) -> None: - mocker.patch("check_labels.check_labels", return_value=["epic"]) + mocker.patch("security.check_labels.check_labels", return_value=["epic"]) assert main(["--repo", REPO]) == 1 diff --git a/tests/security/test_collect_alert.py b/tests/security/test_collect_alert.py index 0b817b4..bc701b3 100644 --- a/tests/security/test_collect_alert.py +++ b/tests/security/test_collect_alert.py @@ -22,7 +22,7 @@ import pytest from pytest_mock import MockerFixture -from collect_alert import ( +from security.collect_alert import ( RULE_DETAIL_KEYS, VALID_STATES, _gh_api_json, @@ -49,6 +49,7 @@ "rule": { "id": "rule-1", "name": "sast", + "description": "Requests with verify=False", "security_severity_level": "high", "severity": "error", "tags": ["HIGH", "sast"], @@ -69,13 +70,13 @@ def _mock_happy_path(mocker: MockerFixture, repo_data: dict | None = None, raw_alerts: list | None = None): """Set up mocks for a successful main() run.""" - 
mocker.patch("collect_alert.shutil.which", return_value="/usr/bin/gh") + mocker.patch("security.collect_alert.shutil.which", return_value="/usr/bin/gh") mocker.patch( - "collect_alert.run_gh", + "security.collect_alert.run_gh", return_value=_gh_ok("Logged in"), ) mocker.patch( - "collect_alert._gh_api_json", + "security.collect_alert._gh_api_json", return_value=repo_data or { "id": 1, "name": "my-repo", @@ -87,7 +88,7 @@ def _mock_happy_path(mocker: MockerFixture, repo_data: dict | None = None, raw_a }, ) mocker.patch( - "collect_alert._gh_api_paginate", + "security.collect_alert._gh_api_paginate", return_value=raw_alerts if raw_alerts is not None else [], ) @@ -214,19 +215,19 @@ def test_parse_alert_details_value_with_colon() -> None: def test_gh_api_json_success(mocker: MockerFixture) -> None: payload = {"id": 123, "name": "my-repo"} - mocker.patch("collect_alert.run_gh", return_value=_gh_ok(json.dumps(payload))) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok(json.dumps(payload))) assert _gh_api_json("/repos/my-org/my-repo") == payload def test_gh_api_json_failure_exits(mocker: MockerFixture) -> None: - mocker.patch("collect_alert.run_gh", return_value=_gh_fail("not found")) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_fail("not found")) with pytest.raises(SystemExit): _gh_api_json("/repos/my-org/my-repo") def test_gh_api_paginate_single_page(mocker: MockerFixture) -> None: alerts = [{"number": 1}, {"number": 2}] - mocker.patch("collect_alert.run_gh", return_value=_gh_ok(json.dumps(alerts))) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok(json.dumps(alerts))) assert _gh_api_paginate("/repos/org/repo/alerts") == alerts @@ -234,24 +235,24 @@ def test_gh_api_paginate_multiple_pages(mocker: MockerFixture) -> None: page1 = json.dumps([{"number": 1}]) page2 = json.dumps([{"number": 2}]) stdout = page1 + "\n" + page2 - mocker.patch("collect_alert.run_gh", return_value=_gh_ok(stdout)) + 
mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok(stdout)) result = _gh_api_paginate("/repos/org/repo/alerts") assert result == [{"number": 1}, {"number": 2}] def test_gh_api_paginate_single_object(mocker: MockerFixture) -> None: - mocker.patch("collect_alert.run_gh", return_value=_gh_ok(json.dumps({"key": "val"}))) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok(json.dumps({"key": "val"}))) result = _gh_api_paginate("/endpoint") assert result == [{"key": "val"}] def test_gh_api_paginate_empty_array(mocker: MockerFixture) -> None: - mocker.patch("collect_alert.run_gh", return_value=_gh_ok("[]")) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok("[]")) assert _gh_api_paginate("/endpoint") == [] def test_gh_api_paginate_failure_exits(mocker: MockerFixture) -> None: - mocker.patch("collect_alert.run_gh", return_value=_gh_fail("error")) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_fail("error")) with pytest.raises(SystemExit): _gh_api_paginate("/endpoint") @@ -263,6 +264,7 @@ def test_normalise_alert_metadata() -> None: assert meta["state"] == "open" assert meta["rule_id"] == "rule-1" assert meta["rule_name"] == "sast" + assert meta["rule_description"] == "Requests with verify=False" assert meta["severity"] == "high" assert meta["confidence"] == "error" assert meta["tags"] == ["HIGH", "sast"] @@ -298,6 +300,7 @@ def test_normalise_alert_minimal() -> None: assert meta["alert_number"] is None assert meta["state"] is None assert meta["rule_id"] is None + assert meta["rule_description"] is None assert meta["tool"] is None assert meta["file"] is None assert meta["tags"] == [] @@ -385,7 +388,7 @@ def test_main_repo_metadata_in_output(mocker: MockerFixture, tmp_path) -> None: def test_main_state_forwarded_to_paginate(mocker: MockerFixture, tmp_path) -> None: _mock_happy_path(mocker) - mock_paginate = mocker.patch("collect_alert._gh_api_paginate", return_value=[]) + mock_paginate = 
mocker.patch("security.collect_alert._gh_api_paginate", return_value=[]) out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--state", "dismissed", "--out", out]) endpoint = mock_paginate.call_args[0][0] @@ -394,7 +397,7 @@ def test_main_state_forwarded_to_paginate(mocker: MockerFixture, tmp_path) -> No def test_main_state_all_omits_state_param(mocker: MockerFixture, tmp_path) -> None: _mock_happy_path(mocker) - mock_paginate = mocker.patch("collect_alert._gh_api_paginate", return_value=[]) + mock_paginate = mocker.patch("security.collect_alert._gh_api_paginate", return_value=[]) out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--state", "all", "--out", out]) endpoint = mock_paginate.call_args[0][0] @@ -402,31 +405,31 @@ def test_main_state_all_omits_state_param(mocker: MockerFixture, tmp_path) -> No def test_main_invalid_repo_format_exits(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("collect_alert.shutil.which", return_value="/usr/bin/gh") - mocker.patch("collect_alert.run_gh", return_value=_gh_ok("ok")) + mocker.patch("security.collect_alert.shutil.which", return_value="/usr/bin/gh") + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok("ok")) out = str(tmp_path / "alerts.json") with pytest.raises(SystemExit): main(["--repo", "noslash", "--out", out]) def test_main_gh_not_found_exits(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("collect_alert.shutil.which", return_value=None) + mocker.patch("security.collect_alert.shutil.which", return_value=None) out = str(tmp_path / "alerts.json") with pytest.raises(SystemExit): main(["--repo", REPO, "--out", out]) def test_main_gh_not_authenticated_exits(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("collect_alert.shutil.which", return_value="/usr/bin/gh") - mocker.patch("collect_alert.run_gh", return_value=_gh_fail("not logged in")) + mocker.patch("security.collect_alert.shutil.which", return_value="/usr/bin/gh") + mocker.patch("security.collect_alert.run_gh", 
return_value=_gh_fail("not logged in")) out = str(tmp_path / "alerts.json") with pytest.raises(SystemExit): main(["--repo", REPO, "--out", out]) def test_main_refuses_overwrite(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("collect_alert.shutil.which", return_value="/usr/bin/gh") - mocker.patch("collect_alert.run_gh", return_value=_gh_ok("ok")) + mocker.patch("security.collect_alert.shutil.which", return_value="/usr/bin/gh") + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok("ok")) out = tmp_path / "alerts.json" out.write_text("{}") with pytest.raises(SystemExit): @@ -435,7 +438,7 @@ def test_main_refuses_overwrite(mocker: MockerFixture, tmp_path) -> None: def test_main_verbose_via_flag(mocker: MockerFixture, tmp_path) -> None: _mock_happy_path(mocker) - mock_setup = mocker.patch("collect_alert.setup_logging") + mock_setup = mocker.patch("security.collect_alert.setup_logging") out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--out", out, "--verbose"]) mock_setup.assert_called_once_with(True) @@ -444,7 +447,7 @@ def test_main_verbose_via_flag(mocker: MockerFixture, tmp_path) -> None: def test_main_verbose_via_runner_debug(mocker: MockerFixture, tmp_path, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("RUNNER_DEBUG", "1") _mock_happy_path(mocker) - mock_setup = mocker.patch("collect_alert.setup_logging") + mock_setup = mocker.patch("security.collect_alert.setup_logging") out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--out", out]) mock_setup.assert_called_once_with(True) diff --git a/tests/security/utils/test_constants.py b/tests/security/test_constants.py similarity index 86% rename from tests/security/utils/test_constants.py rename to tests/security/test_constants.py index c832dbe..91560d2 100644 --- a/tests/security/utils/test_constants.py +++ b/tests/security/test_constants.py @@ -16,13 +16,11 @@ """Unit tests for ``utils.constants``.""" -from utils.constants import ( +from security.constants import 
( LABEL_EPIC, LABEL_SCOPE_SECURITY, LABEL_SEC_ADEPT_TO_CLOSE, LABEL_TYPE_TECH_DEBT, - SEC_EVENT_OPEN, - SEC_EVENT_REOPEN, SECMETA_TYPE_CHILD, SECMETA_TYPE_PARENT, ) @@ -41,13 +39,6 @@ def test_adept_to_close() -> None: assert LABEL_SEC_ADEPT_TO_CLOSE == "sec:adept-to-close" -def test_open() -> None: - assert SEC_EVENT_OPEN == "open" - -def test_reopen() -> None: - assert SEC_EVENT_REOPEN == "reopen" - - def test_parent() -> None: assert SECMETA_TYPE_PARENT == "parent" diff --git a/tests/security/test_sync_security_alerts.py b/tests/security/test_main.py similarity index 78% rename from tests/security/test_sync_security_alerts.py rename to tests/security/test_main.py index 3d5b842..f0de391 100644 --- a/tests/security/test_sync_security_alerts.py +++ b/tests/security/test_main.py @@ -14,12 +14,12 @@ # limitations under the License. # -"""Unit tests for ``sync_security_alerts.py``.""" +"""Unit tests for ``security.main``.""" import pytest from pytest_mock import MockerFixture -from sync_security_alerts import VALID_STATES, _resolve_repo, main, parse_args +from security.main import VALID_STATES, _resolve_repo, main, parse_args REPO = "my-org/my-repo" @@ -27,9 +27,9 @@ def _run_promote(mocker: MockerFixture, tmp_path, extra_args: list[str] | None = None) -> list[str]: """Helper: run main() with mocked pipeline and return the argv passed to promote_alerts.""" - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mock_promote = mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + mock_promote = mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") argv = ["--repo", REPO, "--out", out] + (extra_args or []) main(argv) @@ -127,15 +127,15 @@ def test_resolve_repo_no_slash_raises() -> None: def test_missing_labels_returns_1(mocker: MockerFixture) -> None: 
- mocker.patch("sync_security_alerts.check_labels", return_value=["epic"]) + mocker.patch("security.main.check_labels", return_value=["epic"]) rc = main(["--repo", REPO]) assert rc == 1 def test_skip_label_check(mocker: MockerFixture, tmp_path) -> None: - mock_check = mocker.patch("sync_security_alerts.check_labels") - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mock_check = mocker.patch("security.main.check_labels") + mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") rc = main(["--repo", REPO, "--skip-label-check", "--out", out]) mock_check.assert_not_called() @@ -143,16 +143,16 @@ def test_skip_label_check(mocker: MockerFixture, tmp_path) -> None: def test_labels_ok_proceeds(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") rc = main(["--repo", REPO, "--out", out]) assert rc == 0 def test_existing_file_without_force_returns_1(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) + mocker.patch("security.main.check_labels", return_value=[]) out = tmp_path / "alerts.json" out.write_text("{}") rc = main(["--repo", REPO, "--out", str(out)]) @@ -160,9 +160,9 @@ def test_existing_file_without_force_returns_1(mocker: MockerFixture, tmp_path) def test_existing_file_with_force_removes_it(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - 
mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = tmp_path / "alerts.json" out.write_text("{}") rc = main(["--repo", REPO, "--out", str(out), "--force"]) @@ -171,18 +171,18 @@ def test_existing_file_with_force_removes_it(mocker: MockerFixture, tmp_path) -> def test_nonexistent_file_proceeds(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "new.json") rc = main(["--repo", REPO, "--out", out]) assert rc == 0 def test_collect_called_with_basic_args(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mock_collect = mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mock_collect = mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--state", "fixed", "--out", out]) call_args = mock_collect.call_args[0][0] @@ -195,9 +195,9 @@ def test_collect_called_with_basic_args(mocker: MockerFixture, tmp_path) -> None def test_verbose_forwarded_to_collect(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mock_collect = mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + 
mock_collect = mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--verbose", "--out", out]) call_args = mock_collect.call_args[0][0] @@ -260,25 +260,25 @@ def test_promote_empty_optionals_not_forwarded(mocker: MockerFixture, tmp_path) def test_pipeline_success_returns_0(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") assert main(["--repo", REPO, "--out", out]) == 0 def test_collect_error_propagates(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main", side_effect=SystemExit(1)) + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main", side_effect=SystemExit(1)) out = str(tmp_path / "alerts.json") with pytest.raises(SystemExit): main(["--repo", REPO, "--out", out]) def test_promote_error_propagates(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main", side_effect=SystemExit(1)) + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main", side_effect=SystemExit(1)) out = str(tmp_path / "alerts.json") with pytest.raises(SystemExit): main(["--repo", REPO, "--out", out]) @@ -287,16 +287,16 @@ def test_promote_error_propagates(mocker: MockerFixture, 
tmp_path) -> None: def test_pipeline_call_order(mocker: MockerFixture, tmp_path) -> None: call_order: list[str] = [] mocker.patch( - "sync_security_alerts.check_labels", + "security.main.check_labels", return_value=[], side_effect=lambda *a, **k: (call_order.append("check"), [])[-1], ) mocker.patch( - "sync_security_alerts.collect_alert_main", + "security.main.collect_alert_main", side_effect=lambda *a, **k: call_order.append("collect"), ) mocker.patch( - "sync_security_alerts.promote_alerts_main", + "security.main.promote_alerts_main", side_effect=lambda *a, **k: call_order.append("promote"), ) out = str(tmp_path / "alerts.json") @@ -306,9 +306,9 @@ def test_pipeline_call_order(mocker: MockerFixture, tmp_path) -> None: def test_env_repo_fallback(mocker: MockerFixture, tmp_path, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("GITHUB_REPOSITORY", REPO) - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mock_collect = mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mock_collect = mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") assert main(["--out", out]) == 0 call_args = mock_collect.call_args[0][0] @@ -323,9 +323,9 @@ def test_no_repo_returns_error(monkeypatch: pytest.MonkeyPatch) -> None: def test_verbose_via_runner_debug(mocker: MockerFixture, tmp_path, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("RUNNER_DEBUG", "1") - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mock_collect = mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mock_collect = mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = 
str(tmp_path / "alerts.json") main(["--repo", REPO, "--out", out]) call_args = mock_collect.call_args[0][0] diff --git a/tests/security/test_promote_alerts.py b/tests/security/test_promote_alerts.py index 5984f11..591d027 100644 --- a/tests/security/test_promote_alerts.py +++ b/tests/security/test_promote_alerts.py @@ -21,7 +21,8 @@ import pytest from pytest_mock import MockerFixture -from utils.models import LoadedAlerts, NotifiedIssue, SeverityChange, SyncResult +from security.alerts.models import LoadedAlerts +from security.issues.models import NotifiedIssue, SeverityChange, SyncResult # Default empty sync result reused across tests. _SYNC_RESULT_EMPTY = SyncResult(notifications=[], severity_changes=[]) @@ -34,12 +35,12 @@ def test_parse_args_defaults(monkeypatch: pytest.MonkeyPatch) -> None: """Defaults are applied when no CLI args are given.""" - monkeypatch.setattr("sys.argv", ["promote_alerts.py"]) + monkeypatch.setattr("sys.argv", ["security.promote_alerts.py"]) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) monkeypatch.delenv("PROJECT_ORG", raising=False) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) - from promote_alerts import parse_args + from security.promote_alerts import parse_args args = parse_args() assert args.file == "alerts.json" @@ -50,7 +51,7 @@ def test_parse_args_defaults(monkeypatch: pytest.MonkeyPatch) -> None: def test_parse_args_all_flags(monkeypatch: pytest.MonkeyPatch) -> None: """All CLI flags and options are parsed correctly.""" monkeypatch.setattr("sys.argv", [ - "promote_alerts.py", + "security.promote_alerts.py", "--file", "custom.json", "--dry-run", "--verbose", @@ -60,7 +61,7 @@ def test_parse_args_all_flags(monkeypatch: pytest.MonkeyPatch) -> None: "--project-org", "my-org", "--teams-webhook-url", "https://hook.example.com", ]) - from promote_alerts import parse_args + from security.promote_alerts import parse_args args = parse_args() assert args.file == 
"custom.json" @@ -80,9 +81,9 @@ def test_parse_args_all_flags(monkeypatch: pytest.MonkeyPatch) -> None: def test_missing_gh_cli_raises(monkeypatch: pytest.MonkeyPatch) -> None: """main() raises SystemExit when gh CLI is not found.""" - monkeypatch.setattr("sys.argv", ["promote_alerts.py"]) + monkeypatch.setattr("sys.argv", ["security.promote_alerts.py"]) monkeypatch.setattr("shutil.which", lambda _cmd: None) - from promote_alerts import main + from security.promote_alerts import main with pytest.raises(SystemExit, match="gh CLI"): main() @@ -97,21 +98,21 @@ def test_missing_gh_cli_raises(monkeypatch: pytest.MonkeyPatch) -> None: def main_mocks(mocker: MockerFixture) -> SimpleNamespace: """Provide mocked dependencies for ``main()`` with sensible defaults.""" return SimpleNamespace( - which=mocker.patch("promote_alerts.shutil.which", return_value="/usr/bin/gh"), + which=mocker.patch("security.promote_alerts.shutil.which", return_value="/usr/bin/gh"), load=mocker.patch( - "promote_alerts.load_open_alerts_from_file", + "security.promote_alerts.load_open_alerts_from_file", return_value=LoadedAlerts(repo_full="org/repo", open_by_number={}), ), list_issues=mocker.patch( - "promote_alerts.gh_issue_list_by_label", + "security.promote_alerts.gh_issue_list_by_label", return_value={}, ), sync=mocker.patch( - "promote_alerts.sync_alerts_and_issues", + "security.promote_alerts.sync_alerts_and_issues", return_value=_SYNC_RESULT_EMPTY, ), - notify=mocker.patch("promote_alerts.notify_teams"), - notify_sev=mocker.patch("promote_alerts.notify_teams_severity_changes"), + notify=mocker.patch("security.promote_alerts.notify_teams"), + notify_sev=mocker.patch("security.promote_alerts.notify_teams_severity_changes"), ) @@ -125,12 +126,12 @@ def test_main_dry_run( monkeypatch: pytest.MonkeyPatch, ) -> None: """Dry-run mode passes dry_run=True to sync_alerts_and_issues.""" - monkeypatch.setattr("sys.argv", ["promote_alerts.py", "--dry-run"]) + monkeypatch.setattr("sys.argv", 
["security.promote_alerts.py", "--dry-run"]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) monkeypatch.delenv("PROJECT_ORG", raising=False) - from promote_alerts import main + from security.promote_alerts import main main() _, kwargs = main_mocks.sync.call_args @@ -142,12 +143,12 @@ def test_main_passes_file_arg( monkeypatch: pytest.MonkeyPatch, ) -> None: """--file value is forwarded to load_open_alerts_from_file.""" - monkeypatch.setattr("sys.argv", ["promote_alerts.py", "--file", "custom.json"]) + monkeypatch.setattr("sys.argv", ["security.promote_alerts.py", "--file", "custom.json"]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) monkeypatch.delenv("PROJECT_ORG", raising=False) - from promote_alerts import main + from security.promote_alerts import main main() main_mocks.load.assert_called_once_with("custom.json") @@ -158,7 +159,7 @@ def test_main_no_webhook_skips_notification( monkeypatch: pytest.MonkeyPatch, ) -> None: """Without TEAMS_WEBHOOK_URL, notify_teams is still called (with empty url).""" - monkeypatch.setattr("sys.argv", ["promote_alerts.py"]) + monkeypatch.setattr("sys.argv", ["security.promote_alerts.py"]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) @@ -174,7 +175,7 @@ def test_main_no_webhook_skips_notification( ], severity_changes=[], ) - from promote_alerts import main + from security.promote_alerts import main main() # Without webhook URL, logging.debug is hit and notify_teams is not called @@ -187,7 +188,7 @@ def test_main_with_webhook_sends_notifications( ) -> None: """When TEAMS_WEBHOOK_URL is set and there are notifications, notify_teams is called.""" monkeypatch.setattr("sys.argv", [ 
- "promote_alerts.py", "--teams-webhook-url", "https://hook.example.com", + "security.promote_alerts.py", "--teams-webhook-url", "https://hook.example.com", ]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) @@ -203,7 +204,7 @@ def test_main_with_webhook_sends_notifications( ], severity_changes=[], ) - from promote_alerts import main + from security.promote_alerts import main main() main_mocks.notify.assert_called_once() @@ -217,13 +218,13 @@ def test_main_severity_priority_map_forwarded( ) -> None: """--severity-priority-map value is parsed and forwarded to sync.""" monkeypatch.setattr("sys.argv", [ - "promote_alerts.py", "--severity-priority-map", "High=Urgent,Low=Minor", + "security.promote_alerts.py", "--severity-priority-map", "High=Urgent,Low=Minor", ]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) monkeypatch.delenv("PROJECT_ORG", raising=False) - from promote_alerts import main + from security.promote_alerts import main main() _, kwargs = main_mocks.sync.call_args @@ -236,13 +237,13 @@ def test_main_project_number_forwarded( ) -> None: """--project-number and --project-org are forwarded to sync.""" monkeypatch.setattr("sys.argv", [ - "promote_alerts.py", "--project-number", "42", "--project-org", "my-org", + "security.promote_alerts.py", "--project-number", "42", "--project-org", "my-org", ]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) monkeypatch.delenv("PROJECT_ORG", raising=False) - from promote_alerts import main + from security.promote_alerts import main main() _, kwargs = main_mocks.sync.call_args diff --git a/tests/security/test_send_to_teams.py b/tests/security/test_send_notifications.py similarity index 93% rename from 
tests/security/test_send_to_teams.py rename to tests/security/test_send_notifications.py index 054c074..db77005 100644 --- a/tests/security/test_send_to_teams.py +++ b/tests/security/test_send_notifications.py @@ -24,7 +24,7 @@ import pytest -from send_to_teams import ( +from security.send_notifications import ( _build_card_body, _parse_args, _resolve_body, @@ -149,7 +149,7 @@ def test_no_body_raises(monkeypatch: pytest.MonkeyPatch) -> None: args = _parse_args([]) # stdin is a tty in tests, so it should raise fake_stdin = types.SimpleNamespace(isatty=lambda: True) - monkeypatch.setattr("send_to_teams.sys.stdin", fake_stdin) + monkeypatch.setattr("security.send_notifications.sys.stdin", fake_stdin) with pytest.raises(SystemExit): _resolve_body(args) @@ -157,7 +157,7 @@ def test_from_stdin(monkeypatch: pytest.MonkeyPatch) -> None: """Body is read from stdin when neither --body nor --body-file is given.""" args = _parse_args([]) fake_stdin = types.SimpleNamespace(isatty=lambda: False, read=lambda: "piped content") - monkeypatch.setattr("send_to_teams.sys.stdin", fake_stdin) + monkeypatch.setattr("security.send_notifications.sys.stdin", fake_stdin) assert _resolve_body(args) == "piped content" @@ -187,7 +187,7 @@ def test_no_webhook_raises(monkeypatch: pytest.MonkeyPatch) -> None: def test_main_sends_when_not_dry_run(monkeypatch: pytest.MonkeyPatch) -> None: """Non-dry-run path: main() calls send_to_teams with the webhook URL.""" calls: list[tuple] = [] - monkeypatch.setattr("send_to_teams.send_to_teams", lambda url, payload: calls.append((url, payload))) + monkeypatch.setattr("security.send_notifications.send_to_teams", lambda url, payload: calls.append((url, payload))) main(["--body", "hi", "--webhook-url", "https://hook"]) assert len(calls) == 1 assert calls[0][0] == "https://hook" @@ -206,7 +206,7 @@ def fake_post(url, **kwargs): calls.append((url, kwargs)) return types.SimpleNamespace(status_code=200, text="1") - monkeypatch.setattr("send_to_teams.requests.post", 
fake_post) + monkeypatch.setattr("security.send_notifications.requests.post", fake_post) send_to_teams("https://hook", {"type": "message"}) assert len(calls) == 1 @@ -214,6 +214,6 @@ def test_failure_raises(monkeypatch: pytest.MonkeyPatch) -> None: def fake_post(url, **kwargs): return types.SimpleNamespace(status_code=500, text="error") - monkeypatch.setattr("send_to_teams.requests.post", fake_post) + monkeypatch.setattr("security.send_notifications.requests.post", fake_post) with pytest.raises(SystemExit, match="failed"): send_to_teams("https://hook", {"type": "message"}) diff --git a/tests/security/utils/test_sec_events.py b/tests/security/utils/test_sec_events.py deleted file mode 100644 index e5685d7..0000000 --- a/tests/security/utils/test_sec_events.py +++ /dev/null @@ -1,163 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -"""Unit tests for ``utils.sec_events``.""" - -import pytest - -from utils.sec_events import ( - parse_sec_event_fields, - render_sec_event, - strip_sec_events_from_body, -) - - -# ===================================================================== -# parse_sec_event_fields -# ===================================================================== - - -def test_basic_kv() -> None: - raw = "action=created\nseen_at=2026-01-01\nsource=aquasec" - result = parse_sec_event_fields(raw) - assert result == { - "action": "created", - "seen_at": "2026-01-01", - "source": "aquasec", - } - -def test_ignores_blank_and_no_equals() -> None: - raw = "action=created\n\njust-text\nfoo=bar" - result = parse_sec_event_fields(raw) - assert result == {"action": "created", "foo": "bar"} - -def test_equals_in_value() -> None: - raw = "path=a=b" - result = parse_sec_event_fields(raw) - assert result == {"path": "a=b"} - -def test_empty_string() -> None: - assert parse_sec_event_fields("") == {} - -def test_none_input() -> None: - assert parse_sec_event_fields(None) == {} - -def test_strips_whitespace() -> None: - raw = " action = created \n seen_at = 2026-01-01 " - result = parse_sec_event_fields(raw) - assert result == {"action": "created", "seen_at": "2026-01-01"} - - -# ===================================================================== -# render_sec_event -# ===================================================================== - - -def test_renders_fields_in_preferred_order() -> None: - fields = { - "commit_sha": "abc123", - "action": "created", - "seen_at": "2026-01-01", - } - rendered = render_sec_event(fields) - lines = rendered.strip().splitlines() - assert lines[0] == "[sec-event]" - assert lines[-1] == "[/sec-event]" - # action should come before commit_sha (preferred order) - assert lines.index("action=created") < lines.index("commit_sha=abc123") - -def test_includes_non_preferred_keys_sorted() -> None: - fields = {"action": "created", "z_custom": "1", "a_extra": "2"} - 
rendered = render_sec_event(fields) - # extra keys sorted alphabetically after preferred - assert "a_extra=2" in rendered - assert "z_custom=1" in rendered - lines = rendered.strip().splitlines() - idx_a = lines.index("a_extra=2") - idx_z = lines.index("z_custom=1") - assert idx_a < idx_z - -def test_skips_blank_values() -> None: - fields = {"action": "created", "path": " ", "source": ""} - rendered = render_sec_event(fields) - assert "path=" not in rendered - assert "source=" not in rendered - assert "action=created" in rendered - -def test_roundtrip() -> None: - fields = { - "action": "created", - "seen_at": "2026-01-01", - "source": "aquasec", - "gh_alert_number": "42", - "occurrence_fp": "fp123", - } - rendered = render_sec_event(fields) - # Extract inner content (skip opening/closing tags) - inner = "\n".join(rendered.strip().splitlines()[1:-1]) - parsed = parse_sec_event_fields(inner) - assert parsed == fields - - -# ===================================================================== -# strip_sec_events_from_body -# ===================================================================== - - -def test_removes_inline_block() -> None: - body = "Some text\n[sec-event]\naction=created\n[/sec-event]\nMore text" - result = strip_sec_events_from_body(body) - assert "[sec-event]" not in result - assert "Some text" in result - assert "More text" in result - -def test_removes_section_header() -> None: - body = "Intro\n\n## Security Events\nold stuff\n" - result = strip_sec_events_from_body(body) - assert "## Security Events" not in result - assert "Intro" in result - -def test_empty_body() -> None: - result = strip_sec_events_from_body("") - assert result.strip() == "" - -def test_none_body() -> None: - result = strip_sec_events_from_body(None) - assert result.strip() == "" - -def test_no_events() -> None: - body = "Just regular body text\n" - result = strip_sec_events_from_body(body) - assert "Just regular body text" in result - -def test_multiple_inline_blocks() -> 
None: - body = ( - "Text\n" - "[sec-event]\naction=created\n[/sec-event]\n" - "Middle\n" - "[sec-event]\naction=reopened\n[/sec-event]\n" - "End\n" - ) - result = strip_sec_events_from_body(body) - assert "[sec-event]" not in result - assert "Text" in result - assert "Middle" in result - assert "End" in result - -def test_collapses_excessive_newlines() -> None: - body = "A\n\n\n\n\nB\n" - result = strip_sec_events_from_body(body) - assert "\n\n\n" not in result