From 8e971698a89fb678de25e7b81185f5b364542121 Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Fri, 10 Apr 2026 09:34:20 +0200 Subject: [PATCH 01/16] QA project gates. --- .github/copilot-instructions.md | 38 ++++ .github/dependabot.yml | 39 ++++ .github/pull_request_template.md | 11 + .../workflows/aquasec-branch-comparison.yml | 77 +++++++ .github/workflows/aquasec-night-scan.yml | 46 ++++ .github/workflows/check_pr_release_notes.yml | 6 +- .github/workflows/check_python.yml | 6 +- .github/workflows/release_draft.yml | 120 +++++++++++ .gitignore | 38 ++-- .pylintrc | 23 +- LICENSE | 201 ++++++++++++++++++ Makefile | 20 ++ pyproject.toml | 49 +---- requirements.txt | 1 + src/security/derive_team_security_metrics.py | 1 + tests/security/utils/test_issue_sync.py | 81 ------- 16 files changed, 599 insertions(+), 158 deletions(-) create mode 100644 .github/copilot-instructions.md create mode 100644 .github/dependabot.yml create mode 100644 .github/pull_request_template.md create mode 100644 .github/workflows/aquasec-branch-comparison.yml create mode 100644 .github/workflows/aquasec-night-scan.yml create mode 100644 .github/workflows/release_draft.yml create mode 100644 LICENSE create mode 100644 Makefile diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 0000000..abf0db7 --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,38 @@ +Copilot instructions for Organizational Workflows + +Purpose +TBD + +Structure +TBD + +Python style +- Python 3.14 +- Type hints for public functions and classes +- Use `logging.getLogger(__name__)`, not print +- Lazy % formatting in logging: `logger.info("msg %s", var)` +- F-strings in exceptions: `raise ValueError(f"Error {var}")` +- Google-style docstrings +- Single blank line at end of file +- No documentation for `__init__` methods + +Patterns +- Classes with `__init__` cannot throw exceptions +- Use private methods (`_method_name`) for internal class helpers +- All 
info logs must start with "Security workflow -" prefix +- Never disable pylint behaviour in the code + +Testing +- Mirror src structure: `src/security/module.py` -> `tests/security/test_module.py` +- Minimal tests, no redundant tests +- All imports at the top of test files (never inside test functions) +- Use conftest.py fixtures for repeated mocking patterns across tests +- Comment sections: `# method_name` before tests +- Use `mocker.patch("module.dependency")` or `mocker.patch.object(Class, "method")` +- Assert pattern: `assert expected == actual` +- Use `pytest.raises(Exception)` for exceptions +- Use `@pytest.mark.parametrize` for data-driven tests (negative/failure scenarios with multiple similar cases) + +Quality gates (run after changes, fix only if below threshold) +- Run all quality gates at once: `make qa` +- Once a quality gate passes, do not re-run it in different scenarios \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..9afdef0 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,39 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + target-branch: "master" + schedule: + interval: "weekly" + day: "sunday" + labels: + - "infrastructure" + - "no RN" + open-pull-requests-limit: 3 + commit-message: + prefix: "chore" + include: "scope" + groups: + github-actions: + patterns: + - "*" + + - package-ecosystem: "pip" + directory: "/" + target-branch: "master" + schedule: + interval: "weekly" + day: "sunday" + labels: + - "infrastructure" + - "no RN" + open-pull-requests-limit: 3 + commit-message: + prefix: "chore" + include: "scope" + allow: + - dependency-type: "direct" + groups: + python-dependencies: + patterns: + - "*" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..323ac01 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,11 @@ + +## Overview + + +## Release Notes 
+- TBD: 1st item of release notes +- TBD: 2nd item of release notes + + +## Related +Closes #issue_number diff --git a/.github/workflows/aquasec-branch-comparison.yml b/.github/workflows/aquasec-branch-comparison.yml new file mode 100644 index 0000000..1e560a3 --- /dev/null +++ b/.github/workflows/aquasec-branch-comparison.yml @@ -0,0 +1,77 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +name: AquaSec Branch Comparison + +on: + pull_request: + types: [ opened, synchronize, reopened ] + +concurrency: + group: aquasec-branch-comparison-${{ github.event.pull_request.number }} + cancel-in-progress: true + +permissions: + contents: read + pull-requests: write + +jobs: + branch-comparison: + name: AquaSec Branch Comparison + if: ${{ !github.event.pull_request.head.repo.fork }} + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd + with: + persist-credentials: false + fetch-depth: 0 + + - name: Compare branches + id: aquasec + uses: AbsaOSS/aquasec-scan-results@15ee405515a000288b4ae9cdcb9943ea974f74b7 + with: + aqua-key: ${{ secrets.AQUA_KEY }} + aqua-secret: ${{ secrets.AQUA_SECRET }} + group-id: ${{ secrets.AQUA_GROUP_ID }} + repository-id: ${{ secrets.AQUA_REPOSITORY_ID }} + dev-branch-comparison: 'true' + + - name: Find existing PR comment + if: always() && steps.aquasec.outputs.comparison-summary-file != '' + uses: 
peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad + id: find-comment + with: + issue-number: ${{ github.event.pull_request.number }} + comment-author: 'github-actions[bot]' + body-includes: '' + + - name: Post or update PR comment + if: always() && steps.aquasec.outputs.comparison-summary-file != '' + uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 + with: + issue-number: ${{ github.event.pull_request.number }} + comment-id: ${{ steps.find-comment.outputs.comment-id }} + edit-mode: replace + body-path: ${{ steps.aquasec.outputs.comparison-summary-file }} + + - name: Upload comparison summary as artifact + if: always() && steps.aquasec.outputs.comparison-summary-file != '' + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f + with: + name: aquasec-comparison-summary-pr-${{ github.event.pull_request.number }} + path: ${{ steps.aquasec.outputs.comparison-summary-file }} + retention-days: 7 diff --git a/.github/workflows/aquasec-night-scan.yml b/.github/workflows/aquasec-night-scan.yml new file mode 100644 index 0000000..ef316f1 --- /dev/null +++ b/.github/workflows/aquasec-night-scan.yml @@ -0,0 +1,46 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +name: AquaSec Night Scan + +on: + schedule: + - cron: '23 2 * * *' + workflow_dispatch: + +concurrency: + group: aquasec-night-scan-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + actions: read + issues: write + security-events: write + +jobs: + aquasec-night-scan: + uses: AbsaOSS/organizational-workflows/.github/workflows/aquasec-scan.yml@d5379d7bfc89dfb5ba359516525c4b07846d9716 + with: + severity-priority-map: 'Critical=P0,High=P1,Medium=P2,Low=P3' + project-number: 203 + project-org: 'absa-group' + secrets: + AQUA_KEY: ${{ secrets.AQUA_KEY }} + AQUA_SECRET: ${{ secrets.AQUA_SECRET }} + AQUA_GROUP_ID: ${{ secrets.AQUA_GROUP_ID }} + AQUA_REPOSITORY_ID: ${{ secrets.AQUA_REPOSITORY_ID }} + TEAMS_WEBHOOK_URL: ${{ secrets.TEAMS_WEBHOOK_URL }} diff --git a/.github/workflows/check_pr_release_notes.yml b/.github/workflows/check_pr_release_notes.yml index 0999a0b..bb4445b 100644 --- a/.github/workflows/check_pr_release_notes.yml +++ b/.github/workflows/check_pr_release_notes.yml @@ -22,7 +22,7 @@ on: branches: [ master ] concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} cancel-in-progress: true jobs: @@ -30,12 +30,12 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v5 + - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 with: python-version: '3.14' - name: Check presence of release notes in PR description - uses: AbsaOSS/release-notes-presence-check@8e586b26a5e27f899ee8590a5d988fd4780a3dbf # v0.4.0 + uses: AbsaOSS/release-notes-presence-check@8e586b26a5e27f899ee8590a5d988fd4780a3dbf env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.github/workflows/check_python.yml b/.github/workflows/check_python.yml index b8b3036..802ccb6 100644 --- a/.github/workflows/check_python.yml +++ b/.github/workflows/check_python.yml @@ 
-94,11 +94,11 @@ jobs: - name: Check Pylint score run: | - if (( $(echo "$PYLINT_SCORE < 9.5" | bc -l) )); then - echo "Failure: Pylint score is below 9.5 (project score: $PYLINT_SCORE)." + if (( $(echo "$PYLINT_SCORE < 9.0" | bc -l) )); then + echo "Failure: Pylint score is below 9.0 (project score: $PYLINT_SCORE)." exit 1 else - echo "Success: Pylint score is above 9.5 (project score: $PYLINT_SCORE)." + echo "Success: Pylint score is above 9.0 (project score: $PYLINT_SCORE)." fi black-check: diff --git a/.github/workflows/release_draft.yml b/.github/workflows/release_draft.yml new file mode 100644 index 0000000..4568701 --- /dev/null +++ b/.github/workflows/release_draft.yml @@ -0,0 +1,120 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +name: Draft Release +on: + workflow_dispatch: + inputs: + tag-name: + description: 'Name of git tag to be created, and then draft release created. Syntax: "v[0-9]+.[0-9]+.[0-9]+".' + required: true + from-tag-name: + description: 'Name of the git tag from which to detect changes from. Default value: latest tag. Syntax: "v[0-9]+.[0-9]+.[0-9]+".' 
+ required: false + +jobs: + release-draft: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd + with: + fetch-depth: 0 + persist-credentials: false + + - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 + with: + python-version: '3.14' + + - name: Check Format of Received Tag + id: check-version-tag + uses: AbsaOSS/version-tag-check@4145e48bf3f77a5afff2ec9afdd8afb6b53bce34 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + github-repository: ${{ github.repository }} + version-tag: ${{ github.event.inputs.tag-name }} + + - name: Check Format of Received From Tag + if: ${{ github.event.inputs.from-tag-name }} + id: check-version-from-tag + uses: AbsaOSS/version-tag-check@4145e48bf3f77a5afff2ec9afdd8afb6b53bce34 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + github-repository: ${{ github.repository }} + version-tag: ${{ github.event.inputs.from-tag-name }} + should-exist: true + + - name: Generate Release Notes + id: generate_release_notes + uses: AbsaOSS/generate-release-notes@B90223510d1704301a36a36f0d86a72a0e72f0cf + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag-name: ${{ github.event.inputs.tag-name }} + from-tag-name: ${{ github.event.inputs.from-tag-name }} + chapters: | + - {"title": "Entries to skip 🚫", "label": "duplicate"} + - {"title": "Entries to skip 🚫", "label": "invalid"} + - {"title": "Entries to skip 🚫", "label": "wontfix"} + - {"title": "Entries to skip 🚫", "label": "no RN"} + - {"title": "Breaking Changes 💥", "label": "breaking change"} + - {"title": "Security updates 👮", "label": "security"} + - {"title": "New Features 🎉", "label": "enhancement"} + - {"title": "Bugfixes 🛠", "label": "bug"} + - {"title": "Epics 📖", "label": "epic"} + - {"title": "Infrastructure ⚙️", "label": "infrastructure"} + - {"title": "Silent-live 🤫", "label": "silent live"} + - {"title": "Documentation 📜", "label": "documentation"} + duplicity-scope: 'none' + 
warnings: true + skip-release-notes-labels: "no RN,duplicate,invalid,wontfix" + print-empty-chapters: false + row-format-issue: '_{title}_ {developers} in {number}' + row-format-pr: '_{title}_ {developers} in {number}' + row-format-link-pr: true + hierarchy: true + + - name: Create and Push Tag + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd + env: + TAG_NAME: ${{ github.event.inputs.tag-name }} + with: + script: | + const tag = process.env.TAG_NAME + const ref = `refs/tags/${tag}`; + const sha = context.sha; // The SHA of the commit to tag + + await github.rest.git.createRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: ref, + sha: sha + }); + + console.log(`Tag created: ${tag}`); + github-token: ${{ secrets.GITHUB_TOKEN }} + + - name: Create Draft Release + uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + name: ${{ github.event.inputs.tag-name }} + body: ${{ steps.generate_release_notes.outputs.release-notes }} + tag_name: ${{ github.event.inputs.tag-name }} + draft: true + prerelease: false diff --git a/.gitignore b/.gitignore index 8f67b1d..4942f61 100644 --- a/.gitignore +++ b/.gitignore @@ -1,14 +1,26 @@ -.DS_Store -src/security/__pycache__/ -**/__pycache__/ -*.pyc -/.idea -target -.bsp -.idea -*.iml -.vscode -src/security/alerts*.json -src/security/.coverage +# Python virtual environment +.venv/ +venv/ +env/ + +# Python cache files +__pycache__/ +*.py[cod] +*$py.class + +# Test coverage .coverage -output.txt +.coverage.* +htmlcov/ +.pytest_cache/ + +# IDEs +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# OS +.DS_Store +Thumbs.db diff --git a/.pylintrc b/.pylintrc index 0457059..b537ea4 100644 --- a/.pylintrc +++ b/.pylintrc @@ -68,7 +68,7 @@ ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). 
-init-hook='import sys; sys.path.extend(["src", "src/security"])' +#init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use, and will cap the count on Windows to @@ -294,16 +294,16 @@ ignored-parents= max-args=10 # Maximum number of attributes for a class (see R0902). -max-attributes=25 +max-attributes=7 # Maximum number of boolean expressions in an if statement (see R0916). max-bool-expr=5 # Maximum number of branch for function / method body. -max-branches=20 +max-branches=12 # Maximum number of locals for function / method body. -max-locals=30 +max-locals=15 # Maximum number of parents for a class (see R0901). max-parents=7 @@ -312,10 +312,10 @@ max-parents=7 max-public-methods=20 # Maximum number of return / yield for function / method body. -max-returns=7 +max-returns=6 # Maximum number of statements in function / method body. -max-statements=60 +max-statements=50 # Minimum number of public methods for a class (see R0903). min-public-methods=1 @@ -436,10 +436,7 @@ disable=raw-checker-failed, deprecated-pragma, use-symbolic-message-instead, use-implicit-booleaness-not-comparison-to-string, - use-implicit-booleaness-not-comparison-to-zero, - wrong-import-position, - wrong-import-order, - ungrouped-imports + use-implicit-booleaness-not-comparison-to-zero # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option @@ -469,7 +466,7 @@ notes-rgx= [REFACTORING] # Maximum number of nested blocks for function / method body -max-nested-blocks=6 +max-nested-blocks=5 # Complete name of functions that never returns. When checking for # inconsistent-return-statements if a never returning function is called then @@ -524,7 +521,7 @@ ignore-imports=yes ignore-signatures=yes # Minimum lines number of a similarity. 
-min-similarity-lines=20 +min-similarity-lines=4 [SPELLING] @@ -572,7 +569,7 @@ contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. -generated-members=__dataclass_fields__ +generated-members= # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..58cf8ef
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,20 @@
+.PHONY: black pylint mypy pytest-unit qa
+
+PYTHON := .venv/bin/python
+PY_FILES := $(shell git ls-files '*.py')
+MIN_PYLINT_SCORE = 9.0
+MIN_COVERAGE = 80
+
+black: ## Run Black formatter
+	$(PYTHON) -m black .
+
+pylint: ## Run Pylint (threshold >= 9.0)
+	$(PYTHON) -m pylint --fail-under=$(MIN_PYLINT_SCORE) $(PY_FILES)
+
+mypy: ## Run mypy static type checker
+	$(PYTHON) -m mypy .
+ +pytest-unit: ## Run unit tests with coverage (threshold >= 80%) + $(PYTHON) -m pytest tests/ --cov=src --cov-fail-under=$(MIN_COVERAGE) + +qa: black pylint mypy pytest-unit diff --git a/pyproject.toml b/pyproject.toml index 06af3d9..b366200 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,55 +1,14 @@ -[project] -name = "organizational-workflows" -version = "0.1.0" -requires-python = ">=3.14" -dependencies = [] - -[project.optional-dependencies] -security = ["PyGithub>=2.0", "requests>=2.32"] -dev = [ - "organizational-workflows[security]", - "pytest>=8.0", - "pytest-cov>=6.0", -] - -# ── pytest + coverage ──────────────────────────────────────────────── - [tool.pytest.ini_options] -testpaths = ["tests"] -pythonpath = [".", "src", "src/security"] - -[tool.coverage.run] -source = ["src/shared", "src/security"] -omit = [ - "*/tests/*", - "*/__pycache__/*", - "*/htmlcov/*", -] - -[tool.coverage.report] -show_missing = true -skip_empty = true -exclude_lines = [ - "pragma: no cover", - "if __name__ == .__main__.", - "if TYPE_CHECKING:", -] - -# ── formatting & type-checking ─────────────────────────────────────── +pythonpath = ["src", "src/security"] [tool.black] line-length = 120 target-version = ['py314'] force-exclude = '''test''' +[tool.coverage.run] +omit = ["tests/*"] + [tool.mypy] check_untyped_defs = true exclude = "tests" - -[[tool.mypy.overrides]] -module = "requests.*" -ignore_missing_imports = true - -[[tool.mypy.overrides]] -module = "derive_team_security_metrics" -ignore_errors = true diff --git a/requirements.txt b/requirements.txt index 51375b5..b309ff3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,3 +6,4 @@ pytest-mock>=3.0 pylint>=3.0 black>=24.0 mypy>=1.0 +types-requests>=2.33 diff --git a/src/security/derive_team_security_metrics.py b/src/security/derive_team_security_metrics.py index 178adeb..8316e66 100644 --- a/src/security/derive_team_security_metrics.py +++ b/src/security/derive_team_security_metrics.py @@ -35,6 +35,7 @@ 
import json import logging import os +from datetime import datetime from typing import Any, Dict, List, Optional SNAPSHOT_CUR = os.environ.get("SNAPSHOT_CURRENT", "data/issues_snapshot.json") diff --git a/tests/security/utils/test_issue_sync.py b/tests/security/utils/test_issue_sync.py index 3e373da..81d399c 100644 --- a/tests/security/utils/test_issue_sync.py +++ b/tests/security/utils/test_issue_sync.py @@ -596,26 +596,6 @@ def test_handle_new_child_create_fails(mocker: MockerFixture, sast_alert: Alert) assert ctx.fingerprint not in sync.index.by_fingerprint -# ===================================================================== -# _handle_existing_child_issue (integration of sub-functions) -# ===================================================================== - - -def test_handle_existing_child_updates_body(mocker: MockerFixture, sast_alert: Alert) -> None: - """Existing child issue body is updated with fresh template.""" - mock_body = mocker.patch("utils.issue_sync.gh_issue_edit_body") - mocker.patch("utils.issue_sync.gh_issue_add_labels") - child = _issue_with_secmeta(5, { - "type": "child", "fingerprint": "fp_test_123", - "occurrence_count": "1", "first_seen": "2026-01-01", - "last_seen": "2026-01-01", "last_occurrence_fp": "old_occ", - }) - ctx = _make_alert_context(alert=sast_alert, fingerprint="fp_test_123") - sync = _make_sync_context(issues={5: child}, notifications=[]) - _handle_existing_child_issue(ctx=ctx, sync=sync, issue=child, parent_issue=None) - mock_body.assert_called_once() - - # ===================================================================== # _ensure_child_linked_to_parent # ===================================================================== @@ -929,23 +909,6 @@ def test_close_resolved_parent_skips_open_child(mocker: MockerFixture) -> None: # ensure_issue (end-to-end orchestration per alert) # ===================================================================== - -def test_ensure_issue_new_alert_creates_parent_and_child( - mocker: 
MockerFixture, sast_alert: Alert, -) -> None: - """Full path: new alert creates parent + child.""" - mock_create = mocker.patch("utils.issue_sync.gh_issue_create", return_value=50) - mocker.patch("utils.issue_sync.gh_issue_comment") - mocker.patch("utils.issue_sync.gh_issue_edit_body") - mocker.patch("utils.issue_sync.gh_issue_add_labels") - issues: dict[int, Issue] = {} - index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) - notifications: list[NotifiedIssue] = [] - sync = _make_sync_context(issues=issues, index=index, dry_run=False, notifications=notifications) - ensure_issue(sast_alert, sync) - assert mock_create.call_count == 2 # parent + child - assert len(notifications) == 1 - def test_ensure_issue_dry_run(sast_alert: Alert) -> None: """Dry-run: no gh calls, notification with issue_number=0.""" issues: dict[int, Issue] = {} @@ -991,26 +954,6 @@ def test_ensure_issue_missing_alert_details_raises() -> None: with pytest.raises(SystemExit, match="alert_hash"): ensure_issue(alert, _make_sync_context(issues=issues, index=index, dry_run=True)) -def test_ensure_issue_existing_child_updates(mocker: MockerFixture, sast_alert: Alert) -> None: - """When a child issue already exists, it is updated (not duplicated).""" - mocker.patch("utils.issue_sync.gh_issue_edit_body") - mocker.patch("utils.issue_sync.gh_issue_add_labels") - fp = sast_alert.alert_details.alert_hash - child = _issue_with_secmeta(5, { - "type": "child", "fingerprint": fp, - "occurrence_count": "1", "first_seen": "2026-01-01", - "last_seen": "2026-01-01", "last_occurrence_fp": "old_occ", - }) - parent = _issue_with_secmeta(10, { - "type": "parent", "rule_id": sast_alert.metadata.rule_id, - "severity": "high", "first_seen": "2026-01-01", "last_seen": "2026-01-01", - }) - issues = {5: child, 10: parent} - index = build_issue_index(issues) - notifications: list[NotifiedIssue] = [] - sync = _make_sync_context(issues=issues, index=index, dry_run=True, notifications=notifications) - 
ensure_issue(sast_alert, sync) - # ===================================================================== # sync_alerts_and_issues (top-level orchestrator) @@ -1029,19 +972,6 @@ def test_sync_dry_run_single_alert(sast_alert: Alert) -> None: result = sync_alerts_and_issues(alerts, {}, dry_run=True) assert len(result.notifications) == 1 -def test_sync_creates_issues( - mocker: MockerFixture, sast_alert: Alert, vuln_alert: Alert, -) -> None: - """Multiple alerts each get parent + child issues.""" - mock_create = mocker.patch("utils.issue_sync.gh_issue_create", return_value=100) - mocker.patch("utils.issue_sync.gh_issue_comment") - mocker.patch("utils.issue_sync.gh_issue_edit_body") - mocker.patch("utils.issue_sync.gh_issue_add_labels") - alerts = {303: sast_alert, 312: vuln_alert} - result = sync_alerts_and_issues(alerts, {}, dry_run=False) - assert len(result.notifications) == 2 - assert mock_create.call_count == 4 # 2 parents + 2 children - def test_sync_severity_change_detected(sast_alert: Alert) -> None: """Severity change on existing parent is captured in result.""" parent = _issue_with_secmeta(10, { @@ -1092,17 +1022,6 @@ def test_init_priority_sync_no_project_number() -> None: ) assert result is None -def test_init_priority_sync_derives_org_from_alert(mocker: MockerFixture) -> None: - """Derives org from the first alert's _repo when project_org is empty.""" - alerts = {1: Alert.from_dict({"_repo": "derived-org/repo-a"})} - mocker.patch("utils.issue_sync.gh_project_get_priority_field", return_value=mocker.MagicMock()) - result = _init_priority_sync( - alerts, severity_priority_map={"high": "Urgent"}, project_number=7, - project_org="", dry_run=True, - ) - assert result is not None - assert result.org == "derived-org" - def test_init_priority_sync_no_org_returns_none() -> None: """Returns None with warning when org cannot be determined.""" alerts = {1: Alert.from_dict({"_repo": ""})} From 313d1924b5ba0d8ca23c200feee2c309074e9ec0 Mon Sep 17 00:00:00 2001 From: 
"Tobias.Mikula" Date: Wed, 15 Apr 2026 14:10:40 +0200 Subject: [PATCH 02/16] current state before deleting parts. --- .github/workflows/aquasec-remove-label.yml | 28 ++++++++++++++++ .github/workflows/aquasec-scan.yml | 33 +++++++------------ ....yml => remove-resolved-finding-label.yml} | 9 ++--- .../example_workflows/aquasec-night-scan.yml | 15 +++------ ....yml => remove-resolved-finding-label.yml} | 10 ++---- docs/security/security.md | 4 +-- 6 files changed, 51 insertions(+), 48 deletions(-) create mode 100644 .github/workflows/aquasec-remove-label.yml rename .github/workflows/{remove-adept-to-close-on-issue-close.yml => remove-resolved-finding-label.yml} (83%) rename docs/security/example_workflows/{remove-adept-to-close-on-issue-close.yml => remove-resolved-finding-label.yml} (67%) diff --git a/.github/workflows/aquasec-remove-label.yml b/.github/workflows/aquasec-remove-label.yml new file mode 100644 index 0000000..a8e3bfc --- /dev/null +++ b/.github/workflows/aquasec-remove-label.yml @@ -0,0 +1,28 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +name: Remove Resolved Finding Label + +on: + issues: + types: [closed] + +permissions: + issues: write + +jobs: + remove-label: + uses: AbsaOSS/organizational-workflows/.github/workflows/remove-resolved-finding-label.yml@master \ No newline at end of file diff --git a/.github/workflows/aquasec-scan.yml b/.github/workflows/aquasec-scan.yml index a32dc53..dcf1b06 100644 --- a/.github/workflows/aquasec-scan.yml +++ b/.github/workflows/aquasec-scan.yml @@ -14,12 +14,9 @@ # limitations under the License. # -# Reusable workflow – Aquasec Scan + Security Alerts to Issues. -# -# Called from application repositories via workflow_call. -# The caller triggers on schedule / workflow_dispatch and passes the required secrets. +# SECURITY reusable workflow – AquaSec Scan + Security Alerts to Issues. -name: Aquasec Scan +name: AquaSec Scan on: workflow_call: @@ -29,16 +26,17 @@ on: required: false type: boolean default: false + severity-priority-map: description: > Comma-separated severity=priority pairs that map alert severities to - priority values on the GitHub Project (e.g. - 'Critical=Blocker,High=Urgent,Medium=Normal'). Only listed severities - get a priority; unlisted ones are left empty. When not set, priority + priority values on the GitHub Project (e.g.'Critical=Blocker,High=Urgent,Medium=Normal'). + Only listed severities get a priority. Unlisted ones are left empty. When not set, priority is skipped entirely. required: false type: string default: '' + project-number: description: > GitHub Projects V2 number (org-level) where a Priority single-select @@ -47,6 +45,7 @@ on: required: false type: number default: 0 + project-org: description: > GitHub organisation that owns the Projects V2 board. 
Use when the @@ -55,6 +54,7 @@ on: required: false type: string default: '' + secrets: AQUA_KEY: required: true @@ -66,24 +66,16 @@ on: required: true TEAMS_WEBHOOK_URL: required: false - GH_PROJECT_ONLY_TOKEN: - description: > - Classic PAT with 'project' scope on an account that is a member of the - org that owns the ProjectV2 board. Required only when the project lives - in a different organisation than the calling repository. When omitted, - github.token is used (works only for same-org projects). - required: false permissions: contents: read actions: read issues: write security-events: write - repository-projects: write jobs: aquasec-scan: - name: Aquasec Scan + name: AquaSec Scan runs-on: ubuntu-latest steps: - name: Checkout code @@ -102,7 +94,7 @@ jobs: repository-id: ${{ secrets.AQUA_REPOSITORY_ID }} verbose-logging: ${{ inputs.verbose-logging }} - - name: Upload Scan Results to GitHub Security + - name: Upload scan results to GitHub Security and quality uses: github/codeql-action/upload-sarif@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 with: sarif_file: ${{ steps.aquasec.outputs.nightscan-sarif-file }} @@ -113,7 +105,7 @@ jobs: needs: aquasec-scan runs-on: ubuntu-latest steps: - - name: Checkout security scripts + - name: Checkout code uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 with: repository: AbsaOSS/organizational-workflows @@ -131,10 +123,9 @@ jobs: - name: Install dependencies run: pip install -r org-workflows/requirements.txt - - name: Run alert-to-issue sync + - name: Create issues from security alerts env: GH_TOKEN: ${{ github.token }} - GH_PROJECT_ONLY_TOKEN: ${{ secrets.GH_PROJECT_ONLY_TOKEN }} TEAMS_WEBHOOK_URL: ${{ secrets.TEAMS_WEBHOOK_URL }} SEVERITY_PRIORITY_MAP: ${{ inputs.severity-priority-map }} PROJECT_NUMBER: ${{ inputs.project-number }} diff --git a/.github/workflows/remove-adept-to-close-on-issue-close.yml b/.github/workflows/remove-resolved-finding-label.yml similarity index 83% rename from 
.github/workflows/remove-adept-to-close-on-issue-close.yml rename to .github/workflows/remove-resolved-finding-label.yml index 88acb4d..c813543 100644 --- a/.github/workflows/remove-adept-to-close-on-issue-close.yml +++ b/.github/workflows/remove-resolved-finding-label.yml @@ -14,14 +14,9 @@ # limitations under the License. # -# Reusable workflow – Remove sec:adept-to-close label when an issue is closed. -# -# Called from application repositories via workflow_call. -# The caller must trigger on `issues: [closed]`. -# Note: for `workflow_call`, the called workflow receives the same event payload as the caller, -# so `context.payload` (aka `github.event`) is populated without needing to "forward" it via inputs. +# SECURITY reusable workflow – Removing sec:adept-to-close label when an Issue is closed. -name: Remove sec:adept-to-close on close +name: Remove Resolved Finding Label on: workflow_call: diff --git a/docs/security/example_workflows/aquasec-night-scan.yml b/docs/security/example_workflows/aquasec-night-scan.yml index 9763091..aa17056 100644 --- a/docs/security/example_workflows/aquasec-night-scan.yml +++ b/docs/security/example_workflows/aquasec-night-scan.yml @@ -14,12 +14,9 @@ # limitations under the License. # -# Example caller workflow – add this to your application repository as -# .github/workflows/aquasec-night-scan.yml -# -# It delegates to the reusable workflow in the organizational-workflows repo. 
+# Example caller workflow – Add this to your repository as .github/workflows/aquasec-night-scan.yml -name: Aquasec Night Scan +name: AquaSec Night Scan on: schedule: @@ -35,21 +32,17 @@ permissions: actions: read issues: write security-events: write - repository-projects: write jobs: scan: uses: AbsaOSS/organizational-workflows/.github/workflows/aquasec-scan.yml@c1fa5b54ff24fea071415da89abc4f0506344f01 with: severity-priority-map: 'Critical=Blocker,High=Urgent,Medium=Normal,Low=Minor' - project-number: 42 # Replace with your org's GitHub Project number for priority tracking - project-org: 'my-org' # Replace with the org that owns the project (if different from repo org) + project-number: 42 + project-org: 'my-org' secrets: AQUA_KEY: ${{ secrets.AQUA_KEY }} AQUA_SECRET: ${{ secrets.AQUA_SECRET }} AQUA_GROUP_ID: ${{ secrets.AQUA_GROUP_ID }} AQUA_REPOSITORY_ID: ${{ secrets.AQUA_REPOSITORY_ID }} TEAMS_WEBHOOK_URL: ${{ secrets.TEAMS_WEBHOOK_URL }} - # Required only when project-org differs from this repository's org. - # See docs/security/security.md – "Cross-org project token" for how to create it. - GH_PROJECT_ONLY_TOKEN: ${{ secrets.GH_PROJECT_ONLY_TOKEN }} diff --git a/docs/security/example_workflows/remove-adept-to-close-on-issue-close.yml b/docs/security/example_workflows/remove-resolved-finding-label.yml similarity index 67% rename from docs/security/example_workflows/remove-adept-to-close-on-issue-close.yml rename to docs/security/example_workflows/remove-resolved-finding-label.yml index 60e5998..2c0514c 100644 --- a/docs/security/example_workflows/remove-adept-to-close-on-issue-close.yml +++ b/docs/security/example_workflows/remove-resolved-finding-label.yml @@ -14,13 +14,9 @@ # limitations under the License. # -# Example caller workflow – add this to your application repository as -# .github/workflows/remove-adept-to-close-on-issue-close.yml -# -# It delegates to the reusable workflow in the organizational-workflows repo. 
-# Adjust and to match your organization. +# Example caller workflow – Add this to your repository as .github/workflows/remove-resolved-finding-label.yml -name: Remove sec:adept-to-close on close +name: Remove Resolved Finding Label on: issues: @@ -31,4 +27,4 @@ permissions: jobs: remove-label: - uses: AbsaOSS/organizational-workflows/.github/workflows/remove-adept-to-close-on-issue-close.yml@master + uses: AbsaOSS/organizational-workflows/.github/workflows/remove-resolved-finding-label.yml@c1fa5b54ff24fea071415da89abc4f0506344f01 diff --git a/docs/security/security.md b/docs/security/security.md index 4e32d9c..69e86a7 100644 --- a/docs/security/security.md +++ b/docs/security/security.md @@ -246,7 +246,7 @@ The reusable workflow forwards it to the Python script as the `GH_PROJECT_ONLY_T #### Remove sec:adept-to-close on close -Example caller (already available in [remove-adept-to-close-on-issue-close.yml](/docs/security/example_workflows/remove-adept-to-close-on-issue-close.yml)): +Example caller (already available in [remove-adept-to-close-on-issue-close.yml](/docs/security/example_workflows/remove-resolved-finding-label.yml)): ```yaml name: Remove sec:adept-to-close on close @@ -260,7 +260,7 @@ permissions: jobs: remove-label: - uses: AbsaOSS/organizational-workflows/.github/workflows/remove-adept-to-close-on-issue-close.yml@master + uses: AbsaOSS/organizational-workflows/.github/workflows/remove-resolved-finding-label.yml@master ``` > **Note:** The calling repository must grant the permissions the reusable workflow needs (listed in each workflow file). For cross-organization calls the reusable workflow repository must be set to "Accessible from repositories in the organization" under **Settings → Actions → General**. From 69ab6bda26ab8dd9a9932a33a30369f00299ee2f Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Wed, 15 Apr 2026 16:26:13 +0200 Subject: [PATCH 03/16] New project structure. 
--- .coverage | Bin 53248 -> 0 bytes .github/copilot-instructions.md | 35 ++- .github/workflows/aquasec-remove-label.yml | 28 --- .github/workflows/aquasec-scan.yml | 2 +- .../remove-resolved-finding-label.yml | 68 ------ README.md | 2 +- ...can.yml => aquasec-night-scan-example.yml} | 0 .../remove-resolved-finding-label.yml | 30 --- docs/security/security.md | 4 +- pyproject.toml | 3 +- {tests/security => src/core}/__init__.py | 2 + .../logging_config.py => core/config.py} | 13 +- src/core/github/__init__.py | 17 ++ src/core/github/client.py | 46 ++++ .../github/issues.py} | 4 +- .../github/projects.py} | 4 +- src/{shared/common.py => core/helpers.py} | 45 +--- src/{shared => core}/models.py | 0 src/{shared => core}/priority.py | 0 .../templates.py => core/rendering.py} | 0 src/{shared => security}/__init__.py | 2 +- .../utils => src/security/alerts}/__init__.py | 2 + src/security/{utils => alerts}/models.py | 102 +-------- .../alert_parser.py => alerts/parser.py} | 4 +- src/security/check_labels.py | 12 +- src/security/collect_alert.py | 9 +- src/security/{utils => }/constants.py | 0 src/security/derive_team_security_metrics.py | 208 ------------------ src/security/extract_team_security_stats.py | 206 ----------------- src/security/{utils => issues}/__init__.py | 2 +- .../issue_builder.py => issues/builder.py} | 16 +- src/security/issues/models.py | 119 ++++++++++ src/security/{utils => issues}/sec_events.py | 2 +- src/security/{utils => issues}/secmeta.py | 0 .../{utils/issue_sync.py => issues/sync.py} | 21 +- src/security/{utils => issues}/templates.py | 0 .../{sync_security_alerts.py => main.py} | 23 +- src/security/notifications/__init__.py | 17 ++ .../{utils => notifications}/teams.py | 16 +- src/security/promote_alerts.py | 29 +-- ...send_to_teams.py => send_notifications.py} | 19 +- .../github/test_issues.py} | 122 +++++----- tests/security/alerts/test_models.py | 52 +++++ .../test_parser.py} | 6 +- tests/security/conftest.py | 8 +- .../test_builder.py} | 
4 +- .../security/{utils => issues}/test_models.py | 41 +--- .../{utils => issues}/test_sec_events.py | 2 +- .../{utils => issues}/test_secmeta.py | 2 +- .../test_sync.py} | 94 ++++---- .../{utils => issues}/test_templates.py | 4 +- .../{utils => notifications}/test_teams.py | 16 +- tests/security/test_check_labels.py | 18 +- tests/security/test_collect_alert.py | 46 ++-- tests/security/{utils => }/test_constants.py | 2 +- ...t_sync_security_alerts.py => test_main.py} | 84 +++---- tests/security/test_promote_alerts.py | 51 ++--- ...to_teams.py => test_send_notifications.py} | 12 +- 58 files changed, 608 insertions(+), 1068 deletions(-) delete mode 100644 .coverage delete mode 100644 .github/workflows/aquasec-remove-label.yml delete mode 100644 .github/workflows/remove-resolved-finding-label.yml rename docs/security/{example_workflows/aquasec-night-scan.yml => aquasec-night-scan-example.yml} (100%) delete mode 100644 docs/security/example_workflows/remove-resolved-finding-label.yml rename {tests/security => src/core}/__init__.py (90%) rename src/{shared/logging_config.py => core/config.py} (70%) create mode 100644 src/core/github/__init__.py create mode 100644 src/core/github/client.py rename src/{shared/github_issues.py => core/github/issues.py} (99%) rename src/{shared/github_projects.py => core/github/projects.py} (99%) rename src/{shared/common.py => core/helpers.py} (51%) rename src/{shared => core}/models.py (100%) rename src/{shared => core}/priority.py (100%) rename src/{shared/templates.py => core/rendering.py} (100%) rename src/{shared => security}/__init__.py (90%) rename {tests/security/utils => src/security/alerts}/__init__.py (91%) rename src/security/{utils => alerts}/models.py (67%) rename src/security/{utils/alert_parser.py => alerts/parser.py} (97%) rename src/security/{utils => }/constants.py (100%) delete mode 100644 src/security/derive_team_security_metrics.py delete mode 100644 src/security/extract_team_security_stats.py rename 
src/security/{utils => issues}/__init__.py (88%) rename src/security/{utils/issue_builder.py => issues/builder.py} (93%) create mode 100644 src/security/issues/models.py rename src/security/{utils => issues}/sec_events.py (97%) rename src/security/{utils => issues}/secmeta.py (100%) rename src/security/{utils/issue_sync.py => issues/sync.py} (98%) rename src/security/{utils => issues}/templates.py (100%) rename src/security/{sync_security_alerts.py => main.py} (89%) create mode 100644 src/security/notifications/__init__.py rename src/security/{utils => notifications}/teams.py (91%) rename src/security/{send_to_teams.py => send_notifications.py} (91%) rename tests/{security/test_github_issues.py => core/github/test_issues.py} (75%) create mode 100644 tests/security/alerts/test_models.py rename tests/security/{utils/test_alert_parser.py => alerts/test_parser.py} (98%) rename tests/security/{utils/test_issue_builder.py => issues/test_builder.py} (99%) rename tests/security/{utils => issues}/test_models.py (75%) rename tests/security/{utils => issues}/test_sec_events.py (99%) rename tests/security/{utils => issues}/test_secmeta.py (99%) rename tests/security/{utils/test_issue_sync.py => issues/test_sync.py} (91%) rename tests/security/{utils => issues}/test_templates.py (97%) rename tests/security/{utils => notifications}/test_teams.py (92%) rename tests/security/{utils => }/test_constants.py (97%) rename tests/security/{test_sync_security_alerts.py => test_main.py} (78%) rename tests/security/{test_send_to_teams.py => test_send_notifications.py} (94%) diff --git a/.coverage b/.coverage deleted file mode 100644 index 38a4eabcd5b08303673b30b6079fe06ad4fa0730..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 53248 zcmeI5du$xV8Nhe%_8xn$@iRwAsh1XTQfyyLP|^~DV?xwM0a749tB`E=?$-8_yW3-T z&xr}aTtG4^qDZY&prSlfQPf9T{wb9LG*GB`Nu&W)AkriVL6ZpL1Q7^en(O!N-r9Fg zoJRFVl=NHO&Fo|5n{R&e&CGtcp7k|XU9MTG)TbMH#ggW64IIyNizSKUI1#>a__|vN 
zb_DkTV!md7s$G%m8oVpcuH$0CyE(QdzJhhe9*Cb8-4V-+KZ-sWQp5~gpp6I+0U|&I z{=W&7uZo3|GiUOfH(5$Lr&@-RQ4J@IFS&YY@5-gp%HB&aUn)80q}dS(w$4teS2FbV zQc*ReJ}sw8S|O`t6iX}gOV)turY@Ok7AHD52U8s;?6ULYo~&gdm!ITa>0YX%*zkn&E+12cGBtz{}4YiO_O-GB_T6Ruk&h5jIP_nt1A5rZ( zDMki9(Yo5eiHWK#>jOa<-N>T$X+tSw22`^{QpRhT(Lv7*mQ!0p&!;S1%4r3=ngvs{ zG`%3H8`Mn6QnSacU{$+H#`4b< zJEV!69dd2(+1AF7 z++-UH;*ryuWsdKNbw)FOdJ5CQ>r>ke_L6!d!CsWhNqV2%WiYqoG!9sEZJ$ZGFcbqj znxt0~f~H@pwV|@0gOtwc>3UnUEyh%#gTPqeChIE|GT2p2tV2d0R1KwH?W$gNhJ`QN zkic}h#b9!2+f=4AZUS=m_->>;GZ;$Fn#FGxZ8NI2Ry7LMnULF(#~DW5OLONKj; zoM@1Ab4-kqZXc;$f$DUIg2~=lQ&p#$PVTPmgv%izlx%I~%TABLaS6YHx=P@gi7qf{ zA~<>Vi;6WM9jiN@2F2S0!Q{f$sT4;QE~h4$FILmWjr&X@^W>k zb5O~_Xi`)(1G+4X!OjdIVZy8eiIucovTXasL)$!Y?wQcn5ioJ=vWHZ2?BvkBzptC7 z2Gc4|TjoXXS*d+MF>w?!+oiOw=TxQO%nguZnF(cTrey1I?5xx?A;TO(8U2IpwodNk zg2Vhu=yY!Xf}MO`DP*hj6;8vBR2ciQb7DEpAOabscIcEQbkRvNU7^*hIAXc-yNbXB zrRJ5a)7`OAY9y{7P>&Jk%tn_fmTGBvRl+2;GPYr@S4=5msBjVH(mCz^6PW?FmfVdY zddXqDl`FULq2&4J^W{>tGdkmpY{I0TS6sELhrT)^nRIc&URJ<>3wJxUc7p6`aNwa+ z0uF9Y3#J_kZmR0NPGk-`_&giq;6ob`AOb{y2oM1xKm>>Y5g-CYfCvx)B5?W<@bf-C zg#N#ez0I*v2xub$M1Tko0U|&IhyW2F0z`la5CI}U1iq34Vm|*_%>4_GxedI3*39bP z04(U5-_?CV1Zw4D`#83bef*WIASq1*hyW2F0z`la5CI}U1c(3;AOb{y2#gbm`Oo6r zp8WzSRndv^t+LLk&(!`;)`OR zD29I>zBu$MB&UrC5CI}U1c(3;IC%oAlLA-nNnLBg(#DjMo_oQCxm?Oni#i^eO3GdE zpVEzfrJ&smFEP*yO0ILgZmjFe>FdpuX=G9+tnN27YY5h2=W=Ssl9e31CjnMJ=4OES z)t-sZs)jbG%9g6+Ww%Tj7C0JO#)Q(=wL)pDJSz=eY5*^MuqG8}S_-TzFAl9~fx>z{ zE362w8qjeeK9WQEtC~UnLeJ!vEG=iIkQ@qM+XTXwdMq4TL{ifC9Yhcuw;|3*d5J z5Py9f#Lt_y{to_Hy)!WoynNb%-2ubV+d-c$X*uZONI!ZC=))?0=(~<}-VlKT7EQYr z4xXIe-w!WGk)eP-tsmP~1nJ(jt*I8WvZdSO5Sq@)Fo^H;Ncp^;RdXoyx)4b1_DCwc zvPQ=V0bv9^fl)UQz+ibJOf#tV4FM28{g$k^$8jht1rJf<%LeoUqV{`;T9&nfX2~+h zP5VIZvS~MAJqvq6l8cG~w+K+i8Xn5Mkx32 zp4~D`_WzT4p}g8V@&Cd9pOF*FtGp}i#QuM)ER=h_E9@BmzvTv@ywE%O_5OdeB9xbU zExgYEZ}LX{y4(J7|G)86s9)#*H>?%POS}eD>qj$hJKkjfKk);hJa78?AM5|evqE|K z^aa=Z7?b_~*r}jj>;Fer3*|-Auf-Jpf5h9io$UXM*9zrMuaqBmb;H*S44vzX-n`=B{{MbB;+j`9EL z{eN^>Y5g-CY 
zfCvx)B0vO)01+Spry&8qKQ7|)|CacB96Q4Hu)nkC*zegO)7Tm|pM8t9CB8_ECf-QA zn7BHTN_;)RLqgh!01+SpM1Tko0U|&IhyW2F0z}|{m%y1V0x!;Q;W+rm^oQ;rJG}AW zqGv`%+IL+s{G>Rm8LoZ1ndAO)?-Qv5D@GSS{(fcm*7u7~4*z4cjWxkVsfpt*-T#p9 zkg(N%b|aj)tPxUQ!QemK-w0pa7GqFq;CPM?TMYqTEDtomd*$ap6JPwy>qkf5yn1x! z?oTSuj_ulX@Zgrp3om^3!_RlTR5`TygM&SXk5>M)bI-_d>EJ`dW0kW%e|+0;W%)(9 zv5l4CpZ$5`eYYNIJACUWEt|%k+Sa#cqh)Q{TiMog@20U`uN-JI#&&+ZZTowd?AhM) zXq{H+_e3StxMX zKL`hfARH_>)Vz;&@M=RTh8w5D{4QSeT{@{P~%Kg81jSu_bOcO}(&G5lqmk(s6o~x|jDqTO_vh&tG z0Y1bFq6nyb`24?x4RUOZ9cBmEKjBw^H`xyMGW-(o6#FCm8t@pr8SsAg6LuH7gKdI@ zv=IR!Km>>Y5g-CYfCvx)B0vO)01+SpCrJR^f4-%~j+&e8sHw@08XN7Xp}~$Av!g`9 zj^c4UipA_G8nvTH#EwMKj>2I(3We+_7__56z>WmLj{JT*^7#b#3BZ2-&%tjsv=IR! zKm>>Y5g-CYfCvx)B0vO)01+Spr!N6~{!jk@>D#_kGa^6)hyW2F0z`la5CI}U1c(3; zAOc7L*Z(Kk-T3$apRf -" prefix (e.g., "Security -") +- Never disable pylint behavior in the code Testing - Mirror src structure: `src/security/module.py` -> `tests/security/test_module.py` diff --git a/.github/workflows/aquasec-remove-label.yml b/.github/workflows/aquasec-remove-label.yml deleted file mode 100644 index a8e3bfc..0000000 --- a/.github/workflows/aquasec-remove-label.yml +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -name: Remove Resolved Finding Label - -on: - issues: - types: [closed] - -permissions: - issues: write - -jobs: - remove-label: - uses: AbsaOSS/organizational-workflows/.github/workflows/remove-resolved-finding-label.yml@master \ No newline at end of file diff --git a/.github/workflows/aquasec-scan.yml b/.github/workflows/aquasec-scan.yml index dcf1b06..4cbf4b7 100644 --- a/.github/workflows/aquasec-scan.yml +++ b/.github/workflows/aquasec-scan.yml @@ -131,4 +131,4 @@ jobs: PROJECT_NUMBER: ${{ inputs.project-number }} PROJECT_ORG: ${{ inputs.project-org }} run: | - python3 org-workflows/src/security/sync_security_alerts.py + python3 org-workflows/src/security/main.py diff --git a/.github/workflows/remove-resolved-finding-label.yml b/.github/workflows/remove-resolved-finding-label.yml deleted file mode 100644 index c813543..0000000 --- a/.github/workflows/remove-resolved-finding-label.yml +++ /dev/null @@ -1,68 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# SECURITY reusable workflow – Removing sec:adept-to-close label when an Issue is closed. 
- -name: Remove Resolved Finding Label - -on: - workflow_call: - -permissions: - issues: write - -jobs: - cleanup-label: - runs-on: ubuntu-latest - steps: - - name: Remove label when conditions match - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd - with: - script: | - const issue = context.payload.issue; - - // Safety: ignore PRs (they can appear as issues in GitHub UI) - if (issue.pull_request) { - core.info('Skipping: payload refers to a pull request, not an issue.'); - return; - } - - const labels = (issue.labels ?? []) - .map(l => (typeof l === 'string' ? l : l?.name)) - .filter(Boolean); - - const hasScopeSecurity = labels.includes('scope:security'); - const hasTechDebt = labels.includes('type:tech-debt'); - const hasAdeptToClose = labels.includes('sec:adept-to-close'); - - if (!hasScopeSecurity || !hasTechDebt) { - core.info( - `Skipping: required labels missing (scope:security=${hasScopeSecurity}, type:tech-debt=${hasTechDebt}).` - ); - return; - } - - if (!hasAdeptToClose) { - core.info('No-op: label sec:adept-to-close is not present on the issue.'); - return; - } - - await github.rest.issues.removeLabel({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: issue.number, - name: 'sec:adept-to-close', - }); diff --git a/README.md b/README.md index 19fd0c0..ffbf386 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ docs/ # per-solution documentation ## Shared workflows Application repositories adopt a solution by adding a short **caller workflow** that delegates to the reusable workflow in this repo. -Ready-to-copy example caller workflows are located in [`docs/security/example_workflows/`](docs/security/example_workflows/). +Ready-to-copy example caller workflows are located in [`docs/security/example_workflows/`](docs/security/example_workflow/). See each solution's documentation for details and required secrets. 
## Next diff --git a/docs/security/example_workflows/aquasec-night-scan.yml b/docs/security/aquasec-night-scan-example.yml similarity index 100% rename from docs/security/example_workflows/aquasec-night-scan.yml rename to docs/security/aquasec-night-scan-example.yml diff --git a/docs/security/example_workflows/remove-resolved-finding-label.yml b/docs/security/example_workflows/remove-resolved-finding-label.yml deleted file mode 100644 index 2c0514c..0000000 --- a/docs/security/example_workflows/remove-resolved-finding-label.yml +++ /dev/null @@ -1,30 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -# Example caller workflow – Add this to your repository as .github/workflows/remove-resolved-finding-label.yml - -name: Remove Resolved Finding Label - -on: - issues: - types: [closed] - -permissions: - issues: write - -jobs: - remove-label: - uses: AbsaOSS/organizational-workflows/.github/workflows/remove-resolved-finding-label.yml@c1fa5b54ff24fea071415da89abc4f0506344f01 diff --git a/docs/security/security.md b/docs/security/security.md index 69e86a7..9e56441 100644 --- a/docs/security/security.md +++ b/docs/security/security.md @@ -166,7 +166,7 @@ The caller needs the following **repository secrets** configured: | `TEAMS_WEBHOOK_URL` | no | Teams Incoming Webhook URL for new/reopened issue alerts | | `GH_PROJECT_ONLY_TOKEN` | no (required for cross-org projects) | Classic PAT with `project` scope on an account that is a member of the org owning the ProjectV2 board – see [Cross-org project token](#cross-org-project-token) | -Example caller (already available in [aquasec-night-scan.yml](/docs/security/example_workflows/aquasec-night-scan.yml)): +Example caller (already available in [aquasec-night-scan.yml](/docs/security/aquasec-night-scan-example.yml)): ```yaml name: Aquasec Night Scan @@ -246,7 +246,7 @@ The reusable workflow forwards it to the Python script as the `GH_PROJECT_ONLY_T #### Remove sec:adept-to-close on close -Example caller (already available in [remove-adept-to-close-on-issue-close.yml](/docs/security/example_workflows/remove-resolved-finding-label.yml)): +Example caller (already available in [remove-adept-to-close-on-issue-close.yml](/docs/security/example_workflow/remove-resolved-finding-label.yml)): ```yaml name: Remove sec:adept-to-close on close diff --git a/pyproject.toml b/pyproject.toml index b366200..be4f01c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,6 @@ [tool.pytest.ini_options] -pythonpath = ["src", "src/security"] +pythonpath = ["src"] +addopts = ["--import-mode=importlib"] [tool.black] line-length = 120 diff 
--git a/tests/security/__init__.py b/src/core/__init__.py similarity index 90% rename from tests/security/__init__.py rename to src/core/__init__.py index ebfbdd3..228a826 100644 --- a/tests/security/__init__.py +++ b/src/core/__init__.py @@ -13,3 +13,5 @@ # See the License for the specific language governing permissions and # limitations under the License. # + +"""Core utilities reusable across workflow solutions.""" diff --git a/src/shared/logging_config.py b/src/core/config.py similarity index 70% rename from src/shared/logging_config.py rename to src/core/config.py index 42cfa3d..ed4af68 100644 --- a/src/shared/logging_config.py +++ b/src/core/config.py @@ -14,12 +14,23 @@ # limitations under the License. # -"""Centralised logging configuration for the security tooling.""" +"""Runtime configuration – GitHub Actions environment detection and logging setup.""" import logging +import os import sys +def parse_runner_debug() -> bool: + """Return ``True`` when the GitHub Actions ``RUNNER_DEBUG`` env var is ``'1'``.""" + raw = os.getenv("RUNNER_DEBUG") + if raw is None or raw == "": + return False + if raw not in {"0", "1"}: + raise SystemExit("ERROR: RUNNER_DEBUG must be '0' or '1' when set") + return raw == "1" + + def setup_logging(verbose: bool = False) -> None: """Configure the root logger (DEBUG when *verbose*, else INFO).""" level = logging.DEBUG if verbose else logging.INFO diff --git a/src/core/github/__init__.py b/src/core/github/__init__.py new file mode 100644 index 0000000..24ae5d5 --- /dev/null +++ b/src/core/github/__init__.py @@ -0,0 +1,17 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""GitHub API wrappers – CLI subprocess helpers, Issues CRUD, Projects V2 GraphQL.""" diff --git a/src/core/github/client.py b/src/core/github/client.py new file mode 100644 index 0000000..12b39ad --- /dev/null +++ b/src/core/github/client.py @@ -0,0 +1,46 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +"""Subprocess wrappers for the ``gh`` CLI.""" + +import logging +import subprocess +from collections.abc import Mapping + + +def run_cmd( + cmd: list[str], + *, + capture_output: bool = True, + env: Mapping[str, str] | None = None, +) -> subprocess.CompletedProcess: + """Run *cmd* as a subprocess and return the completed process.""" + return subprocess.run(cmd, check=False, capture_output=capture_output, text=True, env=env) + + +def run_gh( + args: list[str], + *, + capture_output: bool = True, + env: Mapping[str, str] | None = None, +) -> subprocess.CompletedProcess: + """Run a ``gh`` CLI command and return the completed process.""" + cmd = ["gh"] + args + try: + return run_cmd(cmd, capture_output=capture_output, env=env) + except FileNotFoundError as exc: + logging.error("gh CLI not found. Install and authenticate gh.") + raise SystemExit(1) from exc diff --git a/src/shared/github_issues.py b/src/core/github/issues.py similarity index 99% rename from src/shared/github_issues.py rename to src/core/github/issues.py index 8d31966..cefffc3 100644 --- a/src/shared/github_issues.py +++ b/src/core/github/issues.py @@ -25,8 +25,8 @@ import subprocess import time -from .common import run_gh -from .models import Issue +from .client import run_gh +from ..models import Issue _NOT_FOUND_MARKERS = ( "HTTP 404", diff --git a/src/shared/github_projects.py b/src/core/github/projects.py similarity index 99% rename from src/shared/github_projects.py rename to src/core/github/projects.py index 8b7a462..7e16f04 100644 --- a/src/shared/github_projects.py +++ b/src/core/github/projects.py @@ -26,8 +26,8 @@ from dataclasses import dataclass from typing import Any -from .common import run_gh -from .priority import resolve_priority +from .client import run_gh +from ..priority import resolve_priority # --------------------------------------------------------------------------- # Data structures diff --git a/src/shared/common.py b/src/core/helpers.py similarity index 51% rename from 
src/shared/common.py rename to src/core/helpers.py index 6f175f6..af42f6e 100644 --- a/src/shared/common.py +++ b/src/core/helpers.py @@ -14,29 +14,13 @@ # limitations under the License. # -"""Low-level utilities – date helpers, hashing, -path normalisation, and subprocess wrappers for the ``gh`` CLI. -""" +"""Pure utility functions – date helpers, hashing, and path normalisation.""" import hashlib -import logging -import os import re -import subprocess -from collections.abc import Mapping from datetime import datetime, timezone -def parse_runner_debug() -> bool: - """Return ``True`` when the GitHub Actions ``RUNNER_DEBUG`` env var is ``'1'``.""" - raw = os.getenv("RUNNER_DEBUG") - if raw is None or raw == "": - return False - if raw not in {"0", "1"}: - raise SystemExit("ERROR: RUNNER_DEBUG must be '0' or '1' when set") - return raw == "1" - - def utc_today() -> str: """Return today's date in UTC as an ISO-8601 string (``YYYY-MM-DD``).""" return datetime.now(timezone.utc).date().isoformat() @@ -57,7 +41,7 @@ def sha256_hex(text: str) -> str: def normalize_path(path: str | None) -> str: - """Normalise a file path to forward-slash, no leading ``./`` or ``/``.""" + """Normalize a file path to forward-slash, no leading ``./`` or ``/``.""" if not path: return "" p = path.replace("\\", "/").strip() @@ -66,28 +50,3 @@ def normalize_path(path: str | None) -> str: p = p.lstrip("/") p = re.sub(r"/+", "/", p) return p - - -def run_cmd( - cmd: list[str], - *, - capture_output: bool = True, - env: Mapping[str, str] | None = None, -) -> subprocess.CompletedProcess: - """Run *cmd* as a subprocess and return the completed process.""" - return subprocess.run(cmd, check=False, capture_output=capture_output, text=True, env=env) - - -def run_gh( - args: list[str], - *, - capture_output: bool = True, - env: Mapping[str, str] | None = None, -) -> subprocess.CompletedProcess: - """Run a ``gh`` CLI command and return the completed process.""" - cmd = ["gh"] + args - try: - return 
run_cmd(cmd, capture_output=capture_output, env=env) - except FileNotFoundError as exc: - logging.error("gh CLI not found. Install and authenticate gh.") - raise SystemExit(1) from exc diff --git a/src/shared/models.py b/src/core/models.py similarity index 100% rename from src/shared/models.py rename to src/core/models.py diff --git a/src/shared/priority.py b/src/core/priority.py similarity index 100% rename from src/shared/priority.py rename to src/core/priority.py diff --git a/src/shared/templates.py b/src/core/rendering.py similarity index 100% rename from src/shared/templates.py rename to src/core/rendering.py diff --git a/src/shared/__init__.py b/src/security/__init__.py similarity index 90% rename from src/shared/__init__.py rename to src/security/__init__.py index 32f59a7..8208bf4 100644 --- a/src/shared/__init__.py +++ b/src/security/__init__.py @@ -14,4 +14,4 @@ # limitations under the License. # -"""Shared utilities reusable across workflow solutions.""" +"""Security workflow – alert-to-issue sync pipeline.""" diff --git a/tests/security/utils/__init__.py b/src/security/alerts/__init__.py similarity index 91% rename from tests/security/utils/__init__.py rename to src/security/alerts/__init__.py index ebfbdd3..aad9ddb 100644 --- a/tests/security/utils/__init__.py +++ b/src/security/alerts/__init__.py @@ -13,3 +13,5 @@ # See the License for the specific language governing permissions and # limitations under the License. # + +"""Alert domain – models, parsing, and constants.""" diff --git a/src/security/utils/models.py b/src/security/alerts/models.py similarity index 67% rename from src/security/utils/models.py rename to src/security/alerts/models.py index 3ba4bc0..9c56c93 100644 --- a/src/security/utils/models.py +++ b/src/security/alerts/models.py @@ -14,15 +14,12 @@ # limitations under the License. 
# -"""Security-specific data models.""" +"""Alert-specific data models.""" from dataclasses import dataclass, field from typing import Any -from shared.github_projects import ProjectPrioritySync -from shared.models import Issue - -from .constants import NOT_AVAILABLE +from security.constants import NOT_AVAILABLE @dataclass @@ -153,98 +150,3 @@ class LoadedAlerts: repo_full: str open_by_number: dict[int, Alert] - - -@dataclass -class IssueIndex: - """In-memory indexes for fast issue lookup by fingerprint and rule_id.""" - - by_fingerprint: dict[str, Issue] - parent_by_rule_id: dict[str, Issue] - - -@dataclass -class NotifiedIssue: - """Tracks a new or reopened child issue for Teams notification.""" - - repo: str - issue_number: int - severity: str - category: str - state: str # "new" or "reopen" - tool: str - - -@dataclass -class SeverityChange: - """Records a parent issue whose severity changed between syncs.""" - - repo: str - issue_number: int - rule_id: str - old_severity: str - new_severity: str - - -# Ordered from lowest to highest so we can compute direction. 
-SEVERITY_ORDER: dict[str, int] = { - "unknown": 0, - "low": 1, - "medium": 2, - "high": 3, - "critical": 4, -} - - -def severity_direction(old: str, new: str) -> str: - """Return an emoji+label describing the direction of a severity change.""" - old_rank = SEVERITY_ORDER.get(old.lower(), -1) - new_rank = SEVERITY_ORDER.get(new.lower(), -1) - if new_rank > old_rank: - return "⬆️ escalated" - if new_rank < old_rank: - return "⬇️ de-escalated" - return "↔️ unchanged" - - -@dataclass -class SyncResult: - """Aggregated output of a full sync run.""" - - notifications: list[NotifiedIssue] - severity_changes: list[SeverityChange] - - -@dataclass -class AlertContext: - """Per-alert data extracted in ``ensure_issue`` and passed to child handlers.""" - - alert: Alert - alert_number: int - fingerprint: str - occurrence_fp: str - repo: str - first_seen: str - last_seen: str - tool: str - rule_id: str - rule_name: str - severity: str - cve: str - path: str - start_line: int | None - end_line: int | None - commit_sha: str - - -@dataclass -class SyncContext: - """Shared orchestration state for the sync run.""" - - issues: dict[int, Issue] - index: IssueIndex - dry_run: bool - notifications: list[NotifiedIssue] | None - severity_priority_map: dict[str, str] - priority_sync: ProjectPrioritySync | None - parent_sub_issues_cache: dict[int, set[int]] = field(default_factory=dict) diff --git a/src/security/utils/alert_parser.py b/src/security/alerts/parser.py similarity index 97% rename from src/security/utils/alert_parser.py rename to src/security/alerts/parser.py index f593296..254c051 100644 --- a/src/security/utils/alert_parser.py +++ b/src/security/alerts/parser.py @@ -24,8 +24,8 @@ import os from enum import StrEnum -from shared.common import sha256_hex -from .models import Alert, AlertDetails, AlertMetadata, LoadedAlerts, RuleDetails +from core.helpers import sha256_hex +from security.alerts.models import Alert, AlertDetails, AlertMetadata, LoadedAlerts, RuleDetails class 
AlertMessageKey(StrEnum): diff --git a/src/security/check_labels.py b/src/security/check_labels.py index 82f317c..9427281 100644 --- a/src/security/check_labels.py +++ b/src/security/check_labels.py @@ -20,16 +20,10 @@ import argparse import json import logging -import os -import sys -_repo_root = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")) -if _repo_root not in sys.path: - sys.path.insert(0, _repo_root) - -from shared.common import run_gh -from shared.logging_config import setup_logging -from utils.constants import ( +from core.config import setup_logging +from core.github.client import run_gh +from security.constants import ( LABEL_EPIC, LABEL_SCOPE_SECURITY, LABEL_SEC_ADEPT_TO_CLOSE, diff --git a/src/security/collect_alert.py b/src/security/collect_alert.py index 63d7f75..6590378 100644 --- a/src/security/collect_alert.py +++ b/src/security/collect_alert.py @@ -23,15 +23,10 @@ import os import re import shutil -import sys from datetime import datetime, timezone -_repo_root = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")) -if _repo_root not in sys.path: - sys.path.insert(0, _repo_root) - -from shared.common import parse_runner_debug, run_gh -from shared.logging_config import setup_logging +from core.config import parse_runner_debug, setup_logging +from core.github.client import run_gh logger = logging.getLogger(__name__) diff --git a/src/security/utils/constants.py b/src/security/constants.py similarity index 100% rename from src/security/utils/constants.py rename to src/security/constants.py diff --git a/src/security/derive_team_security_metrics.py b/src/security/derive_team_security_metrics.py deleted file mode 100644 index 8316e66..0000000 --- a/src/security/derive_team_security_metrics.py +++ /dev/null @@ -1,208 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in 
compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# pylint: skip-file -""" -Derive team security metrics from issue snapshots (Issues-only). - -Resurfacing definition (B): -- A fingerprint is considered 'resurfaced' when its occurrence_count transitions - from 0 in the previous snapshot to >0 in the current snapshot. - -Inputs: -- data/issues_snapshot.json (required) -- data/issues_snapshot.prev.json (optional; if missing, resurfacing cannot be computed) - -Outputs: -- reports/metrics.json -- reports/summary.md (appends derived metrics) -""" - -import json -import logging -import os -from datetime import datetime -from typing import Any, Dict, List, Optional - -SNAPSHOT_CUR = os.environ.get("SNAPSHOT_CURRENT", "data/issues_snapshot.json") -SNAPSHOT_PREV = os.environ.get("SNAPSHOT_PREVIOUS", "data/issues_snapshot.prev.json") - -OUT_METRICS_JSON = os.environ.get("OUT_METRICS_JSON", "reports/metrics.json") -OUT_SUMMARY_MD = os.environ.get("OUT_SUMMARY_MD", "reports/summary.md") - - -def require_env(key: str) -> str: - """Return the value of environment variable *key*, or exit.""" - try: - return os.environ[key] - except KeyError as exc: - raise SystemExit(f"Missing required environment variable: {key}") from exc - - -TEAM_SLUG = require_env("GITHUB_TEAM_SLUG") - - -def _safe_int(v: Any, default: int = 0) -> int: - """Coerce *v* to ``int``, returning *default* on failure.""" - try: - if v is None: - return default - if isinstance(v, int): - return v - s = str(v).strip() - if s == "": - return default - return int(float(s)) - except Exception: - return default - - -def 
_load_json(path: str) -> Optional[Any]: - """Load and return JSON from *path*, or ``None`` if the file is missing.""" - if not os.path.exists(path): - return None - with open(path, "r", encoding="utf-8") as f: - return json.load(f) - - -def _index_by_fingerprint(snapshot: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]: - """Index snapshot items by their secmeta fingerprint.""" - idx: Dict[str, Dict[str, Any]] = {} - for item in snapshot: - fp = (item.get("secmeta") or {}).get("fingerprint") - if not fp: - # If secmeta is missing or malformed, it cannot participate in fingerprint-level stats. - continue - idx[fp] = item - return idx - - -def _severity_from_labels(labels: List[str]) -> str: - """Extract the severity token from ``sec:sev/`` labels.""" - for l in labels: - if l.startswith("sec:sev/"): - return l.split("/", 1)[1] - return "unknown" - - -def main() -> None: - """Derive and write team security metrics from issue snapshots.""" - # TODO decide about changes related to this script - logging.warning( - "This script is deprecated and may be removed in the future. Please refer to the updated documentation for deriving security metrics." 
- ) - return - - cur = _load_json(SNAPSHOT_CUR) - if cur is None: - raise SystemExit(f"Missing current snapshot: {SNAPSHOT_CUR}") - - if not isinstance(cur, list): - raise SystemExit(f"Current snapshot is not a list: {SNAPSHOT_CUR}") - - prev = _load_json(SNAPSHOT_PREV) - - cur_idx = _index_by_fingerprint(cur) - prev_idx = _index_by_fingerprint(prev) if isinstance(prev, list) else {} - - # Basic counts - total = len(cur) - by_sev: Dict[str, int] = {} - postponed = 0 - needs_review = 0 - - for item in cur: - labels = item.get("labels") or [] - sev = _severity_from_labels(labels) - by_sev[sev] = by_sev.get(sev, 0) + 1 - if "sec:state/postponed" in labels: - postponed += 1 - if "sec:state/needs-review" in labels: - needs_review += 1 - - # Resurfacing (B): prev occurrence_count == 0 and current > 0 - resurfaced: List[Dict[str, Any]] = [] - if prev_idx: - for fp, cur_item in cur_idx.items(): - cur_occ = _safe_int((cur_item.get("secmeta") or {}).get("occurrence_count"), 0) - prev_item = prev_idx.get(fp) - prev_occ = _safe_int(((prev_item or {}).get("secmeta") or {}).get("occurrence_count"), 0) - if prev_item is not None and prev_occ == 0 and cur_occ > 0: - resurfaced.append( - { - "fingerprint": fp, - "repo": cur_item.get("repo"), - "issue_number": cur_item.get("issue_number"), - "title": cur_item.get("title"), - "severity": _severity_from_labels(cur_item.get("labels") or []), - "prev_occurrence_count": prev_occ, - "current_occurrence_count": cur_occ, - } - ) - - metrics = { - "team": TEAM_SLUG, - "generated_at_utc": datetime.utcnow().isoformat() + "Z", - "snapshot_current": SNAPSHOT_CUR, - "snapshot_previous": SNAPSHOT_PREV if prev_idx else None, - "counts": { - "total_security_issues": total, - "postponed": postponed, - "needs_review": needs_review, - "by_severity": dict(sorted(by_sev.items())), - }, - "resurfaced": { - "definition": "B: fingerprint occurrence_count from 0 (previous snapshot) to >0 (current snapshot)", - "count": len(resurfaced), - "items": resurfaced, 
- }, - } - - os.makedirs(os.path.dirname(OUT_METRICS_JSON), exist_ok=True) - os.makedirs(os.path.dirname(OUT_SUMMARY_MD), exist_ok=True) - - with open(OUT_METRICS_JSON, "w", encoding="utf-8") as f: - json.dump(metrics, f, indent=2) - - # Append to (or create) summary.md - summary_lines: List[str] = [] - summary_lines.append(f"\n## Derived metrics\n") - summary_lines.append(f"Generated at: {metrics['generated_at_utc']}\n") - if metrics["snapshot_previous"] is None: - summary_lines.append("- Resurfacing: not computed (no previous snapshot found)\n") - else: - summary_lines.append(f"- Resurfaced fingerprints (definition B): {metrics['resurfaced']['count']}\n") - if resurfaced: - summary_lines.append("\n### Resurfaced items\n") - for r in resurfaced[:50]: - summary_lines.append( - f"- {r['severity']} {r['repo']}#{r['issue_number']} (occ {r['prev_occurrence_count']} -> {r['current_occurrence_count']}): {r['title']}\n" - ) - if len(resurfaced) > 50: - summary_lines.append(f"- ... and {len(resurfaced) - 50} more\n") - - # Ensure summary exists; if not, create a minimal header. - if not os.path.exists(OUT_SUMMARY_MD): - with open(OUT_SUMMARY_MD, "w", encoding="utf-8") as f: - f.write(f"# Security summary for team `{TEAM_SLUG}`\n\n") - - with open(OUT_SUMMARY_MD, "a", encoding="utf-8") as f: - f.writelines(summary_lines) - - -if __name__ == "__main__": - main() diff --git a/src/security/extract_team_security_stats.py b/src/security/extract_team_security_stats.py deleted file mode 100644 index da1b6ac..0000000 --- a/src/security/extract_team_security_stats.py +++ /dev/null @@ -1,206 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# pylint: skip-file -""" -Extract security statistics per GitHub team. - -Model: -- Source of truth: GitHub Issues -- Scope: repositories owned by a given GitHub team -- Unit: one Issue = one logical vulnerability (fingerprint) - -Outputs: -- data/issues_snapshot.json -- data/events_flat.csv -- reports/summary.md -""" - -import csv -import json -import logging -import os -import re -from datetime import datetime -from github import Github - - -# -------------------- -# Configuration -# -------------------- -def require_env(key: str) -> str: - """Return the value of environment variable *key*, or exit.""" - try: - return os.environ[key] - except KeyError as exc: - raise SystemExit(f"Missing required environment variable: {key}") from exc - - -GITHUB_TOKEN = require_env("GITHUB_TOKEN") -ORG = require_env("GITHUB_ORG") -TEAM_SLUG = require_env("GITHUB_TEAM_SLUG") - -OUT_DATA = "data" -OUT_REPORTS = "reports" - -SEC_LABEL_PREFIX = "sec:" - -SEC_EVENT_RE = re.compile(r"\[sec-event\](.*?)\[/sec-event\]", re.S) -SECMETA_RE = re.compile(r"```secmeta(.*?)```", re.S) - -# -------------------- -# Helpers -# -------------------- - - -def ensure_dirs(): - """Create output directories if they don't exist.""" - os.makedirs(OUT_DATA, exist_ok=True) - os.makedirs(OUT_REPORTS, exist_ok=True) - - -def parse_kv_block(block: str) -> dict: - """Parse a ``key=value``-per-line block into a dict.""" - data = {} - for line in block.splitlines(): - line = line.strip() - if not line or "=" not in line: - continue - k, v = line.split("=", 1) - data[k.strip()] = v.strip() - return 
data - - -def parse_secmeta(body: str) -> dict: - """Extract the secmeta key-value block from an issue body.""" - match = SECMETA_RE.search(body or "") - if not match: - return {} - return parse_kv_block(match.group(1)) - - -def parse_events(comments): - """Extract ``[sec-event]`` blocks from issue comments.""" - events = [] - for c in comments: - for raw in SEC_EVENT_RE.findall(c.body or ""): - evt = parse_kv_block(raw) - evt["timestamp"] = c.created_at.isoformat() - events.append(evt) - return events - - -def issue_has_sec_label(issue): - """Return ``True`` if *issue* carries any ``sec:`` prefixed label.""" - return any(l.name.startswith(SEC_LABEL_PREFIX) for l in issue.labels) - - -# -------------------- -# Main extraction -# -------------------- - - -def main(): - """Extract security statistics from GitHub Issues for the configured team.""" - # TODO decide about changes related to this script - logging.warning( - "This script is deprecated and may be removed in the future. Please refer to the updated documentation for deriving security metrics." 
- ) - return - - ensure_dirs() - - gh = Github(GITHUB_TOKEN) - org = gh.get_organization(ORG) - team = org.get_team_by_slug(TEAM_SLUG) - - repos = list(team.get_repos()) - - snapshot = [] - flat_events = [] - - for repo in repos: - issues = repo.get_issues(state="all") - for issue in issues: - # Skip PRs that may be returned by the issues API - if getattr(issue, "pull_request", None): - continue - - if not issue_has_sec_label(issue): - continue - - secmeta = parse_secmeta(issue.body or "") - events = parse_events(issue.get_comments()) - - snapshot.append( - { - "repo": repo.full_name, - "issue_number": issue.number, - "title": issue.title, - "state": issue.state, - "labels": [l.name for l in issue.labels], - "secmeta": secmeta, - "created_at": issue.created_at.isoformat(), - "updated_at": issue.updated_at.isoformat(), - "event_count": len(events), - } - ) - - for e in events: - fp = secmeta.get("fingerprint") if secmeta else None - if not fp: - continue # ignore events without a fingerprint - flat_events.append( - { - "repo": repo.full_name, - "issue_number": issue.number, - "fingerprint": fp, - "action": e.get("action"), - "reason": e.get("reason"), - "timestamp": e.get("timestamp"), - } - ) - - # Write snapshot - with open(os.path.join(OUT_DATA, "issues_snapshot.json"), "w") as f: - json.dump(snapshot, f, indent=2) - - # Write flat events - with open(os.path.join(OUT_DATA, "events_flat.csv"), "w", newline="") as f: - writer = csv.DictWriter(f, fieldnames=["repo", "issue_number", "fingerprint", "action", "reason", "timestamp"]) - writer.writeheader() - writer.writerows(flat_events) - - # Summary report - total = len(snapshot) - by_sev = {} - - for item in snapshot: - sev = next((l for l in item["labels"] if l.startswith("sec:sev/")), "sec:sev/unknown") - by_sev[sev] = by_sev.get(sev, 0) + 1 - - with open(os.path.join(OUT_REPORTS, "summary.md"), "w") as f: - f.write(f"# Security summary for team `{TEAM_SLUG}`\n\n") - f.write(f"Generated at: 
{datetime.utcnow().isoformat()} UTC\n\n") - f.write(f"## Total security issues: {total}\n\n") - f.write("## By severity\n\n") - for sev, cnt in sorted(by_sev.items()): - f.write(f"- {sev}: {cnt}\n") - - -if __name__ == "__main__": - main() diff --git a/src/security/utils/__init__.py b/src/security/issues/__init__.py similarity index 88% rename from src/security/utils/__init__.py rename to src/security/issues/__init__.py index b1d4a5a..481b539 100644 --- a/src/security/utils/__init__.py +++ b/src/security/issues/__init__.py @@ -14,4 +14,4 @@ # limitations under the License. # -"""Security-specific utilities for the alert-to-issue promotion pipeline.""" +"""Issue management domain – builder, sync, metadata, and templates.""" diff --git a/src/security/utils/issue_builder.py b/src/security/issues/builder.py similarity index 93% rename from src/security/utils/issue_builder.py rename to src/security/issues/builder.py index 38fad8c..a2c4cea 100644 --- a/src/security/utils/issue_builder.py +++ b/src/security/issues/builder.py @@ -18,17 +18,17 @@ from typing import Any -from shared.common import iso_date -from shared.templates import render_markdown_template +from core.helpers import iso_date +from core.rendering import render_markdown_template -from .constants import NOT_AVAILABLE, SECMETA_TYPE_PARENT -from .models import Alert -from .secmeta import render_secmeta -from .templates import CHILD_BODY_TEMPLATE, PARENT_BODY_TEMPLATE +from security.constants import NOT_AVAILABLE, SECMETA_TYPE_PARENT +from security.alerts.models import Alert +from security.issues.secmeta import render_secmeta +from security.issues.templates import CHILD_BODY_TEMPLATE, PARENT_BODY_TEMPLATE def _synthesize_references(alert: Alert) -> str: - """Build a markdown bullet list from metadata URLs when rule_details.references is absent.""" + """Build a Markdown bullet list from metadata URLs when rule_details.references is absent.""" lines = [] if alert.metadata.help_uri: lines.append(f"- 
{alert.metadata.help_uri}") @@ -38,7 +38,7 @@ def _synthesize_references(alert: Alert) -> str: def _synthesize_owasp(alert: Alert) -> str: - """Build a markdown bullet list from OWASP-related tags when rule_details.owasp is absent.""" + """Build a Markdown bullet list from OWASP-related tags when rule_details.owasp is absent.""" lines = [f"- {tag}" for tag in alert.metadata.tags if "owasp" in tag.lower()] return "\n".join(lines) if lines else NOT_AVAILABLE diff --git a/src/security/issues/models.py b/src/security/issues/models.py new file mode 100644 index 0000000..d2c2d63 --- /dev/null +++ b/src/security/issues/models.py @@ -0,0 +1,119 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +"""Issue-related and sync-orchestration data models.""" + +from dataclasses import dataclass, field + +from core.github.projects import ProjectPrioritySync +from core.models import Issue + +from security.alerts.models import Alert + + +@dataclass +class IssueIndex: + """In-memory indexes for fast issue lookup by fingerprint and rule_id.""" + + by_fingerprint: dict[str, Issue] + parent_by_rule_id: dict[str, Issue] + + +@dataclass +class NotifiedIssue: + """Tracks a new or reopened child issue for Teams notification.""" + + repo: str + issue_number: int + severity: str + category: str + state: str # "new" or "reopen" + tool: str + + +@dataclass +class SeverityChange: + """Records a parent issue whose severity changed between syncs.""" + + repo: str + issue_number: int + rule_id: str + old_severity: str + new_severity: str + + +# Ordered from lowest to highest so we can compute direction. +SEVERITY_ORDER: dict[str, int] = { + "unknown": 0, + "low": 1, + "medium": 2, + "high": 3, + "critical": 4, +} + + +def severity_direction(old: str, new: str) -> str: + """Return an emoji+label describing the direction of a severity change.""" + old_rank = SEVERITY_ORDER.get(old.lower(), -1) + new_rank = SEVERITY_ORDER.get(new.lower(), -1) + if new_rank > old_rank: + return "⬆️ escalated" + if new_rank < old_rank: + return "⬇️ de-escalated" + return "↔️ unchanged" + + +@dataclass +class SyncResult: + """Aggregated output of a full sync run.""" + + notifications: list[NotifiedIssue] + severity_changes: list[SeverityChange] + + +@dataclass +class AlertContext: + """Per-alert data extracted in ``ensure_issue`` and passed to child handlers.""" + + alert: Alert + alert_number: int + fingerprint: str + occurrence_fp: str + repo: str + first_seen: str + last_seen: str + tool: str + rule_id: str + rule_name: str + severity: str + cve: str + path: str + start_line: int | None + end_line: int | None + commit_sha: str + + +@dataclass +class SyncContext: + """Shared orchestration state 
for the sync run.""" + + issues: dict[int, Issue] + index: IssueIndex + dry_run: bool + notifications: list[NotifiedIssue] | None + severity_priority_map: dict[str, str] + priority_sync: ProjectPrioritySync | None + parent_sub_issues_cache: dict[int, set[int]] = field(default_factory=dict) diff --git a/src/security/utils/sec_events.py b/src/security/issues/sec_events.py similarity index 97% rename from src/security/utils/sec_events.py rename to src/security/issues/sec_events.py index e0a018e..54625f4 100644 --- a/src/security/utils/sec_events.py +++ b/src/security/issues/sec_events.py @@ -20,7 +20,7 @@ import re -from .secmeta import render_kv_lines +from security.issues.secmeta import render_kv_lines def parse_sec_event_fields(raw: str) -> dict[str, str]: diff --git a/src/security/utils/secmeta.py b/src/security/issues/secmeta.py similarity index 100% rename from src/security/utils/secmeta.py rename to src/security/issues/secmeta.py diff --git a/src/security/utils/issue_sync.py b/src/security/issues/sync.py similarity index 98% rename from src/security/utils/issue_sync.py rename to src/security/issues/sync.py index e222546..f414f0f 100644 --- a/src/security/utils/issue_sync.py +++ b/src/security/issues/sync.py @@ -19,13 +19,13 @@ and labels orphaned issues for closure. This is the main business-logic module that ties together all other -``utils.*`` modules. +``issues.*`` modules. 
""" import logging -from shared.common import iso_date, normalize_path, utc_today -from shared.github_issues import ( +from core.helpers import iso_date, normalize_path, utc_today +from core.github.issues import ( gh_issue_add_labels, gh_issue_add_sub_issue_by_number, gh_issue_comment, @@ -35,12 +35,13 @@ gh_issue_edit_title, gh_issue_get_sub_issue_numbers, ) -from shared.github_projects import ProjectPrioritySync, gh_project_get_priority_field -from shared.models import Issue -from shared.templates import render_markdown_template +from core.github.projects import ProjectPrioritySync, gh_project_get_priority_field +from core.models import Issue +from core.rendering import render_markdown_template -from .alert_parser import compute_occurrence_fp -from .constants import ( +from security.alerts.models import Alert +from security.alerts.parser import compute_occurrence_fp +from security.constants import ( LABEL_EPIC, LABEL_SCOPE_SECURITY, LABEL_SEC_ADEPT_TO_CLOSE, @@ -51,7 +52,7 @@ SECMETA_TYPE_CHILD, SECMETA_TYPE_PARENT, ) -from .issue_builder import ( +from .builder import ( build_child_issue_body, build_issue_title, build_parent_issue_body, @@ -59,7 +60,7 @@ build_parent_template_values, classify_category, ) -from .models import Alert, AlertContext, IssueIndex, NotifiedIssue, SeverityChange, SyncContext, SyncResult +from .models import AlertContext, IssueIndex, NotifiedIssue, SeverityChange, SyncContext, SyncResult from .sec_events import render_sec_event, strip_sec_events_from_body from .secmeta import json_list, load_secmeta, parse_json_list, render_secmeta from .templates import PARENT_BODY_TEMPLATE diff --git a/src/security/utils/templates.py b/src/security/issues/templates.py similarity index 100% rename from src/security/utils/templates.py rename to src/security/issues/templates.py diff --git a/src/security/sync_security_alerts.py b/src/security/main.py similarity index 89% rename from src/security/sync_security_alerts.py rename to src/security/main.py index 
6441285..c46d6ab 100644 --- a/src/security/sync_security_alerts.py +++ b/src/security/main.py @@ -15,30 +15,17 @@ # limitations under the License. # -"""Orchestrator that runs the full security-alert sync pipeline. - -Steps ------ -1. **check_labels** – verify required labels exist in the repository. -2. **collect_alert** – fetch code-scanning alerts and write a normalised JSON file. -3. **promote_alerts** – create / update GitHub Issues from the collected alerts. -""" +"""Orchestrator that runs the full Security pipeline: GH sec-Issues creation.""" import argparse import logging import os -import sys - -_repo_root = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")) -if _repo_root not in sys.path: - sys.path.insert(0, _repo_root) -from shared.common import parse_runner_debug -from shared.logging_config import setup_logging +from core.config import parse_runner_debug, setup_logging -from check_labels import check_labels -from collect_alert import main as collect_alert_main -from promote_alerts import main as promote_alerts_main +from security.check_labels import check_labels +from security.collect_alert import main as collect_alert_main +from security.promote_alerts import main as promote_alerts_main logger = logging.getLogger(__name__) diff --git a/src/security/notifications/__init__.py b/src/security/notifications/__init__.py new file mode 100644 index 0000000..7b5f1f0 --- /dev/null +++ b/src/security/notifications/__init__.py @@ -0,0 +1,17 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""Notification domain – Teams webhook integration.""" diff --git a/src/security/utils/teams.py b/src/security/notifications/teams.py similarity index 91% rename from src/security/utils/teams.py rename to src/security/notifications/teams.py index 8773054..01a1362 100644 --- a/src/security/utils/teams.py +++ b/src/security/notifications/teams.py @@ -15,7 +15,7 @@ # """Teams webhook notification – builds the notification payload and invokes -``send_to_teams.py`` for new / reopened issues. +``send_notifications.py`` for new / reopened issues. """ import logging @@ -24,7 +24,7 @@ import sys import tempfile -from .models import NotifiedIssue, SeverityChange, severity_direction +from security.issues.models import NotifiedIssue, SeverityChange, severity_direction def build_teams_notification_body(notifications: list[NotifiedIssue]) -> str: @@ -70,7 +70,7 @@ def _post_to_teams( label: str, dry_run: bool = False, ) -> None: - """Write *body* to a temp file and invoke send_to_teams.py.""" + """Write *body* to a temp file and invoke send_notifications.py.""" if dry_run: if webhook_url: logging.info(f"DRY-RUN: {label} webhook configured; no delivery will occur") @@ -80,10 +80,10 @@ def _post_to_teams( ) script_dir = os.path.dirname(os.path.abspath(__file__)) - send_script = os.path.join(os.path.dirname(script_dir), "send_to_teams.py") + send_script = os.path.join(os.path.dirname(script_dir), "send_notifications.py") if not os.path.exists(send_script): - logging.warning(f"send_to_teams.py not found at {send_script} – skipping {label.lower()}") + logging.warning(f"send_notifications.py not found at {send_script} – skipping {label.lower()}") return body_file: str | None = None @@ -116,7 +116,7 @@ def _post_to_teams( logging.warning(f"{label} failed: {result.stderr}") else: if dry_run: - logging.info(f"DRY-RUN: send_to_teams.py {label.lower()} output:") + 
logging.info(f"DRY-RUN: send_notifications.py {label.lower()} output:") logging.info(result.stdout) else: logging.info(f"{label} sent successfully") @@ -134,7 +134,7 @@ def notify_teams( *, dry_run: bool = False, ) -> None: - """Send a Teams message about new / reopened issues via send_to_teams.py.""" + """Send a Teams message about new / reopened issues via send_notifications.py.""" if not notifications: logging.info("No new or reopened issues – skipping Teams notification") return @@ -156,7 +156,7 @@ def notify_teams_severity_changes( *, dry_run: bool = False, ) -> None: - """Send a Teams message about parent severity changes via send_to_teams.py.""" + """Send a Teams message about parent severity changes via send_notifications.py.""" if not changes: logging.debug("No severity changes – skipping Teams severity-change notification") return diff --git a/src/security/promote_alerts.py b/src/security/promote_alerts.py index 98db6df..ed4437f 100644 --- a/src/security/promote_alerts.py +++ b/src/security/promote_alerts.py @@ -35,9 +35,9 @@ `python3 promote_alerts.py --file alerts.json --verbose --dry-run` Implementation: -- Shared, cross-solution logic lives in the ``shared`` package +- Core, cross-solution logic lives in the ``core`` package (common helpers, GitHub wrappers, priority mapping, template renderer). -- Security-specific orchestration and domain logic lives in ``utils``. +- Security-specific orchestration and domain logic lives in ``alerts``, ``issues``, ``notifications``. - This file remains the CLI entry-point only: argument parsing → wiring → main(). """ @@ -45,22 +45,15 @@ import logging import os import shutil -import sys - -# Ensure the repo root is on sys.path so `shared.*` is importable. 
-_repo_root = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")) -if _repo_root not in sys.path: - sys.path.insert(0, _repo_root) - -from shared.common import parse_runner_debug -from shared.github_issues import gh_issue_list_by_label -from shared.priority import parse_severity_priority_map - -from utils.alert_parser import load_open_alerts_from_file -from utils.constants import LABEL_SCOPE_SECURITY -from utils.issue_sync import sync_alerts_and_issues -from shared.logging_config import setup_logging -from utils.teams import notify_teams, notify_teams_severity_changes + +from core.config import parse_runner_debug, setup_logging +from core.github.issues import gh_issue_list_by_label +from core.priority import parse_severity_priority_map + +from security.alerts.parser import load_open_alerts_from_file +from security.constants import LABEL_SCOPE_SECURITY +from security.issues.sync import sync_alerts_and_issues +from security.notifications.teams import notify_teams, notify_teams_severity_changes def parse_args(argv: list[str] | None = None) -> argparse.Namespace: diff --git a/src/security/send_to_teams.py b/src/security/send_notifications.py similarity index 91% rename from src/security/send_to_teams.py rename to src/security/send_notifications.py index df5a369..7b18969 100644 --- a/src/security/send_to_teams.py +++ b/src/security/send_notifications.py @@ -43,16 +43,16 @@ Usage examples -------------- # Body from a file -python3 send_to_teams.py --body-file reports/summary.md --title "Security Report" +python3 send_notifications.py --body-file reports/summary.md --title "Security Report" # Body from a CLI argument -python3 send_to_teams.py --body "All checks **passed** ✅" +python3 send_notifications.py --body "All checks **passed** ✅" # Body from stdin (pipe) -cat reports/summary.md | python3 send_to_teams.py --title "Daily digest" +cat reports/summary.md | python3 send_notifications.py --title "Daily digest" # Dry-run (print the payload 
without sending) -python3 send_to_teams.py --body-file reports/summary.md --dry-run +python3 send_notifications.py --body-file reports/summary.md --dry-run """ import argparse @@ -64,16 +64,7 @@ import requests -# Ensure the repo root is on sys.path so `shared.*` is importable. -_script_dir = os.path.dirname(os.path.abspath(__file__)) -if _script_dir not in sys.path: - sys.path.insert(0, _script_dir) -_repo_root = os.path.normpath(os.path.join(_script_dir, "..")) -if _repo_root not in sys.path: - sys.path.insert(0, _repo_root) - -from shared.common import parse_runner_debug -from shared.logging_config import setup_logging +from core.config import parse_runner_debug, setup_logging def _text_block(text: str, **kwargs: Any) -> Dict[str, Any]: diff --git a/tests/security/test_github_issues.py b/tests/core/github/test_issues.py similarity index 75% rename from tests/security/test_github_issues.py rename to tests/core/github/test_issues.py index ec880d6..d03f7dc 100644 --- a/tests/security/test_github_issues.py +++ b/tests/core/github/test_issues.py @@ -26,7 +26,7 @@ import pytest from pytest_mock import MockerFixture -from shared.github_issues import ( +from core.github.issues import ( _gh_with_retry, _is_not_found_error, _not_found_hint, @@ -96,8 +96,8 @@ def test_not_found_hint_empty_on_success() -> None: def test_retry_succeeds_first_attempt(mocker: MockerFixture) -> None: """No retries when the first call succeeds.""" - mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="ok")) - mock_sleep = mocker.patch("shared.github_issues.time.sleep") + mock_run = mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="ok")) + mock_sleep = mocker.patch("core.github.issues.time.sleep") result = _gh_with_retry(["some", "cmd"]) assert result.returncode == 0 mock_run.assert_called_once() @@ -106,18 +106,18 @@ def test_retry_succeeds_first_attempt(mocker: MockerFixture) -> None: def test_retry_succeeds_on_second_attempt(mocker: MockerFixture) -> 
None: """Retries once on 404 then succeeds.""" mock_run = mocker.patch( - "shared.github_issues.run_gh", + "core.github.issues.run_gh", side_effect=[_not_found(), _ok(stdout="42")], ) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.time.sleep") result = _gh_with_retry(["some", "cmd"], retries=3) assert result.returncode == 0 assert mock_run.call_count == 2 def test_retry_exhausts_all_attempts(mocker: MockerFixture) -> None: """Returns the last failure after all retries are consumed.""" - mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mock_run = mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") result = _gh_with_retry(["some", "cmd"], retries=3) assert result.returncode != 0 # 1 initial + 3 retries = 4 total @@ -125,16 +125,16 @@ def test_retry_exhausts_all_attempts(mocker: MockerFixture) -> None: def test_retry_does_not_retry_non_404_error(mocker: MockerFixture) -> None: """Non-404 errors are not retried.""" - mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_err("server error")) - mocker.patch("shared.github_issues.time.sleep") + mock_run = mocker.patch("core.github.issues.run_gh", return_value=_err("server error")) + mocker.patch("core.github.issues.time.sleep") result = _gh_with_retry(["some", "cmd"], retries=3) assert result.returncode != 0 mock_run.assert_called_once() # no retries def test_retry_sleeps_with_exponential_backoff(mocker: MockerFixture) -> None: """Sleep duration grows as backoff_base ** attempt.""" - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mock_sleep = mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mock_sleep = mocker.patch("core.github.issues.time.sleep") _gh_with_retry(["cmd"], retries=3, backoff_base=2.0) sleep_calls = [c.args[0] 
for c in mock_sleep.call_args_list] # attempts 1, 2, 3 → 2**1=2, 2**2=4, 2**3=8 @@ -142,60 +142,60 @@ def test_retry_sleeps_with_exponential_backoff(mocker: MockerFixture) -> None: def test_retry_zero_retries_no_sleep(mocker: MockerFixture) -> None: """retries=0 means a single attempt with no sleep.""" - mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mock_sleep = mocker.patch("shared.github_issues.time.sleep") + mock_run = mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mock_sleep = mocker.patch("core.github.issues.time.sleep") _gh_with_retry(["cmd"], retries=0) mock_run.assert_called_once() mock_sleep.assert_not_called() def test_get_rest_id_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="987654\n")) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="987654\n")) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_rest_id("org/repo", 42) == 987654 def test_get_rest_id_retries_on_404(mocker: MockerFixture) -> None: mock_run = mocker.patch( - "shared.github_issues.run_gh", + "core.github.issues.run_gh", side_effect=[_not_found(), _ok(stdout="1111\n")], ) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.time.sleep") result = gh_issue_get_rest_id("org/repo", 5) assert result == 1111 assert mock_run.call_count == 2 def test_get_rest_id_returns_none_after_all_retries(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_rest_id("org/repo", 5) is None def test_get_rest_id_parse_failure(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", 
return_value=_ok(stdout="not-a-number")) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="not-a-number")) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_rest_id("org/repo", 1) is None def test_get_rest_id_not_found_hint_in_log(mocker: MockerFixture, caplog) -> None: """Log message includes the not-found hint for 404 errors.""" - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") with caplog.at_level(logging.WARNING, logger="root"): gh_issue_get_rest_id("org/repo", 99) assert any("deleted or transferred" in r.message for r in caplog.records) def test_add_sub_issue_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mocker.patch("core.github.issues.run_gh", return_value=_ok()) assert gh_issue_add_sub_issue("org/repo", 10, 9999) is True def test_add_sub_issue_failure_logs_hint(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_add_sub_issue("org/repo", 10, 9999) assert result is False assert any("deleted or transferred" in r.message for r in caplog.records) def test_add_sub_issue_failure_plain_error(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_err("rate limited")) + mocker.patch("core.github.issues.run_gh", return_value=_err("rate limited")) with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_add_sub_issue("org/repo", 10, 9999) assert result is False @@ -203,57 +203,57 @@ def test_add_sub_issue_failure_plain_error(mocker: MockerFixture, caplog) -> Non def 
test_add_sub_issue_by_number_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", side_effect=[_ok(stdout="5555\n"), _ok()]) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", side_effect=[_ok(stdout="5555\n"), _ok()]) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_add_sub_issue_by_number("org/repo", 10, 42) is True def test_add_sub_issue_by_number_rest_id_fails(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_add_sub_issue_by_number("org/repo", 10, 42) is False def test_get_sub_issue_numbers_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="[1, 2, 3]\n")) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="[1, 2, 3]\n")) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_sub_issue_numbers("org/repo", 10) == {1, 2, 3} def test_get_sub_issue_numbers_empty(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="[]\n")) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="[]\n")) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_sub_issue_numbers("org/repo", 10) == set() def test_get_sub_issue_numbers_not_found_error(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") with caplog.at_level(logging.ERROR, logger="root"): result = 
gh_issue_get_sub_issue_numbers("org/repo", 10) assert result == set() assert any("deleted or transferred" in r.message for r in caplog.records) def test_get_sub_issue_numbers_parse_error(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="not-json")) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="not-json")) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_get_sub_issue_numbers("org/repo", 10) == set() def test_issue_comment_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_ok()) + mocker.patch("core.github.issues.time.sleep") assert gh_issue_comment("org/repo", 1, "hello") is True def test_issue_comment_retries_on_404(mocker: MockerFixture) -> None: mock_run = mocker.patch( - "shared.github_issues.run_gh", + "core.github.issues.run_gh", side_effect=[_not_found(), _ok()], ) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.time.sleep") assert gh_issue_comment("org/repo", 1, "hello") is True assert mock_run.call_count == 2 def test_issue_comment_fails_after_all_retries(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.time.sleep") with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_comment("org/repo", 1, "hello") assert result is False @@ -265,19 +265,19 @@ def test_issue_comment_graphql_not_found_hint(mocker: MockerFixture, caplog) -> returncode=1, stderr="GraphQL: Could not resolve to an issue or pull request with the number of 42. 
(repository.issue)", ) - mocker.patch("shared.github_issues.run_gh", return_value=graphql_err) - mocker.patch("shared.github_issues.time.sleep") + mocker.patch("core.github.issues.run_gh", return_value=graphql_err) + mocker.patch("core.github.issues.time.sleep") with caplog.at_level(logging.ERROR, logger="root"): gh_issue_comment("org/repo", 42, "body") assert any("deleted or transferred" in r.message for r in caplog.records) def test_edit_state_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mocker.patch("core.github.issues.run_gh", return_value=_ok()) assert gh_issue_edit_state("org/repo", 1, "open") is True def test_edit_state_not_found_hint(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_edit_state("org/repo", 1, "open") assert result is False @@ -289,11 +289,11 @@ def test_edit_state_invalid_state_raises() -> None: def test_edit_title_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mocker.patch("core.github.issues.run_gh", return_value=_ok()) assert gh_issue_edit_title("org/repo", 1, "New title") is True def test_edit_title_not_found_hint(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_edit_title("org/repo", 1, "New title") assert result is False @@ -301,11 +301,11 @@ def test_edit_title_not_found_hint(mocker: MockerFixture, caplog) -> None: def test_edit_body_success(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mocker.patch("core.github.issues.run_gh", return_value=_ok()) assert 
gh_issue_edit_body("org/repo", 1, "new body") is True def test_edit_body_not_found_hint(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) with caplog.at_level(logging.ERROR, logger="root"): result = gh_issue_edit_body("org/repo", 1, "new body") assert result is False @@ -313,17 +313,17 @@ def test_edit_body_not_found_hint(mocker: MockerFixture, caplog) -> None: def test_add_labels_success(mocker: MockerFixture) -> None: - mock_run = mocker.patch("shared.github_issues.run_gh", return_value=_ok()) + mock_run = mocker.patch("core.github.issues.run_gh", return_value=_ok()) gh_issue_add_labels("org/repo", 1, ["bug", "security"]) mock_run.assert_called_once() def test_add_labels_no_labels_skips_call(mocker: MockerFixture) -> None: - mock_run = mocker.patch("shared.github_issues.run_gh") + mock_run = mocker.patch("core.github.issues.run_gh") gh_issue_add_labels("org/repo", 1, []) mock_run.assert_not_called() def test_add_labels_not_found_hint(mocker: MockerFixture, caplog) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_not_found()) + mocker.patch("core.github.issues.run_gh", return_value=_not_found()) with caplog.at_level(logging.WARNING, logger="root"): gh_issue_add_labels("org/repo", 1, ["bug"]) assert any("deleted or transferred" in r.message for r in caplog.records) @@ -331,18 +331,18 @@ def test_add_labels_not_found_hint(mocker: MockerFixture, caplog) -> None: def test_create_issue_success_url(mocker: MockerFixture) -> None: mocker.patch( - "shared.github_issues.run_gh", + "core.github.issues.run_gh", return_value=_ok(stdout="https://github.com/org/repo/issues/123\n"), ) num = gh_issue_create("org/repo", "title", "body", ["label"]) assert num == 123 def test_create_issue_success_bare_number(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout="issues/456")) + 
mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout="issues/456")) assert gh_issue_create("org/repo", "t", "b", []) == 456 def test_create_issue_failure_returns_none(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_err("permission denied")) + mocker.patch("core.github.issues.run_gh", return_value=_err("permission denied")) assert gh_issue_create("org/repo", "t", "b", []) is None @@ -351,7 +351,7 @@ def test_list_by_label_success(mocker: MockerFixture) -> None: {"number": 1, "state": "open", "title": "T1", "body": "b1", "labels": [{"name": "bug"}]}, {"number": 2, "state": "closed", "title": "T2", "body": "b2", "labels": []}, ] - mocker.patch("shared.github_issues.run_gh", return_value=_ok(stdout=json.dumps(payload))) + mocker.patch("core.github.issues.run_gh", return_value=_ok(stdout=json.dumps(payload))) issues = gh_issue_list_by_label("org/repo", "bug") assert len(issues) == 2 assert issues[1].title == "T1" @@ -359,10 +359,10 @@ def test_list_by_label_success(mocker: MockerFixture) -> None: assert issues[2].labels == [] def test_list_by_label_empty_label_returns_empty(mocker: MockerFixture) -> None: - mock_run = mocker.patch("shared.github_issues.run_gh") + mock_run = mocker.patch("core.github.issues.run_gh") assert gh_issue_list_by_label("org/repo", "") == {} mock_run.assert_not_called() def test_list_by_label_gh_failure_returns_empty(mocker: MockerFixture) -> None: - mocker.patch("shared.github_issues.run_gh", return_value=_err("network error")) + mocker.patch("core.github.issues.run_gh", return_value=_err("network error")) assert gh_issue_list_by_label("org/repo", "bug") == {} diff --git a/tests/security/alerts/test_models.py b/tests/security/alerts/test_models.py new file mode 100644 index 0000000..1de2f72 --- /dev/null +++ b/tests/security/alerts/test_models.py @@ -0,0 +1,52 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""Unit tests for ``security.alerts.models``.""" + +from security.alerts.models import AlertMetadata + + +# ===================================================================== +# AlertMetadata – None-safe __post_init__ +# ===================================================================== + + +def test_alert_metadata_none_fields_do_not_crash() -> None: + """AlertMetadata must not raise when nullable collector fields are None.""" + md = AlertMetadata( + severity=None, # type: ignore[arg-type] – mirrors _normalise_alert output + rule_id=None, # type: ignore[arg-type] + rule_name=None, # type: ignore[arg-type] + state=None, # type: ignore[arg-type] + tool=None, # type: ignore[arg-type] + ) + assert md.severity == "unknown" + assert md.rule_id == "" + assert md.rule_name == "" + assert md.state == "" + assert md.tool == "" + + +def test_alert_metadata_strips_whitespace() -> None: + md = AlertMetadata(severity=" high ", rule_id=" CVE-123 ", tool=" AquaSec ") + assert md.severity == "high" + assert md.rule_id == "CVE-123" + assert md.tool == "AquaSec" + + +def test_alert_metadata_state_lowercased() -> None: + md = AlertMetadata(state=" OPEN ") + assert md.state == "open" diff --git a/tests/security/utils/test_alert_parser.py b/tests/security/alerts/test_parser.py similarity index 98% rename from tests/security/utils/test_alert_parser.py rename to tests/security/alerts/test_parser.py index 08be648..9f6675a 100644 --- a/tests/security/utils/test_alert_parser.py +++ b/tests/security/alerts/test_parser.py 
@@ -22,7 +22,7 @@ import pytest -from utils.alert_parser import ( +from security.alerts.parser import ( AlertMessageKey, compute_occurrence_fp, load_open_alerts_from_file, @@ -70,7 +70,7 @@ ) _RAW_PIPELINE_MESSAGE = ( - "Artifact: .github/workflows/aquasec-night-scan.yml\n" + "Artifact: .github/workflows/aquasec-night-scan-example.yml\n" "Type: pipelineMisconfigurations\n" "Vulnerability: AVD-PIPELINE-0008\n" "Severity: MEDIUM\n" @@ -82,7 +82,7 @@ "First seen: 2026-02-09T15:51:33.454Z\n" "SCM file: https://github.com/test-org/test-repo/blob/" "64c62d98a7db5dbd80ae8b0affd531099cf54280/" - ".github/workflows/aquasec-night-scan.yml\n" + ".github/workflows/aquasec-night-scan-example.yml\n" "Start line: 21\n" "Alert hash: bed23a624d7f1f07f56a07c6349bcd8b" ) diff --git a/tests/security/conftest.py b/tests/security/conftest.py index 8fec7e3..a475954 100644 --- a/tests/security/conftest.py +++ b/tests/security/conftest.py @@ -23,7 +23,7 @@ import pytest -from utils.models import Alert +from security.alerts.models import Alert # ── Raw alert payloads (read-only module-level originals) ────────────── @@ -189,13 +189,13 @@ "commit_sha": "d28cb4b49c437fdc4e26471ced2b128c63839d0e", "instance_url": None, "classifications": [], - "file": ".github/workflows/aquasec-night-scan.yml", + "file": ".github/workflows/aquasec-night-scan-example.yml", "start_line": 21, "end_line": None, }, "alert_details": { "alert_hash": "bed23a624d7f1f07f56a07c6349bcd8b", - "artifact": ".github/workflows/aquasec-night-scan.yml", + "artifact": ".github/workflows/aquasec-night-scan-example.yml", "type": "pipelineMisconfigurations", "vulnerability": "AVD-PIPELINE-0008", "severity": "MEDIUM", @@ -206,7 +206,7 @@ "scm_file": ( "https://github.com/test-org/test-repo/blob/" "64c62d98a7db5dbd80ae8b0affd531099cf54280/" - ".github/workflows/aquasec-night-scan.yml" + ".github/workflows/aquasec-night-scan-example.yml" ), "start_line": "21", "message": ( diff --git a/tests/security/utils/test_issue_builder.py 
b/tests/security/issues/test_builder.py similarity index 99% rename from tests/security/utils/test_issue_builder.py rename to tests/security/issues/test_builder.py index 2234f87..d182e8f 100644 --- a/tests/security/utils/test_issue_builder.py +++ b/tests/security/issues/test_builder.py @@ -18,7 +18,7 @@ import pytest -from utils.issue_builder import ( +from security.issues.builder import ( alert_extra_data, build_child_issue_body, build_issue_title, @@ -27,7 +27,7 @@ build_parent_template_values, classify_category, ) -from utils.models import Alert +from security.alerts.models import Alert # ===================================================================== diff --git a/tests/security/utils/test_models.py b/tests/security/issues/test_models.py similarity index 75% rename from tests/security/utils/test_models.py rename to tests/security/issues/test_models.py index 83abddb..a0ad502 100644 --- a/tests/security/utils/test_models.py +++ b/tests/security/issues/test_models.py @@ -14,13 +14,10 @@ # limitations under the License. 
# -"""Unit tests for ``utils.models``.""" +"""Unit tests for ``security.issues.models``.""" -import pytest - -from utils.models import ( +from security.issues.models import ( AlertContext, - AlertMetadata, IssueIndex, NotifiedIssue, SEVERITY_ORDER, @@ -29,7 +26,6 @@ SyncResult, severity_direction, ) -from shared.models import Issue # ===================================================================== @@ -128,36 +124,3 @@ def test_sync_context_creation() -> None: notifications=[], severity_priority_map={}, priority_sync=None, ) assert sc.dry_run is True - - -# ===================================================================== -# AlertMetadata – None-safe __post_init__ -# ===================================================================== - - -def test_alert_metadata_none_fields_do_not_crash() -> None: - """AlertMetadata must not raise when nullable collector fields are None.""" - md = AlertMetadata( - severity=None, # type: ignore[arg-type] – mirrors _normalise_alert output - rule_id=None, # type: ignore[arg-type] - rule_name=None, # type: ignore[arg-type] - state=None, # type: ignore[arg-type] - tool=None, # type: ignore[arg-type] - ) - assert md.severity == "unknown" - assert md.rule_id == "" - assert md.rule_name == "" - assert md.state == "" - assert md.tool == "" - - -def test_alert_metadata_strips_whitespace() -> None: - md = AlertMetadata(severity=" high ", rule_id=" CVE-123 ", tool=" AquaSec ") - assert md.severity == "high" - assert md.rule_id == "CVE-123" - assert md.tool == "AquaSec" - - -def test_alert_metadata_state_lowercased() -> None: - md = AlertMetadata(state=" OPEN ") - assert md.state == "open" diff --git a/tests/security/utils/test_sec_events.py b/tests/security/issues/test_sec_events.py similarity index 99% rename from tests/security/utils/test_sec_events.py rename to tests/security/issues/test_sec_events.py index e5685d7..849bc29 100644 --- a/tests/security/utils/test_sec_events.py +++ b/tests/security/issues/test_sec_events.py @@ -18,7 
+18,7 @@ import pytest -from utils.sec_events import ( +from security.issues.sec_events import ( parse_sec_event_fields, render_sec_event, strip_sec_events_from_body, diff --git a/tests/security/utils/test_secmeta.py b/tests/security/issues/test_secmeta.py similarity index 99% rename from tests/security/utils/test_secmeta.py rename to tests/security/issues/test_secmeta.py index fb2a7b5..8fc28cc 100644 --- a/tests/security/utils/test_secmeta.py +++ b/tests/security/issues/test_secmeta.py @@ -18,7 +18,7 @@ import pytest -from utils.secmeta import ( +from security.issues.secmeta import ( json_list, load_secmeta, parse_json_list, diff --git a/tests/security/utils/test_issue_sync.py b/tests/security/issues/test_sync.py similarity index 91% rename from tests/security/utils/test_issue_sync.py rename to tests/security/issues/test_sync.py index 81d399c..bd40352 100644 --- a/tests/security/utils/test_issue_sync.py +++ b/tests/security/issues/test_sync.py @@ -14,7 +14,7 @@ # limitations under the License. # -"""Unit tests for ``utils.issue_sync`` – pure-logic helpers and +"""Unit tests for ``security.issues.sync`` – pure-logic helpers and orchestration functions with mocked GitHub CLI calls. 
""" @@ -23,8 +23,8 @@ import pytest from pytest_mock import MockerFixture -from shared.models import Issue -from utils.issue_sync import ( +from core.models import Issue +from security.issues.sync import ( _append_notification, _close_resolved_parent_issues, _comment_child_event, @@ -46,15 +46,15 @@ maybe_reopen_parent_issue, sync_alerts_and_issues, ) -from utils.models import ( - Alert, +from security.alerts.models import Alert +from security.issues.models import ( AlertContext, IssueIndex, NotifiedIssue, SeverityChange, SyncContext, ) -from utils.secmeta import render_secmeta +from security.issues.secmeta import render_secmeta # ===================================================================== @@ -227,7 +227,7 @@ def test_find_parent_not_found() -> None: def test_reopen_parent_none(mocker: MockerFixture) -> None: """No-op when parent_issue is None — no gh call is made.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_state") + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_state") maybe_reopen_parent_issue( "org/repo", None, rule_id="R1", dry_run=False, context="test", ) @@ -251,8 +251,8 @@ def test_reopen_parent_dry_run() -> None: def test_reopen_parent_real(mocker: MockerFixture) -> None: """Non-dry-run reopens issue and posts sec-event comment.""" - mock_edit_state = mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) - mock_comment = mocker.patch("utils.issue_sync.gh_issue_comment") + mock_edit_state = mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) + mock_comment = mocker.patch("security.issues.sync.gh_issue_comment") parent = Issue(number=1, state="closed", title="P", body="b") maybe_reopen_parent_issue( "org/repo", parent, rule_id="R1", dry_run=False, context="reopen_child", child_issue_number=5, @@ -266,7 +266,7 @@ def test_reopen_parent_real(mocker: MockerFixture) -> None: def test_reopen_parent_gh_failure(mocker: MockerFixture) -> None: """If gh_issue_edit_state fails, state 
stays closed.""" - mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=False) + mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=False) parent = Issue(number=1, state="closed", title="P", body="b") maybe_reopen_parent_issue( "org/repo", parent, rule_id="R1", dry_run=False, context="test", @@ -403,7 +403,7 @@ def test_reopen_child_dry_run() -> None: def test_reopen_child_real(mocker: MockerFixture) -> None: """Non-dry-run calls gh_issue_edit_state and appends notification.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) body = render_secmeta({"type": "child", "category": "sast"}) + "\nbody" issue = Issue(number=5, state="closed", title="T", body=body) ctx = _make_alert_context() @@ -416,7 +416,7 @@ def test_reopen_child_real(mocker: MockerFixture) -> None: def test_reopen_child_cascades_to_parent(mocker: MockerFixture) -> None: """Reopening child also reopens the closed parent (dry-run).""" - mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) + mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) body = render_secmeta({"type": "child"}) + "\nbody" issue = Issue(number=5, state="closed", title="T", body=body) parent = Issue(number=1, state="closed", title="P", body="pb") @@ -433,7 +433,7 @@ def test_reopen_child_cascades_to_parent(mocker: MockerFixture) -> None: def test_rebuild_body_changed(mocker: MockerFixture, sast_alert: Alert) -> None: """When body changes, gh_issue_edit_body is called.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_body") + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_body") issue = Issue(number=1, state="open", title="T", body="old body") ctx = _make_alert_context(alert=sast_alert) sync = _make_sync_context() @@ -443,8 +443,8 @@ def test_rebuild_body_changed(mocker: MockerFixture, sast_alert: Alert) -> 
None: def test_rebuild_body_unchanged(sast_alert: Alert) -> None: """When body is identical, no API call is made.""" - from utils.issue_builder import build_child_issue_body - from utils.sec_events import strip_sec_events_from_body + from security.issues.builder import build_child_issue_body + from security.issues.sec_events import strip_sec_events_from_body secmeta = {"schema": "1", "type": "child", "fingerprint": "fp1"} human_body = build_child_issue_body(sast_alert) @@ -471,7 +471,7 @@ def test_rebuild_body_dry_run(sast_alert: Alert) -> None: def test_comment_reopen_event(mocker: MockerFixture) -> None: """Posts a reopen sec-event comment when reopened=True.""" - mock_comment = mocker.patch("utils.issue_sync.gh_issue_comment") + mock_comment = mocker.patch("security.issues.sync.gh_issue_comment") issue = Issue(number=1, state="open", title="T", body="b") ctx = _make_alert_context() sync = _make_sync_context() @@ -482,7 +482,7 @@ def test_comment_reopen_event(mocker: MockerFixture) -> None: def test_comment_occurrence_event_no_comment(mocker: MockerFixture) -> None: """No sec-event comment when issue is already open (new_occurrence=True but reopened=False).""" - mock_comment = mocker.patch("utils.issue_sync.gh_issue_comment") + mock_comment = mocker.patch("security.issues.sync.gh_issue_comment") issue = Issue(number=1, state="open", title="T", body="b") ctx = _make_alert_context() sync = _make_sync_context() @@ -519,8 +519,8 @@ def test_comment_occurrence_dry_run() -> None: def test_sync_title_drift_corrected(mocker: MockerFixture) -> None: """Title is updated when it drifts from the expected format.""" - mock_title = mocker.patch("utils.issue_sync.gh_issue_edit_title", return_value=True) - mock_labels = mocker.patch("utils.issue_sync.gh_issue_add_labels") + mock_title = mocker.patch("security.issues.sync.gh_issue_edit_title", return_value=True) + mock_labels = mocker.patch("security.issues.sync.gh_issue_add_labels") issue = Issue(number=1, state="open", 
title="Wrong title", body="b") ctx = _make_alert_context(rule_name="sast", rule_id="CVE-2026-1234", fingerprint="fp_test_123") sync = _make_sync_context() @@ -530,8 +530,8 @@ def test_sync_title_drift_corrected(mocker: MockerFixture) -> None: def test_sync_title_already_correct(mocker: MockerFixture) -> None: """Title is not updated when it matches the expected format.""" - mock_labels = mocker.patch("utils.issue_sync.gh_issue_add_labels") - from utils.issue_builder import build_issue_title + mock_labels = mocker.patch("security.issues.sync.gh_issue_add_labels") + from security.issues.builder import build_issue_title title = build_issue_title("sast", "CVE-2026-1234", "fp_test_123") issue = Issue(number=1, state="open", title=title, body="b") ctx = _make_alert_context(rule_name="sast", rule_id="CVE-2026-1234", fingerprint="fp_test_123") @@ -554,8 +554,8 @@ def test_sync_title_dry_run() -> None: def test_handle_new_child_creates_issue(mocker: MockerFixture, sast_alert: Alert) -> None: """Creates a new issue and registers it in the index.""" - mock_create = mocker.patch("utils.issue_sync.gh_issue_create", return_value=42) - mocker.patch("utils.issue_sync.gh_issue_comment") + mock_create = mocker.patch("security.issues.sync.gh_issue_create", return_value=42) + mocker.patch("security.issues.sync.gh_issue_comment") ctx = _make_alert_context(alert=sast_alert, rule_name="sast") issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) @@ -578,9 +578,9 @@ def test_handle_new_child_dry_run(sast_alert: Alert) -> None: def test_handle_new_child_links_to_parent(mocker: MockerFixture, sast_alert: Alert) -> None: """When a parent issue exists, the child is linked as a sub-issue.""" - mocker.patch("utils.issue_sync.gh_issue_create", return_value=42) - mock_sub = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") - mocker.patch("utils.issue_sync.gh_issue_comment") + mocker.patch("security.issues.sync.gh_issue_create", return_value=42) 
+ mock_sub = mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number") + mocker.patch("security.issues.sync.gh_issue_comment") parent = Issue(number=1, state="open", title="P", body="pb") ctx = _make_alert_context(alert=sast_alert) sync = _make_sync_context(notifications=[]) @@ -589,7 +589,7 @@ def test_handle_new_child_links_to_parent(mocker: MockerFixture, sast_alert: Ale def test_handle_new_child_create_fails(mocker: MockerFixture, sast_alert: Alert) -> None: """If gh_issue_create returns None, no crash and no index update.""" - mocker.patch("utils.issue_sync.gh_issue_create", return_value=None) + mocker.patch("security.issues.sync.gh_issue_create", return_value=None) ctx = _make_alert_context(alert=sast_alert) sync = _make_sync_context(notifications=[]) _handle_new_child_issue(ctx=ctx, sync=sync, parent_issue=None) @@ -603,8 +603,8 @@ def test_handle_new_child_create_fails(mocker: MockerFixture, sast_alert: Alert) def test_ensure_child_linked_already_linked(mocker: MockerFixture) -> None: """No-op when the child is already in the parent's sub-issues.""" - mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value={5}) - mock_add = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") + mocker.patch("security.issues.sync.gh_issue_get_sub_issue_numbers", return_value={5}) + mock_add = mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number") parent = Issue(number=1, state="open", title="P", body="pb") child = Issue(number=5, state="open", title="C", body="cb") ctx = _make_alert_context() @@ -615,8 +615,8 @@ def test_ensure_child_linked_already_linked(mocker: MockerFixture) -> None: def test_ensure_child_linked_missing_adds_link(mocker: MockerFixture) -> None: """Adds the sub-issue link when the child is missing from the parent.""" - mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value=set()) - mock_add = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number", 
return_value=True) + mocker.patch("security.issues.sync.gh_issue_get_sub_issue_numbers", return_value=set()) + mock_add = mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number", return_value=True) parent = Issue(number=1, state="open", title="P", body="pb") child = Issue(number=5, state="open", title="C", body="cb") ctx = _make_alert_context() @@ -627,8 +627,8 @@ def test_ensure_child_linked_missing_adds_link(mocker: MockerFixture) -> None: def test_ensure_child_linked_missing_dry_run(mocker: MockerFixture) -> None: """In dry-run mode logs intent without calling the add-sub-issue API.""" - mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value=set()) - mock_add = mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") + mocker.patch("security.issues.sync.gh_issue_get_sub_issue_numbers", return_value=set()) + mock_add = mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number") parent = Issue(number=1, state="open", title="P", body="pb") child = Issue(number=5, state="open", title="C", body="cb") ctx = _make_alert_context() @@ -639,8 +639,8 @@ def test_ensure_child_linked_missing_dry_run(mocker: MockerFixture) -> None: def test_ensure_child_linked_cache_populated(mocker: MockerFixture) -> None: """gh_issue_get_sub_issue_numbers is called only once per parent (cached).""" - mock_list = mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value={5, 6}) - mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number") + mock_list = mocker.patch("security.issues.sync.gh_issue_get_sub_issue_numbers", return_value={5, 6}) + mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number") parent = Issue(number=1, state="open", title="P", body="pb") child_a = Issue(number=5, state="open", title="A", body="ab") child_b = Issue(number=6, state="open", title="B", body="bb") @@ -654,8 +654,8 @@ def test_ensure_child_linked_cache_populated(mocker: MockerFixture) -> None: def 
test_ensure_child_linked_api_failure_no_cache_update(mocker: MockerFixture) -> None: """When the API call to add the link fails, the cache is not updated.""" - mocker.patch("utils.issue_sync.gh_issue_get_sub_issue_numbers", return_value=set()) - mocker.patch("utils.issue_sync.gh_issue_add_sub_issue_by_number", return_value=False) + mocker.patch("security.issues.sync.gh_issue_get_sub_issue_numbers", return_value=set()) + mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number", return_value=False) parent = Issue(number=1, state="open", title="P", body="pb") child = Issue(number=5, state="open", title="C", body="cb") ctx = _make_alert_context() @@ -671,8 +671,8 @@ def test_ensure_child_linked_api_failure_no_cache_update(mocker: MockerFixture) def test_ensure_parent_creates_new(mocker: MockerFixture, sast_alert: Alert) -> None: """Creates a parent issue when none exists for the rule_id.""" - mock_create = mocker.patch("utils.issue_sync.gh_issue_create", return_value=99) - mocker.patch("utils.issue_sync.gh_issue_comment") + mock_create = mocker.patch("security.issues.sync.gh_issue_create", return_value=99) + mocker.patch("security.issues.sync.gh_issue_comment") issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) result = ensure_parent_issue(sast_alert, issues, index, dry_run=False) @@ -729,7 +729,7 @@ def test_ensure_parent_no_rule_id() -> None: def test_ensure_parent_create_fails(mocker: MockerFixture, sast_alert: Alert) -> None: """Returns None if gh_issue_create fails.""" - mocker.patch("utils.issue_sync.gh_issue_create", return_value=None) + mocker.patch("security.issues.sync.gh_issue_create", return_value=None) issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) result = ensure_parent_issue(sast_alert, issues, index, dry_run=False) @@ -754,7 +754,7 @@ def test_ensure_parent_body_deferred(sast_alert: Alert) -> None: def test_ensure_parent_title_drift_corrected(mocker: 
MockerFixture, sast_alert: Alert) -> None: """Title is updated when it drifts from the expected format.""" - mock_title = mocker.patch("utils.issue_sync.gh_issue_edit_title", return_value=True) + mock_title = mocker.patch("security.issues.sync.gh_issue_edit_title", return_value=True) parent = _issue_with_secmeta(10, { "type": "parent", "rule_id": sast_alert.metadata.rule_id, @@ -776,7 +776,7 @@ def test_ensure_parent_title_drift_corrected(mocker: MockerFixture, sast_alert: def test_flush_writes_changed_bodies(mocker: MockerFixture) -> None: """Writes body when it has changed.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_body") + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_body") issue = Issue(number=1, state="open", title="T", body="new body") bods = {1: ("org/repo", "old body")} _flush_parent_body_updates(bods, {1: issue}, dry_run=False) @@ -816,7 +816,7 @@ def test_label_orphan_no_orphans() -> None: def test_label_orphan_found(mocker: MockerFixture) -> None: """Labels child issues that have no matching alert.""" - mock_labels = mocker.patch("utils.issue_sync.gh_issue_add_labels") + mock_labels = mocker.patch("security.issues.sync.gh_issue_add_labels") child = _issue_with_secmeta(1, { "type": "child", "fingerprint": "fp_orphan", "repo": "org/repo", }) @@ -868,7 +868,7 @@ def test_label_orphan_no_repo_in_secmeta() -> None: def test_close_resolved_parent_issue(mocker: MockerFixture) -> None: """Closes an open parent when all known children are closed.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) parent = _issue_with_secmeta(10, { "type": "parent", "rule_id": "R1", "repo": "org/repo", }) @@ -889,7 +889,7 @@ def test_close_resolved_parent_issue(mocker: MockerFixture) -> None: def test_close_resolved_parent_skips_open_child(mocker: MockerFixture) -> None: """Leaves the parent open when any child is still 
open.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) parent = _issue_with_secmeta(10, { "type": "parent", "rule_id": "R1", "repo": "org/repo", }) @@ -987,7 +987,7 @@ def test_sync_severity_change_detected(sast_alert: Alert) -> None: def test_sync_closes_parent_when_all_children_closed(mocker: MockerFixture) -> None: """Closes a parent during sync when its children are already closed.""" - mock_edit = mocker.patch("utils.issue_sync.gh_issue_edit_state", return_value=True) + mock_edit = mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) parent = _issue_with_secmeta(10, { "type": "parent", "rule_id": "R1", "repo": "org/repo", }) @@ -1033,7 +1033,7 @@ def test_init_priority_sync_no_org_returns_none() -> None: def test_init_priority_sync_field_lookup_fails(mocker: MockerFixture) -> None: """Returns None when gh_project_get_priority_field fails.""" - mocker.patch("utils.issue_sync.gh_project_get_priority_field", return_value=None) + mocker.patch("security.issues.sync.gh_project_get_priority_field", return_value=None) result = _init_priority_sync( {}, severity_priority_map={"high": "Urgent"}, project_number=7, project_org="org", dry_run=False, diff --git a/tests/security/utils/test_templates.py b/tests/security/issues/test_templates.py similarity index 97% rename from tests/security/utils/test_templates.py rename to tests/security/issues/test_templates.py index cd3a423..97265d2 100644 --- a/tests/security/utils/test_templates.py +++ b/tests/security/issues/test_templates.py @@ -16,8 +16,8 @@ """Unit tests for ``utils.templates``.""" -from shared.templates import render_markdown_template -from utils.templates import CHILD_BODY_TEMPLATE, PARENT_BODY_TEMPLATE +from core.rendering import render_markdown_template +from security.issues.templates import CHILD_BODY_TEMPLATE, PARENT_BODY_TEMPLATE # 
===================================================================== diff --git a/tests/security/utils/test_teams.py b/tests/security/notifications/test_teams.py similarity index 92% rename from tests/security/utils/test_teams.py rename to tests/security/notifications/test_teams.py index 20be5a0..5de3a58 100644 --- a/tests/security/utils/test_teams.py +++ b/tests/security/notifications/test_teams.py @@ -22,8 +22,8 @@ import pytest -from utils.models import NotifiedIssue, SeverityChange -from utils.teams import ( +from security.issues.models import NotifiedIssue, SeverityChange +from security.notifications.teams import ( build_severity_change_body, build_teams_notification_body, notify_teams, @@ -136,8 +136,8 @@ def fake_run(cmd, **kwargs): calls.append((cmd, kwargs)) return types.SimpleNamespace(returncode=0, stdout="ok", stderr="") - monkeypatch.setattr("utils.teams.subprocess.run", fake_run) - monkeypatch.setattr("utils.teams.os.path.exists", lambda _: True) + monkeypatch.setattr("security.notifications.teams.subprocess.run", fake_run) + monkeypatch.setattr("security.notifications.teams.os.path.exists", lambda _: True) return calls @@ -166,8 +166,8 @@ def test_notify_teams_skips_when_script_not_found( caplog: pytest.LogCaptureFixture, ) -> None: calls: list = [] - monkeypatch.setattr("utils.teams.subprocess.run", lambda cmd, **kw: calls.append(cmd)) - monkeypatch.setattr("utils.teams.os.path.exists", lambda _: False) + monkeypatch.setattr("security.notifications.teams.subprocess.run", lambda cmd, **kw: calls.append(cmd)) + monkeypatch.setattr("security.notifications.teams.os.path.exists", lambda _: False) with caplog.at_level(logging.WARNING): notify_teams("https://hook", sample_notifications, dry_run=False) assert len(calls) == 0 @@ -195,8 +195,8 @@ def test_notify_teams_subprocess_failure( def fake_run(cmd, **kwargs): return types.SimpleNamespace(returncode=1, stdout="", stderr="send failed") - monkeypatch.setattr("utils.teams.subprocess.run", fake_run) - 
monkeypatch.setattr("utils.teams.os.path.exists", lambda _: True) + monkeypatch.setattr("security.notifications.teams.subprocess.run", fake_run) + monkeypatch.setattr("security.notifications.teams.os.path.exists", lambda _: True) with caplog.at_level(logging.WARNING): notify_teams("https://hook", sample_notifications, dry_run=False) assert any("failed" in r.message.lower() for r in caplog.records) diff --git a/tests/security/test_check_labels.py b/tests/security/test_check_labels.py index 19d18a6..78975bf 100644 --- a/tests/security/test_check_labels.py +++ b/tests/security/test_check_labels.py @@ -22,7 +22,7 @@ import pytest from pytest_mock import MockerFixture -from check_labels import REQUIRED_LABELS, check_labels, fetch_repo_labels, main +from security.check_labels import REQUIRED_LABELS, check_labels, fetch_repo_labels, main REPO = "my-org/my-repo" @@ -35,7 +35,7 @@ def _gh_result(labels: list[str]) -> subprocess.CompletedProcess: def test_fetch_repo_labels_returns_names(mocker: MockerFixture) -> None: - mock_gh = mocker.patch("check_labels.run_gh", return_value=_gh_result(["scope:security", "epic"])) + mock_gh = mocker.patch("security.check_labels.run_gh", return_value=_gh_result(["scope:security", "epic"])) assert fetch_repo_labels(REPO) == ["scope:security", "epic"] mock_gh.assert_called_once_with( ["label", "list", "--repo", REPO, "--json", "name", "--limit", "500"], @@ -45,19 +45,19 @@ def test_fetch_repo_labels_returns_names(mocker: MockerFixture) -> None: def test_fetch_repo_labels_skips_empty_names(mocker: MockerFixture) -> None: payload = json.dumps([{"name": "good"}, {"name": ""}, {}]) mocker.patch( - "check_labels.run_gh", + "security.check_labels.run_gh", return_value=subprocess.CompletedProcess(args=[], returncode=0, stdout=payload, stderr=""), ) assert fetch_repo_labels(REPO) == ["good"] def test_check_labels_all_present(mocker: MockerFixture) -> None: - mocker.patch("check_labels.fetch_repo_labels", return_value=list(REQUIRED_LABELS) + 
["extra-label"]) + mocker.patch("security.check_labels.fetch_repo_labels", return_value=list(REQUIRED_LABELS) + ["extra-label"]) assert check_labels(REPO) == [] def test_check_labels_some_missing(mocker: MockerFixture) -> None: - mocker.patch("check_labels.fetch_repo_labels", return_value=["scope:security", "epic"]) + mocker.patch("security.check_labels.fetch_repo_labels", return_value=["scope:security", "epic"]) missing = check_labels(REPO) assert "type:tech-debt" in missing assert "sec:adept-to-close" in missing @@ -65,23 +65,23 @@ def test_check_labels_some_missing(mocker: MockerFixture) -> None: def test_check_labels_all_missing(mocker: MockerFixture) -> None: - mocker.patch("check_labels.fetch_repo_labels", return_value=[]) + mocker.patch("security.check_labels.fetch_repo_labels", return_value=[]) assert check_labels(REPO) == list(REQUIRED_LABELS) def test_check_labels_custom_required(mocker: MockerFixture) -> None: - mocker.patch("check_labels.fetch_repo_labels", return_value=["a"]) + mocker.patch("security.check_labels.fetch_repo_labels", return_value=["a"]) assert check_labels(REPO, required=["a", "b"]) == ["b"] def test_main_success(mocker: MockerFixture) -> None: - mock_check = mocker.patch("check_labels.check_labels", return_value=[]) + mock_check = mocker.patch("security.check_labels.check_labels", return_value=[]) assert main(["--repo", REPO]) == 0 mock_check.assert_called_once_with(REPO) def test_main_failure(mocker: MockerFixture) -> None: - mocker.patch("check_labels.check_labels", return_value=["epic"]) + mocker.patch("security.check_labels.check_labels", return_value=["epic"]) assert main(["--repo", REPO]) == 1 diff --git a/tests/security/test_collect_alert.py b/tests/security/test_collect_alert.py index 0b817b4..57fce59 100644 --- a/tests/security/test_collect_alert.py +++ b/tests/security/test_collect_alert.py @@ -22,7 +22,7 @@ import pytest from pytest_mock import MockerFixture -from collect_alert import ( +from security.collect_alert import ( 
RULE_DETAIL_KEYS, VALID_STATES, _gh_api_json, @@ -69,13 +69,13 @@ def _mock_happy_path(mocker: MockerFixture, repo_data: dict | None = None, raw_alerts: list | None = None): """Set up mocks for a successful main() run.""" - mocker.patch("collect_alert.shutil.which", return_value="/usr/bin/gh") + mocker.patch("security.collect_alert.shutil.which", return_value="/usr/bin/gh") mocker.patch( - "collect_alert.run_gh", + "security.collect_alert.run_gh", return_value=_gh_ok("Logged in"), ) mocker.patch( - "collect_alert._gh_api_json", + "security.collect_alert._gh_api_json", return_value=repo_data or { "id": 1, "name": "my-repo", @@ -87,7 +87,7 @@ def _mock_happy_path(mocker: MockerFixture, repo_data: dict | None = None, raw_a }, ) mocker.patch( - "collect_alert._gh_api_paginate", + "security.collect_alert._gh_api_paginate", return_value=raw_alerts if raw_alerts is not None else [], ) @@ -214,19 +214,19 @@ def test_parse_alert_details_value_with_colon() -> None: def test_gh_api_json_success(mocker: MockerFixture) -> None: payload = {"id": 123, "name": "my-repo"} - mocker.patch("collect_alert.run_gh", return_value=_gh_ok(json.dumps(payload))) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok(json.dumps(payload))) assert _gh_api_json("/repos/my-org/my-repo") == payload def test_gh_api_json_failure_exits(mocker: MockerFixture) -> None: - mocker.patch("collect_alert.run_gh", return_value=_gh_fail("not found")) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_fail("not found")) with pytest.raises(SystemExit): _gh_api_json("/repos/my-org/my-repo") def test_gh_api_paginate_single_page(mocker: MockerFixture) -> None: alerts = [{"number": 1}, {"number": 2}] - mocker.patch("collect_alert.run_gh", return_value=_gh_ok(json.dumps(alerts))) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok(json.dumps(alerts))) assert _gh_api_paginate("/repos/org/repo/alerts") == alerts @@ -234,24 +234,24 @@ def 
test_gh_api_paginate_multiple_pages(mocker: MockerFixture) -> None: page1 = json.dumps([{"number": 1}]) page2 = json.dumps([{"number": 2}]) stdout = page1 + "\n" + page2 - mocker.patch("collect_alert.run_gh", return_value=_gh_ok(stdout)) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok(stdout)) result = _gh_api_paginate("/repos/org/repo/alerts") assert result == [{"number": 1}, {"number": 2}] def test_gh_api_paginate_single_object(mocker: MockerFixture) -> None: - mocker.patch("collect_alert.run_gh", return_value=_gh_ok(json.dumps({"key": "val"}))) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok(json.dumps({"key": "val"}))) result = _gh_api_paginate("/endpoint") assert result == [{"key": "val"}] def test_gh_api_paginate_empty_array(mocker: MockerFixture) -> None: - mocker.patch("collect_alert.run_gh", return_value=_gh_ok("[]")) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok("[]")) assert _gh_api_paginate("/endpoint") == [] def test_gh_api_paginate_failure_exits(mocker: MockerFixture) -> None: - mocker.patch("collect_alert.run_gh", return_value=_gh_fail("error")) + mocker.patch("security.collect_alert.run_gh", return_value=_gh_fail("error")) with pytest.raises(SystemExit): _gh_api_paginate("/endpoint") @@ -385,7 +385,7 @@ def test_main_repo_metadata_in_output(mocker: MockerFixture, tmp_path) -> None: def test_main_state_forwarded_to_paginate(mocker: MockerFixture, tmp_path) -> None: _mock_happy_path(mocker) - mock_paginate = mocker.patch("collect_alert._gh_api_paginate", return_value=[]) + mock_paginate = mocker.patch("security.collect_alert._gh_api_paginate", return_value=[]) out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--state", "dismissed", "--out", out]) endpoint = mock_paginate.call_args[0][0] @@ -394,7 +394,7 @@ def test_main_state_forwarded_to_paginate(mocker: MockerFixture, tmp_path) -> No def test_main_state_all_omits_state_param(mocker: MockerFixture, tmp_path) -> None: 
_mock_happy_path(mocker) - mock_paginate = mocker.patch("collect_alert._gh_api_paginate", return_value=[]) + mock_paginate = mocker.patch("security.collect_alert._gh_api_paginate", return_value=[]) out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--state", "all", "--out", out]) endpoint = mock_paginate.call_args[0][0] @@ -402,31 +402,31 @@ def test_main_state_all_omits_state_param(mocker: MockerFixture, tmp_path) -> No def test_main_invalid_repo_format_exits(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("collect_alert.shutil.which", return_value="/usr/bin/gh") - mocker.patch("collect_alert.run_gh", return_value=_gh_ok("ok")) + mocker.patch("security.collect_alert.shutil.which", return_value="/usr/bin/gh") + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok("ok")) out = str(tmp_path / "alerts.json") with pytest.raises(SystemExit): main(["--repo", "noslash", "--out", out]) def test_main_gh_not_found_exits(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("collect_alert.shutil.which", return_value=None) + mocker.patch("security.collect_alert.shutil.which", return_value=None) out = str(tmp_path / "alerts.json") with pytest.raises(SystemExit): main(["--repo", REPO, "--out", out]) def test_main_gh_not_authenticated_exits(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("collect_alert.shutil.which", return_value="/usr/bin/gh") - mocker.patch("collect_alert.run_gh", return_value=_gh_fail("not logged in")) + mocker.patch("security.collect_alert.shutil.which", return_value="/usr/bin/gh") + mocker.patch("security.collect_alert.run_gh", return_value=_gh_fail("not logged in")) out = str(tmp_path / "alerts.json") with pytest.raises(SystemExit): main(["--repo", REPO, "--out", out]) def test_main_refuses_overwrite(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("collect_alert.shutil.which", return_value="/usr/bin/gh") - mocker.patch("collect_alert.run_gh", return_value=_gh_ok("ok")) + 
mocker.patch("security.collect_alert.shutil.which", return_value="/usr/bin/gh") + mocker.patch("security.collect_alert.run_gh", return_value=_gh_ok("ok")) out = tmp_path / "alerts.json" out.write_text("{}") with pytest.raises(SystemExit): @@ -435,7 +435,7 @@ def test_main_refuses_overwrite(mocker: MockerFixture, tmp_path) -> None: def test_main_verbose_via_flag(mocker: MockerFixture, tmp_path) -> None: _mock_happy_path(mocker) - mock_setup = mocker.patch("collect_alert.setup_logging") + mock_setup = mocker.patch("security.collect_alert.setup_logging") out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--out", out, "--verbose"]) mock_setup.assert_called_once_with(True) @@ -444,7 +444,7 @@ def test_main_verbose_via_flag(mocker: MockerFixture, tmp_path) -> None: def test_main_verbose_via_runner_debug(mocker: MockerFixture, tmp_path, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("RUNNER_DEBUG", "1") _mock_happy_path(mocker) - mock_setup = mocker.patch("collect_alert.setup_logging") + mock_setup = mocker.patch("security.collect_alert.setup_logging") out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--out", out]) mock_setup.assert_called_once_with(True) diff --git a/tests/security/utils/test_constants.py b/tests/security/test_constants.py similarity index 97% rename from tests/security/utils/test_constants.py rename to tests/security/test_constants.py index c832dbe..22b5fbd 100644 --- a/tests/security/utils/test_constants.py +++ b/tests/security/test_constants.py @@ -16,7 +16,7 @@ """Unit tests for ``utils.constants``.""" -from utils.constants import ( +from security.constants import ( LABEL_EPIC, LABEL_SCOPE_SECURITY, LABEL_SEC_ADEPT_TO_CLOSE, diff --git a/tests/security/test_sync_security_alerts.py b/tests/security/test_main.py similarity index 78% rename from tests/security/test_sync_security_alerts.py rename to tests/security/test_main.py index 3d5b842..f0de391 100644 --- a/tests/security/test_sync_security_alerts.py +++ 
b/tests/security/test_main.py @@ -14,12 +14,12 @@ # limitations under the License. # -"""Unit tests for ``sync_security_alerts.py``.""" +"""Unit tests for ``security.main``.""" import pytest from pytest_mock import MockerFixture -from sync_security_alerts import VALID_STATES, _resolve_repo, main, parse_args +from security.main import VALID_STATES, _resolve_repo, main, parse_args REPO = "my-org/my-repo" @@ -27,9 +27,9 @@ def _run_promote(mocker: MockerFixture, tmp_path, extra_args: list[str] | None = None) -> list[str]: """Helper: run main() with mocked pipeline and return the argv passed to promote_alerts.""" - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mock_promote = mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + mock_promote = mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") argv = ["--repo", REPO, "--out", out] + (extra_args or []) main(argv) @@ -127,15 +127,15 @@ def test_resolve_repo_no_slash_raises() -> None: def test_missing_labels_returns_1(mocker: MockerFixture) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=["epic"]) + mocker.patch("security.main.check_labels", return_value=["epic"]) rc = main(["--repo", REPO]) assert rc == 1 def test_skip_label_check(mocker: MockerFixture, tmp_path) -> None: - mock_check = mocker.patch("sync_security_alerts.check_labels") - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mock_check = mocker.patch("security.main.check_labels") + mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") rc = main(["--repo", REPO, "--skip-label-check", "--out", out]) mock_check.assert_not_called() @@ -143,16 +143,16 @@ def 
test_skip_label_check(mocker: MockerFixture, tmp_path) -> None: def test_labels_ok_proceeds(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") rc = main(["--repo", REPO, "--out", out]) assert rc == 0 def test_existing_file_without_force_returns_1(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) + mocker.patch("security.main.check_labels", return_value=[]) out = tmp_path / "alerts.json" out.write_text("{}") rc = main(["--repo", REPO, "--out", str(out)]) @@ -160,9 +160,9 @@ def test_existing_file_without_force_returns_1(mocker: MockerFixture, tmp_path) def test_existing_file_with_force_removes_it(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = tmp_path / "alerts.json" out.write_text("{}") rc = main(["--repo", REPO, "--out", str(out), "--force"]) @@ -171,18 +171,18 @@ def test_existing_file_with_force_removes_it(mocker: MockerFixture, tmp_path) -> def test_nonexistent_file_proceeds(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + 
mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "new.json") rc = main(["--repo", REPO, "--out", out]) assert rc == 0 def test_collect_called_with_basic_args(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mock_collect = mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mock_collect = mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--state", "fixed", "--out", out]) call_args = mock_collect.call_args[0][0] @@ -195,9 +195,9 @@ def test_collect_called_with_basic_args(mocker: MockerFixture, tmp_path) -> None def test_verbose_forwarded_to_collect(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mock_collect = mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mock_collect = mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--verbose", "--out", out]) call_args = mock_collect.call_args[0][0] @@ -260,25 +260,25 @@ def test_promote_empty_optionals_not_forwarded(mocker: MockerFixture, tmp_path) def test_pipeline_success_returns_0(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + 
mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") assert main(["--repo", REPO, "--out", out]) == 0 def test_collect_error_propagates(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main", side_effect=SystemExit(1)) + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main", side_effect=SystemExit(1)) out = str(tmp_path / "alerts.json") with pytest.raises(SystemExit): main(["--repo", REPO, "--out", out]) def test_promote_error_propagates(mocker: MockerFixture, tmp_path) -> None: - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main", side_effect=SystemExit(1)) + mocker.patch("security.main.check_labels", return_value=[]) + mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main", side_effect=SystemExit(1)) out = str(tmp_path / "alerts.json") with pytest.raises(SystemExit): main(["--repo", REPO, "--out", out]) @@ -287,16 +287,16 @@ def test_promote_error_propagates(mocker: MockerFixture, tmp_path) -> None: def test_pipeline_call_order(mocker: MockerFixture, tmp_path) -> None: call_order: list[str] = [] mocker.patch( - "sync_security_alerts.check_labels", + "security.main.check_labels", return_value=[], side_effect=lambda *a, **k: (call_order.append("check"), [])[-1], ) mocker.patch( - "sync_security_alerts.collect_alert_main", + "security.main.collect_alert_main", side_effect=lambda *a, **k: call_order.append("collect"), ) mocker.patch( - "sync_security_alerts.promote_alerts_main", + "security.main.promote_alerts_main", side_effect=lambda *a, **k: call_order.append("promote"), ) out = str(tmp_path / "alerts.json") @@ -306,9 +306,9 @@ def test_pipeline_call_order(mocker: MockerFixture, tmp_path) -> 
None: def test_env_repo_fallback(mocker: MockerFixture, tmp_path, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("GITHUB_REPOSITORY", REPO) - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mock_collect = mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mock_collect = mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") assert main(["--out", out]) == 0 call_args = mock_collect.call_args[0][0] @@ -323,9 +323,9 @@ def test_no_repo_returns_error(monkeypatch: pytest.MonkeyPatch) -> None: def test_verbose_via_runner_debug(mocker: MockerFixture, tmp_path, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("RUNNER_DEBUG", "1") - mocker.patch("sync_security_alerts.check_labels", return_value=[]) - mock_collect = mocker.patch("sync_security_alerts.collect_alert_main") - mocker.patch("sync_security_alerts.promote_alerts_main") + mocker.patch("security.main.check_labels", return_value=[]) + mock_collect = mocker.patch("security.main.collect_alert_main") + mocker.patch("security.main.promote_alerts_main") out = str(tmp_path / "alerts.json") main(["--repo", REPO, "--out", out]) call_args = mock_collect.call_args[0][0] diff --git a/tests/security/test_promote_alerts.py b/tests/security/test_promote_alerts.py index 5984f11..591d027 100644 --- a/tests/security/test_promote_alerts.py +++ b/tests/security/test_promote_alerts.py @@ -21,7 +21,8 @@ import pytest from pytest_mock import MockerFixture -from utils.models import LoadedAlerts, NotifiedIssue, SeverityChange, SyncResult +from security.alerts.models import LoadedAlerts +from security.issues.models import NotifiedIssue, SeverityChange, SyncResult # Default empty sync result reused across tests. 
_SYNC_RESULT_EMPTY = SyncResult(notifications=[], severity_changes=[]) @@ -34,12 +35,12 @@ def test_parse_args_defaults(monkeypatch: pytest.MonkeyPatch) -> None: """Defaults are applied when no CLI args are given.""" - monkeypatch.setattr("sys.argv", ["promote_alerts.py"]) + monkeypatch.setattr("sys.argv", ["security.promote_alerts.py"]) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) monkeypatch.delenv("PROJECT_ORG", raising=False) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) - from promote_alerts import parse_args + from security.promote_alerts import parse_args args = parse_args() assert args.file == "alerts.json" @@ -50,7 +51,7 @@ def test_parse_args_defaults(monkeypatch: pytest.MonkeyPatch) -> None: def test_parse_args_all_flags(monkeypatch: pytest.MonkeyPatch) -> None: """All CLI flags and options are parsed correctly.""" monkeypatch.setattr("sys.argv", [ - "promote_alerts.py", + "security.promote_alerts.py", "--file", "custom.json", "--dry-run", "--verbose", @@ -60,7 +61,7 @@ def test_parse_args_all_flags(monkeypatch: pytest.MonkeyPatch) -> None: "--project-org", "my-org", "--teams-webhook-url", "https://hook.example.com", ]) - from promote_alerts import parse_args + from security.promote_alerts import parse_args args = parse_args() assert args.file == "custom.json" @@ -80,9 +81,9 @@ def test_parse_args_all_flags(monkeypatch: pytest.MonkeyPatch) -> None: def test_missing_gh_cli_raises(monkeypatch: pytest.MonkeyPatch) -> None: """main() raises SystemExit when gh CLI is not found.""" - monkeypatch.setattr("sys.argv", ["promote_alerts.py"]) + monkeypatch.setattr("sys.argv", ["security.promote_alerts.py"]) monkeypatch.setattr("shutil.which", lambda _cmd: None) - from promote_alerts import main + from security.promote_alerts import main with pytest.raises(SystemExit, match="gh CLI"): main() @@ -97,21 +98,21 @@ def test_missing_gh_cli_raises(monkeypatch: pytest.MonkeyPatch) -> None: def 
main_mocks(mocker: MockerFixture) -> SimpleNamespace: """Provide mocked dependencies for ``main()`` with sensible defaults.""" return SimpleNamespace( - which=mocker.patch("promote_alerts.shutil.which", return_value="/usr/bin/gh"), + which=mocker.patch("security.promote_alerts.shutil.which", return_value="/usr/bin/gh"), load=mocker.patch( - "promote_alerts.load_open_alerts_from_file", + "security.promote_alerts.load_open_alerts_from_file", return_value=LoadedAlerts(repo_full="org/repo", open_by_number={}), ), list_issues=mocker.patch( - "promote_alerts.gh_issue_list_by_label", + "security.promote_alerts.gh_issue_list_by_label", return_value={}, ), sync=mocker.patch( - "promote_alerts.sync_alerts_and_issues", + "security.promote_alerts.sync_alerts_and_issues", return_value=_SYNC_RESULT_EMPTY, ), - notify=mocker.patch("promote_alerts.notify_teams"), - notify_sev=mocker.patch("promote_alerts.notify_teams_severity_changes"), + notify=mocker.patch("security.promote_alerts.notify_teams"), + notify_sev=mocker.patch("security.promote_alerts.notify_teams_severity_changes"), ) @@ -125,12 +126,12 @@ def test_main_dry_run( monkeypatch: pytest.MonkeyPatch, ) -> None: """Dry-run mode passes dry_run=True to sync_alerts_and_issues.""" - monkeypatch.setattr("sys.argv", ["promote_alerts.py", "--dry-run"]) + monkeypatch.setattr("sys.argv", ["security.promote_alerts.py", "--dry-run"]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) monkeypatch.delenv("PROJECT_ORG", raising=False) - from promote_alerts import main + from security.promote_alerts import main main() _, kwargs = main_mocks.sync.call_args @@ -142,12 +143,12 @@ def test_main_passes_file_arg( monkeypatch: pytest.MonkeyPatch, ) -> None: """--file value is forwarded to load_open_alerts_from_file.""" - monkeypatch.setattr("sys.argv", ["promote_alerts.py", "--file", "custom.json"]) + 
monkeypatch.setattr("sys.argv", ["security.promote_alerts.py", "--file", "custom.json"]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) monkeypatch.delenv("PROJECT_ORG", raising=False) - from promote_alerts import main + from security.promote_alerts import main main() main_mocks.load.assert_called_once_with("custom.json") @@ -158,7 +159,7 @@ def test_main_no_webhook_skips_notification( monkeypatch: pytest.MonkeyPatch, ) -> None: """Without TEAMS_WEBHOOK_URL, notify_teams is still called (with empty url).""" - monkeypatch.setattr("sys.argv", ["promote_alerts.py"]) + monkeypatch.setattr("sys.argv", ["security.promote_alerts.py"]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) @@ -174,7 +175,7 @@ def test_main_no_webhook_skips_notification( ], severity_changes=[], ) - from promote_alerts import main + from security.promote_alerts import main main() # Without webhook URL, logging.debug is hit and notify_teams is not called @@ -187,7 +188,7 @@ def test_main_with_webhook_sends_notifications( ) -> None: """When TEAMS_WEBHOOK_URL is set and there are notifications, notify_teams is called.""" monkeypatch.setattr("sys.argv", [ - "promote_alerts.py", "--teams-webhook-url", "https://hook.example.com", + "security.promote_alerts.py", "--teams-webhook-url", "https://hook.example.com", ]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) @@ -203,7 +204,7 @@ def test_main_with_webhook_sends_notifications( ], severity_changes=[], ) - from promote_alerts import main + from security.promote_alerts import main main() main_mocks.notify.assert_called_once() @@ -217,13 +218,13 @@ def test_main_severity_priority_map_forwarded( ) -> None: """--severity-priority-map value is parsed and 
forwarded to sync.""" monkeypatch.setattr("sys.argv", [ - "promote_alerts.py", "--severity-priority-map", "High=Urgent,Low=Minor", + "security.promote_alerts.py", "--severity-priority-map", "High=Urgent,Low=Minor", ]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) monkeypatch.delenv("PROJECT_ORG", raising=False) - from promote_alerts import main + from security.promote_alerts import main main() _, kwargs = main_mocks.sync.call_args @@ -236,13 +237,13 @@ def test_main_project_number_forwarded( ) -> None: """--project-number and --project-org are forwarded to sync.""" monkeypatch.setattr("sys.argv", [ - "promote_alerts.py", "--project-number", "42", "--project-org", "my-org", + "security.promote_alerts.py", "--project-number", "42", "--project-org", "my-org", ]) monkeypatch.delenv("TEAMS_WEBHOOK_URL", raising=False) monkeypatch.delenv("SEVERITY_PRIORITY_MAP", raising=False) monkeypatch.delenv("PROJECT_NUMBER", raising=False) monkeypatch.delenv("PROJECT_ORG", raising=False) - from promote_alerts import main + from security.promote_alerts import main main() _, kwargs = main_mocks.sync.call_args diff --git a/tests/security/test_send_to_teams.py b/tests/security/test_send_notifications.py similarity index 94% rename from tests/security/test_send_to_teams.py rename to tests/security/test_send_notifications.py index 054c074..e110dac 100644 --- a/tests/security/test_send_to_teams.py +++ b/tests/security/test_send_notifications.py @@ -24,7 +24,7 @@ import pytest -from send_to_teams import ( +from security.send_notifications import ( _build_card_body, _parse_args, _resolve_body, @@ -149,7 +149,7 @@ def test_no_body_raises(monkeypatch: pytest.MonkeyPatch) -> None: args = _parse_args([]) # stdin is a tty in tests, so it should raise fake_stdin = types.SimpleNamespace(isatty=lambda: True) - monkeypatch.setattr("send_to_teams.sys.stdin", fake_stdin) + 
monkeypatch.setattr("security.send_to_teams.sys.stdin", fake_stdin) with pytest.raises(SystemExit): _resolve_body(args) @@ -157,7 +157,7 @@ def test_from_stdin(monkeypatch: pytest.MonkeyPatch) -> None: """Body is read from stdin when neither --body nor --body-file is given.""" args = _parse_args([]) fake_stdin = types.SimpleNamespace(isatty=lambda: False, read=lambda: "piped content") - monkeypatch.setattr("send_to_teams.sys.stdin", fake_stdin) + monkeypatch.setattr("security.send_to_teams.sys.stdin", fake_stdin) assert _resolve_body(args) == "piped content" @@ -187,7 +187,7 @@ def test_no_webhook_raises(monkeypatch: pytest.MonkeyPatch) -> None: def test_main_sends_when_not_dry_run(monkeypatch: pytest.MonkeyPatch) -> None: """Non-dry-run path: main() calls send_to_teams with the webhook URL.""" calls: list[tuple] = [] - monkeypatch.setattr("send_to_teams.send_to_teams", lambda url, payload: calls.append((url, payload))) + monkeypatch.setattr("security.send_to_teams.send_to_teams", lambda url, payload: calls.append((url, payload))) main(["--body", "hi", "--webhook-url", "https://hook"]) assert len(calls) == 1 assert calls[0][0] == "https://hook" @@ -206,7 +206,7 @@ def fake_post(url, **kwargs): calls.append((url, kwargs)) return types.SimpleNamespace(status_code=200, text="1") - monkeypatch.setattr("send_to_teams.requests.post", fake_post) + monkeypatch.setattr("security.send_to_teams.requests.post", fake_post) send_to_teams("https://hook", {"type": "message"}) assert len(calls) == 1 @@ -214,6 +214,6 @@ def test_failure_raises(monkeypatch: pytest.MonkeyPatch) -> None: def fake_post(url, **kwargs): return types.SimpleNamespace(status_code=500, text="error") - monkeypatch.setattr("send_to_teams.requests.post", fake_post) + monkeypatch.setattr("security.send_to_teams.requests.post", fake_post) with pytest.raises(SystemExit, match="failed"): send_to_teams("https://hook", {"type": "message"}) From 5abab725fb45f87fba26a50b0cb47f9acecc280b Mon Sep 17 00:00:00 2001 From: 
"Tobias.Mikula" Date: Thu, 16 Apr 2026 17:00:15 +0200 Subject: [PATCH 04/16] Returning back old logic, so the PR is dedicated only to new folder structure. --- .github/copilot-instructions.md | 2 +- .../remove-adept-to-close-on-issue-close.yml | 73 ++++++ pyproject.toml | 6 +- src/security/derive_team_security_metrics.py | 208 ++++++++++++++++++ src/security/extract_team_security_stats.py | 206 +++++++++++++++++ tests/security/test_send_notifications.py | 10 +- 6 files changed, 498 insertions(+), 7 deletions(-) create mode 100644 .github/workflows/remove-adept-to-close-on-issue-close.yml create mode 100644 src/security/derive_team_security_metrics.py create mode 100644 src/security/extract_team_security_stats.py diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 311b23e..1032088 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -17,7 +17,7 @@ src/ │ ├── helpers.py # Pure utilities (sha256, iso_date, normalize_path) │ ├── models.py # Shared data models (Issue) │ ├── priority.py # Severity-to-priority mapping -│ └── templates.py # Generic Markdown template renderer +│ └── rendering.py # Generic Markdown template renderer │ ├── security/ # Security workflow domain │ ├── main.py # Pipeline orchestrator (check → collect → promote) diff --git a/.github/workflows/remove-adept-to-close-on-issue-close.yml b/.github/workflows/remove-adept-to-close-on-issue-close.yml new file mode 100644 index 0000000..88acb4d --- /dev/null +++ b/.github/workflows/remove-adept-to-close-on-issue-close.yml @@ -0,0 +1,73 @@ +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Reusable workflow – Remove sec:adept-to-close label when an issue is closed. +# +# Called from application repositories via workflow_call. +# The caller must trigger on `issues: [closed]`. +# Note: for `workflow_call`, the called workflow receives the same event payload as the caller, +# so `context.payload` (aka `github.event`) is populated without needing to "forward" it via inputs. + +name: Remove sec:adept-to-close on close + +on: + workflow_call: + +permissions: + issues: write + +jobs: + cleanup-label: + runs-on: ubuntu-latest + steps: + - name: Remove label when conditions match + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd + with: + script: | + const issue = context.payload.issue; + + // Safety: ignore PRs (they can appear as issues in GitHub UI) + if (issue.pull_request) { + core.info('Skipping: payload refers to a pull request, not an issue.'); + return; + } + + const labels = (issue.labels ?? []) + .map(l => (typeof l === 'string' ? 
l : l?.name)) + .filter(Boolean); + + const hasScopeSecurity = labels.includes('scope:security'); + const hasTechDebt = labels.includes('type:tech-debt'); + const hasAdeptToClose = labels.includes('sec:adept-to-close'); + + if (!hasScopeSecurity || !hasTechDebt) { + core.info( + `Skipping: required labels missing (scope:security=${hasScopeSecurity}, type:tech-debt=${hasTechDebt}).` + ); + return; + } + + if (!hasAdeptToClose) { + core.info('No-op: label sec:adept-to-close is not present on the issue.'); + return; + } + + await github.rest.issues.removeLabel({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: issue.number, + name: 'sec:adept-to-close', + }); diff --git a/pyproject.toml b/pyproject.toml index be4f01c..957e1a1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,11 @@ target-version = ['py314'] force-exclude = '''test''' [tool.coverage.run] -omit = ["tests/*"] +omit = [ + "tests/*", + "src/security/derive_team_security_metrics.py", + "src/security/extract_team_security_stats.py", +] [tool.mypy] check_untyped_defs = true diff --git a/src/security/derive_team_security_metrics.py b/src/security/derive_team_security_metrics.py new file mode 100644 index 0000000..8316e66 --- /dev/null +++ b/src/security/derive_team_security_metrics.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python3 +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# pylint: skip-file +""" +Derive team security metrics from issue snapshots (Issues-only). + +Resurfacing definition (B): +- A fingerprint is considered 'resurfaced' when its occurrence_count transitions + from 0 in the previous snapshot to >0 in the current snapshot. + +Inputs: +- data/issues_snapshot.json (required) +- data/issues_snapshot.prev.json (optional; if missing, resurfacing cannot be computed) + +Outputs: +- reports/metrics.json +- reports/summary.md (appends derived metrics) +""" + +import json +import logging +import os +from datetime import datetime +from typing import Any, Dict, List, Optional + +SNAPSHOT_CUR = os.environ.get("SNAPSHOT_CURRENT", "data/issues_snapshot.json") +SNAPSHOT_PREV = os.environ.get("SNAPSHOT_PREVIOUS", "data/issues_snapshot.prev.json") + +OUT_METRICS_JSON = os.environ.get("OUT_METRICS_JSON", "reports/metrics.json") +OUT_SUMMARY_MD = os.environ.get("OUT_SUMMARY_MD", "reports/summary.md") + + +def require_env(key: str) -> str: + """Return the value of environment variable *key*, or exit.""" + try: + return os.environ[key] + except KeyError as exc: + raise SystemExit(f"Missing required environment variable: {key}") from exc + + +TEAM_SLUG = require_env("GITHUB_TEAM_SLUG") + + +def _safe_int(v: Any, default: int = 0) -> int: + """Coerce *v* to ``int``, returning *default* on failure.""" + try: + if v is None: + return default + if isinstance(v, int): + return v + s = str(v).strip() + if s == "": + return default + return int(float(s)) + except Exception: + return default + + +def _load_json(path: str) -> Optional[Any]: + """Load and return JSON from *path*, or ``None`` if the file is missing.""" + if not os.path.exists(path): + return None + with open(path, "r", encoding="utf-8") as f: + return json.load(f) + + +def _index_by_fingerprint(snapshot: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]: + """Index snapshot items by their secmeta fingerprint.""" + idx: Dict[str, Dict[str, Any]] = {} + for item in snapshot: + fp = 
(item.get("secmeta") or {}).get("fingerprint") + if not fp: + # If secmeta is missing or malformed, it cannot participate in fingerprint-level stats. + continue + idx[fp] = item + return idx + + +def _severity_from_labels(labels: List[str]) -> str: + """Extract the severity token from ``sec:sev/`` labels.""" + for l in labels: + if l.startswith("sec:sev/"): + return l.split("/", 1)[1] + return "unknown" + + +def main() -> None: + """Derive and write team security metrics from issue snapshots.""" + # TODO decide about changes related to this script + logging.warning( + "This script is deprecated and may be removed in the future. Please refer to the updated documentation for deriving security metrics." + ) + return + + cur = _load_json(SNAPSHOT_CUR) + if cur is None: + raise SystemExit(f"Missing current snapshot: {SNAPSHOT_CUR}") + + if not isinstance(cur, list): + raise SystemExit(f"Current snapshot is not a list: {SNAPSHOT_CUR}") + + prev = _load_json(SNAPSHOT_PREV) + + cur_idx = _index_by_fingerprint(cur) + prev_idx = _index_by_fingerprint(prev) if isinstance(prev, list) else {} + + # Basic counts + total = len(cur) + by_sev: Dict[str, int] = {} + postponed = 0 + needs_review = 0 + + for item in cur: + labels = item.get("labels") or [] + sev = _severity_from_labels(labels) + by_sev[sev] = by_sev.get(sev, 0) + 1 + if "sec:state/postponed" in labels: + postponed += 1 + if "sec:state/needs-review" in labels: + needs_review += 1 + + # Resurfacing (B): prev occurrence_count == 0 and current > 0 + resurfaced: List[Dict[str, Any]] = [] + if prev_idx: + for fp, cur_item in cur_idx.items(): + cur_occ = _safe_int((cur_item.get("secmeta") or {}).get("occurrence_count"), 0) + prev_item = prev_idx.get(fp) + prev_occ = _safe_int(((prev_item or {}).get("secmeta") or {}).get("occurrence_count"), 0) + if prev_item is not None and prev_occ == 0 and cur_occ > 0: + resurfaced.append( + { + "fingerprint": fp, + "repo": cur_item.get("repo"), + "issue_number": 
cur_item.get("issue_number"), + "title": cur_item.get("title"), + "severity": _severity_from_labels(cur_item.get("labels") or []), + "prev_occurrence_count": prev_occ, + "current_occurrence_count": cur_occ, + } + ) + + metrics = { + "team": TEAM_SLUG, + "generated_at_utc": datetime.utcnow().isoformat() + "Z", + "snapshot_current": SNAPSHOT_CUR, + "snapshot_previous": SNAPSHOT_PREV if prev_idx else None, + "counts": { + "total_security_issues": total, + "postponed": postponed, + "needs_review": needs_review, + "by_severity": dict(sorted(by_sev.items())), + }, + "resurfaced": { + "definition": "B: fingerprint occurrence_count from 0 (previous snapshot) to >0 (current snapshot)", + "count": len(resurfaced), + "items": resurfaced, + }, + } + + os.makedirs(os.path.dirname(OUT_METRICS_JSON), exist_ok=True) + os.makedirs(os.path.dirname(OUT_SUMMARY_MD), exist_ok=True) + + with open(OUT_METRICS_JSON, "w", encoding="utf-8") as f: + json.dump(metrics, f, indent=2) + + # Append to (or create) summary.md + summary_lines: List[str] = [] + summary_lines.append(f"\n## Derived metrics\n") + summary_lines.append(f"Generated at: {metrics['generated_at_utc']}\n") + if metrics["snapshot_previous"] is None: + summary_lines.append("- Resurfacing: not computed (no previous snapshot found)\n") + else: + summary_lines.append(f"- Resurfaced fingerprints (definition B): {metrics['resurfaced']['count']}\n") + if resurfaced: + summary_lines.append("\n### Resurfaced items\n") + for r in resurfaced[:50]: + summary_lines.append( + f"- {r['severity']} {r['repo']}#{r['issue_number']} (occ {r['prev_occurrence_count']} -> {r['current_occurrence_count']}): {r['title']}\n" + ) + if len(resurfaced) > 50: + summary_lines.append(f"- ... and {len(resurfaced) - 50} more\n") + + # Ensure summary exists; if not, create a minimal header. 
+ if not os.path.exists(OUT_SUMMARY_MD): + with open(OUT_SUMMARY_MD, "w", encoding="utf-8") as f: + f.write(f"# Security summary for team `{TEAM_SLUG}`\n\n") + + with open(OUT_SUMMARY_MD, "a", encoding="utf-8") as f: + f.writelines(summary_lines) + + +if __name__ == "__main__": + main() diff --git a/src/security/extract_team_security_stats.py b/src/security/extract_team_security_stats.py new file mode 100644 index 0000000..da1b6ac --- /dev/null +++ b/src/security/extract_team_security_stats.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python3 +# +# Copyright 2026 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# pylint: skip-file +""" +Extract security statistics per GitHub team. 
+ +Model: +- Source of truth: GitHub Issues +- Scope: repositories owned by a given GitHub team +- Unit: one Issue = one logical vulnerability (fingerprint) + +Outputs: +- data/issues_snapshot.json +- data/events_flat.csv +- reports/summary.md +""" + +import csv +import json +import logging +import os +import re +from datetime import datetime +from github import Github + + +# -------------------- +# Configuration +# -------------------- +def require_env(key: str) -> str: + """Return the value of environment variable *key*, or exit.""" + try: + return os.environ[key] + except KeyError as exc: + raise SystemExit(f"Missing required environment variable: {key}") from exc + + +GITHUB_TOKEN = require_env("GITHUB_TOKEN") +ORG = require_env("GITHUB_ORG") +TEAM_SLUG = require_env("GITHUB_TEAM_SLUG") + +OUT_DATA = "data" +OUT_REPORTS = "reports" + +SEC_LABEL_PREFIX = "sec:" + +SEC_EVENT_RE = re.compile(r"\[sec-event\](.*?)\[/sec-event\]", re.S) +SECMETA_RE = re.compile(r"```secmeta(.*?)```", re.S) + +# -------------------- +# Helpers +# -------------------- + + +def ensure_dirs(): + """Create output directories if they don't exist.""" + os.makedirs(OUT_DATA, exist_ok=True) + os.makedirs(OUT_REPORTS, exist_ok=True) + + +def parse_kv_block(block: str) -> dict: + """Parse a ``key=value``-per-line block into a dict.""" + data = {} + for line in block.splitlines(): + line = line.strip() + if not line or "=" not in line: + continue + k, v = line.split("=", 1) + data[k.strip()] = v.strip() + return data + + +def parse_secmeta(body: str) -> dict: + """Extract the secmeta key-value block from an issue body.""" + match = SECMETA_RE.search(body or "") + if not match: + return {} + return parse_kv_block(match.group(1)) + + +def parse_events(comments): + """Extract ``[sec-event]`` blocks from issue comments.""" + events = [] + for c in comments: + for raw in SEC_EVENT_RE.findall(c.body or ""): + evt = parse_kv_block(raw) + evt["timestamp"] = c.created_at.isoformat() + events.append(evt) 
+ return events + + +def issue_has_sec_label(issue): + """Return ``True`` if *issue* carries any ``sec:`` prefixed label.""" + return any(l.name.startswith(SEC_LABEL_PREFIX) for l in issue.labels) + + +# -------------------- +# Main extraction +# -------------------- + + +def main(): + """Extract security statistics from GitHub Issues for the configured team.""" + # TODO decide about changes related to this script + logging.warning( + "This script is deprecated and may be removed in the future. Please refer to the updated documentation for deriving security metrics." + ) + return + + ensure_dirs() + + gh = Github(GITHUB_TOKEN) + org = gh.get_organization(ORG) + team = org.get_team_by_slug(TEAM_SLUG) + + repos = list(team.get_repos()) + + snapshot = [] + flat_events = [] + + for repo in repos: + issues = repo.get_issues(state="all") + for issue in issues: + # Skip PRs that may be returned by the issues API + if getattr(issue, "pull_request", None): + continue + + if not issue_has_sec_label(issue): + continue + + secmeta = parse_secmeta(issue.body or "") + events = parse_events(issue.get_comments()) + + snapshot.append( + { + "repo": repo.full_name, + "issue_number": issue.number, + "title": issue.title, + "state": issue.state, + "labels": [l.name for l in issue.labels], + "secmeta": secmeta, + "created_at": issue.created_at.isoformat(), + "updated_at": issue.updated_at.isoformat(), + "event_count": len(events), + } + ) + + for e in events: + fp = secmeta.get("fingerprint") if secmeta else None + if not fp: + continue # ignore events without a fingerprint + flat_events.append( + { + "repo": repo.full_name, + "issue_number": issue.number, + "fingerprint": fp, + "action": e.get("action"), + "reason": e.get("reason"), + "timestamp": e.get("timestamp"), + } + ) + + # Write snapshot + with open(os.path.join(OUT_DATA, "issues_snapshot.json"), "w") as f: + json.dump(snapshot, f, indent=2) + + # Write flat events + with open(os.path.join(OUT_DATA, "events_flat.csv"), "w", 
newline="") as f: + writer = csv.DictWriter(f, fieldnames=["repo", "issue_number", "fingerprint", "action", "reason", "timestamp"]) + writer.writeheader() + writer.writerows(flat_events) + + # Summary report + total = len(snapshot) + by_sev = {} + + for item in snapshot: + sev = next((l for l in item["labels"] if l.startswith("sec:sev/")), "sec:sev/unknown") + by_sev[sev] = by_sev.get(sev, 0) + 1 + + with open(os.path.join(OUT_REPORTS, "summary.md"), "w") as f: + f.write(f"# Security summary for team `{TEAM_SLUG}`\n\n") + f.write(f"Generated at: {datetime.utcnow().isoformat()} UTC\n\n") + f.write(f"## Total security issues: {total}\n\n") + f.write("## By severity\n\n") + for sev, cnt in sorted(by_sev.items()): + f.write(f"- {sev}: {cnt}\n") + + +if __name__ == "__main__": + main() diff --git a/tests/security/test_send_notifications.py b/tests/security/test_send_notifications.py index e110dac..db77005 100644 --- a/tests/security/test_send_notifications.py +++ b/tests/security/test_send_notifications.py @@ -149,7 +149,7 @@ def test_no_body_raises(monkeypatch: pytest.MonkeyPatch) -> None: args = _parse_args([]) # stdin is a tty in tests, so it should raise fake_stdin = types.SimpleNamespace(isatty=lambda: True) - monkeypatch.setattr("security.send_to_teams.sys.stdin", fake_stdin) + monkeypatch.setattr("security.send_notifications.sys.stdin", fake_stdin) with pytest.raises(SystemExit): _resolve_body(args) @@ -157,7 +157,7 @@ def test_from_stdin(monkeypatch: pytest.MonkeyPatch) -> None: """Body is read from stdin when neither --body nor --body-file is given.""" args = _parse_args([]) fake_stdin = types.SimpleNamespace(isatty=lambda: False, read=lambda: "piped content") - monkeypatch.setattr("security.send_to_teams.sys.stdin", fake_stdin) + monkeypatch.setattr("security.send_notifications.sys.stdin", fake_stdin) assert _resolve_body(args) == "piped content" @@ -187,7 +187,7 @@ def test_no_webhook_raises(monkeypatch: pytest.MonkeyPatch) -> None: def 
test_main_sends_when_not_dry_run(monkeypatch: pytest.MonkeyPatch) -> None: """Non-dry-run path: main() calls send_to_teams with the webhook URL.""" calls: list[tuple] = [] - monkeypatch.setattr("security.send_to_teams.send_to_teams", lambda url, payload: calls.append((url, payload))) + monkeypatch.setattr("security.send_notifications.send_to_teams", lambda url, payload: calls.append((url, payload))) main(["--body", "hi", "--webhook-url", "https://hook"]) assert len(calls) == 1 assert calls[0][0] == "https://hook" @@ -206,7 +206,7 @@ def fake_post(url, **kwargs): calls.append((url, kwargs)) return types.SimpleNamespace(status_code=200, text="1") - monkeypatch.setattr("security.send_to_teams.requests.post", fake_post) + monkeypatch.setattr("security.send_notifications.requests.post", fake_post) send_to_teams("https://hook", {"type": "message"}) assert len(calls) == 1 @@ -214,6 +214,6 @@ def test_failure_raises(monkeypatch: pytest.MonkeyPatch) -> None: def fake_post(url, **kwargs): return types.SimpleNamespace(status_code=500, text="error") - monkeypatch.setattr("security.send_to_teams.requests.post", fake_post) + monkeypatch.setattr("security.send_notifications.requests.post", fake_post) with pytest.raises(SystemExit, match="failed"): send_to_teams("https://hook", {"type": "message"}) From e4aa1592f43a6528391b5b4fded9f89fd648da60 Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Thu, 16 Apr 2026 17:04:16 +0200 Subject: [PATCH 05/16] Removing aquasec-branch-comparison.yml --- .../workflows/aquasec-branch-comparison.yml | 77 ------------------- 1 file changed, 77 deletions(-) delete mode 100644 .github/workflows/aquasec-branch-comparison.yml diff --git a/.github/workflows/aquasec-branch-comparison.yml b/.github/workflows/aquasec-branch-comparison.yml deleted file mode 100644 index 1e560a3..0000000 --- a/.github/workflows/aquasec-branch-comparison.yml +++ /dev/null @@ -1,77 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, 
Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -name: AquaSec Branch Comparison - -on: - pull_request: - types: [ opened, synchronize, reopened ] - -concurrency: - group: aquasec-branch-comparison-${{ github.event.pull_request.number }} - cancel-in-progress: true - -permissions: - contents: read - pull-requests: write - -jobs: - branch-comparison: - name: AquaSec Branch Comparison - if: ${{ !github.event.pull_request.head.repo.fork }} - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd - with: - persist-credentials: false - fetch-depth: 0 - - - name: Compare branches - id: aquasec - uses: AbsaOSS/aquasec-scan-results@15ee405515a000288b4ae9cdcb9943ea974f74b7 - with: - aqua-key: ${{ secrets.AQUA_KEY }} - aqua-secret: ${{ secrets.AQUA_SECRET }} - group-id: ${{ secrets.AQUA_GROUP_ID }} - repository-id: ${{ secrets.AQUA_REPOSITORY_ID }} - dev-branch-comparison: 'true' - - - name: Find existing PR comment - if: always() && steps.aquasec.outputs.comparison-summary-file != '' - uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad - id: find-comment - with: - issue-number: ${{ github.event.pull_request.number }} - comment-author: 'github-actions[bot]' - body-includes: '' - - - name: Post or update PR comment - if: always() && steps.aquasec.outputs.comparison-summary-file != '' - uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 - with: - issue-number: ${{ 
github.event.pull_request.number }} - comment-id: ${{ steps.find-comment.outputs.comment-id }} - edit-mode: replace - body-path: ${{ steps.aquasec.outputs.comparison-summary-file }} - - - name: Upload comparison summary as artifact - if: always() && steps.aquasec.outputs.comparison-summary-file != '' - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f - with: - name: aquasec-comparison-summary-pr-${{ github.event.pull_request.number }} - path: ${{ steps.aquasec.outputs.comparison-summary-file }} - retention-days: 7 From 0d893a827389b73fbeefd4734bd7c9147b5f4fc7 Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Fri, 17 Apr 2026 10:52:08 +0200 Subject: [PATCH 06/16] Fixing the init modules and copilot instructions. --- .github/copilot-instructions.md | 2 +- src/core/__init__.py | 2 -- src/core/github/__init__.py | 2 -- src/security/__init__.py | 2 -- src/security/alerts/__init__.py | 2 -- src/security/issues/__init__.py | 2 -- src/security/notifications/__init__.py | 2 -- 7 files changed, 1 insertion(+), 13 deletions(-) diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 1032088..8b5c60e 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -6,7 +6,7 @@ Each workflow domain is a self-contained package under `src/`. The workflows are designed to be called via `workflow_call` from other repositories. Structure -``` +```text src/ ├── core/ # Shared foundation (GitHub API, config, helpers) │ ├── github/ # GitHub CLI wrappers (issues, projects) diff --git a/src/core/__init__.py b/src/core/__init__.py index 228a826..ebfbdd3 100644 --- a/src/core/__init__.py +++ b/src/core/__init__.py @@ -13,5 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -"""Core utilities reusable across workflow solutions.""" diff --git a/src/core/github/__init__.py b/src/core/github/__init__.py index 24ae5d5..ebfbdd3 100644 --- a/src/core/github/__init__.py +++ b/src/core/github/__init__.py @@ -13,5 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -"""GitHub API wrappers – CLI subprocess helpers, Issues CRUD, Projects V2 GraphQL.""" diff --git a/src/security/__init__.py b/src/security/__init__.py index 8208bf4..ebfbdd3 100644 --- a/src/security/__init__.py +++ b/src/security/__init__.py @@ -13,5 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -"""Security workflow – alert-to-issue sync pipeline.""" diff --git a/src/security/alerts/__init__.py b/src/security/alerts/__init__.py index aad9ddb..ebfbdd3 100644 --- a/src/security/alerts/__init__.py +++ b/src/security/alerts/__init__.py @@ -13,5 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -"""Alert domain – models, parsing, and constants.""" diff --git a/src/security/issues/__init__.py b/src/security/issues/__init__.py index 481b539..ebfbdd3 100644 --- a/src/security/issues/__init__.py +++ b/src/security/issues/__init__.py @@ -13,5 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -"""Issue management domain – builder, sync, metadata, and templates.""" diff --git a/src/security/notifications/__init__.py b/src/security/notifications/__init__.py index 7b5f1f0..ebfbdd3 100644 --- a/src/security/notifications/__init__.py +++ b/src/security/notifications/__init__.py @@ -13,5 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -"""Notification domain – Teams webhook integration.""" From df3dc61ba51c0fe8c5f05c1be0f5581735c7113a Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Fri, 17 Apr 2026 11:30:01 +0200 Subject: [PATCH 07/16] Test purposes. --- .github/workflows/aquasec-scan.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/aquasec-scan.yml b/.github/workflows/aquasec-scan.yml index 4cbf4b7..9c34215 100644 --- a/.github/workflows/aquasec-scan.yml +++ b/.github/workflows/aquasec-scan.yml @@ -109,7 +109,7 @@ jobs: uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 with: repository: AbsaOSS/organizational-workflows - ref: master + ref: ${{ github.action_ref }} # TODO: change back to master after testing path: org-workflows persist-credentials: false From 2cc03548209cea1d973b003f4c84da16fdb5b617 Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Fri, 17 Apr 2026 11:38:12 +0200 Subject: [PATCH 08/16] Test purposes. --- .github/workflows/aquasec-scan.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/aquasec-scan.yml b/.github/workflows/aquasec-scan.yml index 9c34215..9eb6d37 100644 --- a/.github/workflows/aquasec-scan.yml +++ b/.github/workflows/aquasec-scan.yml @@ -79,7 +79,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 with: persist-credentials: false fetch-depth: 0 @@ -95,7 +95,7 @@ jobs: verbose-logging: ${{ inputs.verbose-logging }} - name: Upload scan results to GitHub Security and quality - uses: github/codeql-action/upload-sarif@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 + uses: github/codeql-action/upload-sarif@ce64ddcb0d8d890d2df4a9d1c04ff297367dea2a # v3 with: sarif_file: ${{ steps.aquasec.outputs.nightscan-sarif-file }} category: aquasec @@ -106,7 +106,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: 
actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 with: repository: AbsaOSS/organizational-workflows ref: ${{ github.action_ref }} # TODO: change back to master after testing @@ -114,7 +114,7 @@ jobs: persist-credentials: false - name: Set up Python - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 with: python-version: '3.14' cache: 'pip' From 0fcd998d129ea0ac112730e345cfe128f8868073 Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Fri, 17 Apr 2026 11:45:12 +0200 Subject: [PATCH 09/16] Test purposes. --- .github/workflows/aquasec-scan.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/aquasec-scan.yml b/.github/workflows/aquasec-scan.yml index 9eb6d37..e384dd6 100644 --- a/.github/workflows/aquasec-scan.yml +++ b/.github/workflows/aquasec-scan.yml @@ -79,7 +79,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd with: persist-credentials: false fetch-depth: 0 @@ -95,7 +95,7 @@ jobs: verbose-logging: ${{ inputs.verbose-logging }} - name: Upload scan results to GitHub Security and quality - uses: github/codeql-action/upload-sarif@ce64ddcb0d8d890d2df4a9d1c04ff297367dea2a # v3 + uses: github/codeql-action/upload-sarif@7434149006143a4d75b82a2f411ef15b03ccc2d7 with: sarif_file: ${{ steps.aquasec.outputs.nightscan-sarif-file }} category: aquasec @@ -106,7 +106,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd with: repository: AbsaOSS/organizational-workflows ref: ${{ github.action_ref }} # TODO: change back to master after testing @@ -114,7 +114,7 @@ jobs: 
persist-credentials: false - name: Set up Python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 with: python-version: '3.14' cache: 'pip' From 446f919e6966954a9a943d1a7ce11b3efe93a077 Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Fri, 17 Apr 2026 11:51:30 +0200 Subject: [PATCH 10/16] Test purposes. --- .github/workflows/aquasec-scan.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/aquasec-scan.yml b/.github/workflows/aquasec-scan.yml index e384dd6..750e1a7 100644 --- a/.github/workflows/aquasec-scan.yml +++ b/.github/workflows/aquasec-scan.yml @@ -109,7 +109,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd with: repository: AbsaOSS/organizational-workflows - ref: ${{ github.action_ref }} # TODO: change back to master after testing + ref: feature/new-folder-structure # TODO: change back to master after testing path: org-workflows persist-credentials: false From 449395e813f34c2764596c47c1443fb9ee0b5e93 Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Fri, 17 Apr 2026 11:58:42 +0200 Subject: [PATCH 11/16] Adding PYTHONPATH to the solution. 
--- .github/workflows/aquasec-scan.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/aquasec-scan.yml b/.github/workflows/aquasec-scan.yml index 750e1a7..cd7f420 100644 --- a/.github/workflows/aquasec-scan.yml +++ b/.github/workflows/aquasec-scan.yml @@ -125,6 +125,7 @@ jobs: - name: Create issues from security alerts env: + PYTHONPATH: org-workflows/src GH_TOKEN: ${{ github.token }} TEAMS_WEBHOOK_URL: ${{ secrets.TEAMS_WEBHOOK_URL }} SEVERITY_PRIORITY_MAP: ${{ inputs.severity-priority-map }} From 2485c78ef93e50c451958dfe325d9d9b980963f3 Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Fri, 17 Apr 2026 12:08:15 +0200 Subject: [PATCH 12/16] Removing test configuration --- .github/workflows/aquasec-scan.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/aquasec-scan.yml b/.github/workflows/aquasec-scan.yml index cd7f420..dc96f28 100644 --- a/.github/workflows/aquasec-scan.yml +++ b/.github/workflows/aquasec-scan.yml @@ -109,7 +109,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd with: repository: AbsaOSS/organizational-workflows - ref: feature/new-folder-structure # TODO: change back to master after testing + ref: master path: org-workflows persist-credentials: false From a8e3ae99b5f7544a592fbe81541c987c02ad2531 Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Fri, 17 Apr 2026 14:11:12 +0200 Subject: [PATCH 13/16] Removing sec stats and remove-adept-to-close-on-issue-close.yml --- .../remove-adept-to-close-on-issue-close.yml | 73 ------ pyproject.toml | 2 - src/security/derive_team_security_metrics.py | 208 ------------------ src/security/extract_team_security_stats.py | 206 ----------------- 4 files changed, 489 deletions(-) delete mode 100644 .github/workflows/remove-adept-to-close-on-issue-close.yml delete mode 100644 src/security/derive_team_security_metrics.py delete mode 100644 src/security/extract_team_security_stats.py diff --git 
a/.github/workflows/remove-adept-to-close-on-issue-close.yml b/.github/workflows/remove-adept-to-close-on-issue-close.yml deleted file mode 100644 index 88acb4d..0000000 --- a/.github/workflows/remove-adept-to-close-on-issue-close.yml +++ /dev/null @@ -1,73 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Reusable workflow – Remove sec:adept-to-close label when an issue is closed. -# -# Called from application repositories via workflow_call. -# The caller must trigger on `issues: [closed]`. -# Note: for `workflow_call`, the called workflow receives the same event payload as the caller, -# so `context.payload` (aka `github.event`) is populated without needing to "forward" it via inputs. - -name: Remove sec:adept-to-close on close - -on: - workflow_call: - -permissions: - issues: write - -jobs: - cleanup-label: - runs-on: ubuntu-latest - steps: - - name: Remove label when conditions match - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd - with: - script: | - const issue = context.payload.issue; - - // Safety: ignore PRs (they can appear as issues in GitHub UI) - if (issue.pull_request) { - core.info('Skipping: payload refers to a pull request, not an issue.'); - return; - } - - const labels = (issue.labels ?? []) - .map(l => (typeof l === 'string' ? 
l : l?.name)) - .filter(Boolean); - - const hasScopeSecurity = labels.includes('scope:security'); - const hasTechDebt = labels.includes('type:tech-debt'); - const hasAdeptToClose = labels.includes('sec:adept-to-close'); - - if (!hasScopeSecurity || !hasTechDebt) { - core.info( - `Skipping: required labels missing (scope:security=${hasScopeSecurity}, type:tech-debt=${hasTechDebt}).` - ); - return; - } - - if (!hasAdeptToClose) { - core.info('No-op: label sec:adept-to-close is not present on the issue.'); - return; - } - - await github.rest.issues.removeLabel({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: issue.number, - name: 'sec:adept-to-close', - }); diff --git a/pyproject.toml b/pyproject.toml index 957e1a1..ffde4ee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,8 +10,6 @@ force-exclude = '''test''' [tool.coverage.run] omit = [ "tests/*", - "src/security/derive_team_security_metrics.py", - "src/security/extract_team_security_stats.py", ] [tool.mypy] diff --git a/src/security/derive_team_security_metrics.py b/src/security/derive_team_security_metrics.py deleted file mode 100644 index 8316e66..0000000 --- a/src/security/derive_team_security_metrics.py +++ /dev/null @@ -1,208 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# pylint: skip-file -""" -Derive team security metrics from issue snapshots (Issues-only). 
- -Resurfacing definition (B): -- A fingerprint is considered 'resurfaced' when its occurrence_count transitions - from 0 in the previous snapshot to >0 in the current snapshot. - -Inputs: -- data/issues_snapshot.json (required) -- data/issues_snapshot.prev.json (optional; if missing, resurfacing cannot be computed) - -Outputs: -- reports/metrics.json -- reports/summary.md (appends derived metrics) -""" - -import json -import logging -import os -from datetime import datetime -from typing import Any, Dict, List, Optional - -SNAPSHOT_CUR = os.environ.get("SNAPSHOT_CURRENT", "data/issues_snapshot.json") -SNAPSHOT_PREV = os.environ.get("SNAPSHOT_PREVIOUS", "data/issues_snapshot.prev.json") - -OUT_METRICS_JSON = os.environ.get("OUT_METRICS_JSON", "reports/metrics.json") -OUT_SUMMARY_MD = os.environ.get("OUT_SUMMARY_MD", "reports/summary.md") - - -def require_env(key: str) -> str: - """Return the value of environment variable *key*, or exit.""" - try: - return os.environ[key] - except KeyError as exc: - raise SystemExit(f"Missing required environment variable: {key}") from exc - - -TEAM_SLUG = require_env("GITHUB_TEAM_SLUG") - - -def _safe_int(v: Any, default: int = 0) -> int: - """Coerce *v* to ``int``, returning *default* on failure.""" - try: - if v is None: - return default - if isinstance(v, int): - return v - s = str(v).strip() - if s == "": - return default - return int(float(s)) - except Exception: - return default - - -def _load_json(path: str) -> Optional[Any]: - """Load and return JSON from *path*, or ``None`` if the file is missing.""" - if not os.path.exists(path): - return None - with open(path, "r", encoding="utf-8") as f: - return json.load(f) - - -def _index_by_fingerprint(snapshot: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]: - """Index snapshot items by their secmeta fingerprint.""" - idx: Dict[str, Dict[str, Any]] = {} - for item in snapshot: - fp = (item.get("secmeta") or {}).get("fingerprint") - if not fp: - # If secmeta is missing or 
malformed, it cannot participate in fingerprint-level stats. - continue - idx[fp] = item - return idx - - -def _severity_from_labels(labels: List[str]) -> str: - """Extract the severity token from ``sec:sev/`` labels.""" - for l in labels: - if l.startswith("sec:sev/"): - return l.split("/", 1)[1] - return "unknown" - - -def main() -> None: - """Derive and write team security metrics from issue snapshots.""" - # TODO decide about changes related to this script - logging.warning( - "This script is deprecated and may be removed in the future. Please refer to the updated documentation for deriving security metrics." - ) - return - - cur = _load_json(SNAPSHOT_CUR) - if cur is None: - raise SystemExit(f"Missing current snapshot: {SNAPSHOT_CUR}") - - if not isinstance(cur, list): - raise SystemExit(f"Current snapshot is not a list: {SNAPSHOT_CUR}") - - prev = _load_json(SNAPSHOT_PREV) - - cur_idx = _index_by_fingerprint(cur) - prev_idx = _index_by_fingerprint(prev) if isinstance(prev, list) else {} - - # Basic counts - total = len(cur) - by_sev: Dict[str, int] = {} - postponed = 0 - needs_review = 0 - - for item in cur: - labels = item.get("labels") or [] - sev = _severity_from_labels(labels) - by_sev[sev] = by_sev.get(sev, 0) + 1 - if "sec:state/postponed" in labels: - postponed += 1 - if "sec:state/needs-review" in labels: - needs_review += 1 - - # Resurfacing (B): prev occurrence_count == 0 and current > 0 - resurfaced: List[Dict[str, Any]] = [] - if prev_idx: - for fp, cur_item in cur_idx.items(): - cur_occ = _safe_int((cur_item.get("secmeta") or {}).get("occurrence_count"), 0) - prev_item = prev_idx.get(fp) - prev_occ = _safe_int(((prev_item or {}).get("secmeta") or {}).get("occurrence_count"), 0) - if prev_item is not None and prev_occ == 0 and cur_occ > 0: - resurfaced.append( - { - "fingerprint": fp, - "repo": cur_item.get("repo"), - "issue_number": cur_item.get("issue_number"), - "title": cur_item.get("title"), - "severity": 
_severity_from_labels(cur_item.get("labels") or []), - "prev_occurrence_count": prev_occ, - "current_occurrence_count": cur_occ, - } - ) - - metrics = { - "team": TEAM_SLUG, - "generated_at_utc": datetime.utcnow().isoformat() + "Z", - "snapshot_current": SNAPSHOT_CUR, - "snapshot_previous": SNAPSHOT_PREV if prev_idx else None, - "counts": { - "total_security_issues": total, - "postponed": postponed, - "needs_review": needs_review, - "by_severity": dict(sorted(by_sev.items())), - }, - "resurfaced": { - "definition": "B: fingerprint occurrence_count from 0 (previous snapshot) to >0 (current snapshot)", - "count": len(resurfaced), - "items": resurfaced, - }, - } - - os.makedirs(os.path.dirname(OUT_METRICS_JSON), exist_ok=True) - os.makedirs(os.path.dirname(OUT_SUMMARY_MD), exist_ok=True) - - with open(OUT_METRICS_JSON, "w", encoding="utf-8") as f: - json.dump(metrics, f, indent=2) - - # Append to (or create) summary.md - summary_lines: List[str] = [] - summary_lines.append(f"\n## Derived metrics\n") - summary_lines.append(f"Generated at: {metrics['generated_at_utc']}\n") - if metrics["snapshot_previous"] is None: - summary_lines.append("- Resurfacing: not computed (no previous snapshot found)\n") - else: - summary_lines.append(f"- Resurfaced fingerprints (definition B): {metrics['resurfaced']['count']}\n") - if resurfaced: - summary_lines.append("\n### Resurfaced items\n") - for r in resurfaced[:50]: - summary_lines.append( - f"- {r['severity']} {r['repo']}#{r['issue_number']} (occ {r['prev_occurrence_count']} -> {r['current_occurrence_count']}): {r['title']}\n" - ) - if len(resurfaced) > 50: - summary_lines.append(f"- ... and {len(resurfaced) - 50} more\n") - - # Ensure summary exists; if not, create a minimal header. 
- if not os.path.exists(OUT_SUMMARY_MD): - with open(OUT_SUMMARY_MD, "w", encoding="utf-8") as f: - f.write(f"# Security summary for team `{TEAM_SLUG}`\n\n") - - with open(OUT_SUMMARY_MD, "a", encoding="utf-8") as f: - f.writelines(summary_lines) - - -if __name__ == "__main__": - main() diff --git a/src/security/extract_team_security_stats.py b/src/security/extract_team_security_stats.py deleted file mode 100644 index da1b6ac..0000000 --- a/src/security/extract_team_security_stats.py +++ /dev/null @@ -1,206 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# pylint: skip-file -""" -Extract security statistics per GitHub team. 
- -Model: -- Source of truth: GitHub Issues -- Scope: repositories owned by a given GitHub team -- Unit: one Issue = one logical vulnerability (fingerprint) - -Outputs: -- data/issues_snapshot.json -- data/events_flat.csv -- reports/summary.md -""" - -import csv -import json -import logging -import os -import re -from datetime import datetime -from github import Github - - -# -------------------- -# Configuration -# -------------------- -def require_env(key: str) -> str: - """Return the value of environment variable *key*, or exit.""" - try: - return os.environ[key] - except KeyError as exc: - raise SystemExit(f"Missing required environment variable: {key}") from exc - - -GITHUB_TOKEN = require_env("GITHUB_TOKEN") -ORG = require_env("GITHUB_ORG") -TEAM_SLUG = require_env("GITHUB_TEAM_SLUG") - -OUT_DATA = "data" -OUT_REPORTS = "reports" - -SEC_LABEL_PREFIX = "sec:" - -SEC_EVENT_RE = re.compile(r"\[sec-event\](.*?)\[/sec-event\]", re.S) -SECMETA_RE = re.compile(r"```secmeta(.*?)```", re.S) - -# -------------------- -# Helpers -# -------------------- - - -def ensure_dirs(): - """Create output directories if they don't exist.""" - os.makedirs(OUT_DATA, exist_ok=True) - os.makedirs(OUT_REPORTS, exist_ok=True) - - -def parse_kv_block(block: str) -> dict: - """Parse a ``key=value``-per-line block into a dict.""" - data = {} - for line in block.splitlines(): - line = line.strip() - if not line or "=" not in line: - continue - k, v = line.split("=", 1) - data[k.strip()] = v.strip() - return data - - -def parse_secmeta(body: str) -> dict: - """Extract the secmeta key-value block from an issue body.""" - match = SECMETA_RE.search(body or "") - if not match: - return {} - return parse_kv_block(match.group(1)) - - -def parse_events(comments): - """Extract ``[sec-event]`` blocks from issue comments.""" - events = [] - for c in comments: - for raw in SEC_EVENT_RE.findall(c.body or ""): - evt = parse_kv_block(raw) - evt["timestamp"] = c.created_at.isoformat() - events.append(evt) 
- return events - - -def issue_has_sec_label(issue): - """Return ``True`` if *issue* carries any ``sec:`` prefixed label.""" - return any(l.name.startswith(SEC_LABEL_PREFIX) for l in issue.labels) - - -# -------------------- -# Main extraction -# -------------------- - - -def main(): - """Extract security statistics from GitHub Issues for the configured team.""" - # TODO decide about changes related to this script - logging.warning( - "This script is deprecated and may be removed in the future. Please refer to the updated documentation for deriving security metrics." - ) - return - - ensure_dirs() - - gh = Github(GITHUB_TOKEN) - org = gh.get_organization(ORG) - team = org.get_team_by_slug(TEAM_SLUG) - - repos = list(team.get_repos()) - - snapshot = [] - flat_events = [] - - for repo in repos: - issues = repo.get_issues(state="all") - for issue in issues: - # Skip PRs that may be returned by the issues API - if getattr(issue, "pull_request", None): - continue - - if not issue_has_sec_label(issue): - continue - - secmeta = parse_secmeta(issue.body or "") - events = parse_events(issue.get_comments()) - - snapshot.append( - { - "repo": repo.full_name, - "issue_number": issue.number, - "title": issue.title, - "state": issue.state, - "labels": [l.name for l in issue.labels], - "secmeta": secmeta, - "created_at": issue.created_at.isoformat(), - "updated_at": issue.updated_at.isoformat(), - "event_count": len(events), - } - ) - - for e in events: - fp = secmeta.get("fingerprint") if secmeta else None - if not fp: - continue # ignore events without a fingerprint - flat_events.append( - { - "repo": repo.full_name, - "issue_number": issue.number, - "fingerprint": fp, - "action": e.get("action"), - "reason": e.get("reason"), - "timestamp": e.get("timestamp"), - } - ) - - # Write snapshot - with open(os.path.join(OUT_DATA, "issues_snapshot.json"), "w") as f: - json.dump(snapshot, f, indent=2) - - # Write flat events - with open(os.path.join(OUT_DATA, "events_flat.csv"), "w", 
newline="") as f: - writer = csv.DictWriter(f, fieldnames=["repo", "issue_number", "fingerprint", "action", "reason", "timestamp"]) - writer.writeheader() - writer.writerows(flat_events) - - # Summary report - total = len(snapshot) - by_sev = {} - - for item in snapshot: - sev = next((l for l in item["labels"] if l.startswith("sec:sev/")), "sec:sev/unknown") - by_sev[sev] = by_sev.get(sev, 0) + 1 - - with open(os.path.join(OUT_REPORTS, "summary.md"), "w") as f: - f.write(f"# Security summary for team `{TEAM_SLUG}`\n\n") - f.write(f"Generated at: {datetime.utcnow().isoformat()} UTC\n\n") - f.write(f"## Total security issues: {total}\n\n") - f.write("## By severity\n\n") - for sev, cnt in sorted(by_sev.items()): - f.write(f"- {sev}: {cnt}\n") - - -if __name__ == "__main__": - main() From 6f9f375cfec0619f3951d456f7ae295c1a69c0cf Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Fri, 17 Apr 2026 14:15:32 +0200 Subject: [PATCH 14/16] Removing sec-events (gh comments) --- src/core/github/client.py | 7 +- src/core/github/projects.py | 16 +-- src/security/constants.py | 3 - src/security/issues/sec_events.py | 72 ---------- src/security/issues/sync.py | 92 +------------ tests/security/issues/test_sec_events.py | 163 ----------------------- tests/security/issues/test_sync.py | 61 +-------- tests/security/test_constants.py | 9 -- 8 files changed, 7 insertions(+), 416 deletions(-) delete mode 100644 src/security/issues/sec_events.py delete mode 100644 tests/security/issues/test_sec_events.py diff --git a/src/core/github/client.py b/src/core/github/client.py index 12b39ad..0227f61 100644 --- a/src/core/github/client.py +++ b/src/core/github/client.py @@ -18,29 +18,26 @@ import logging import subprocess -from collections.abc import Mapping def run_cmd( cmd: list[str], *, capture_output: bool = True, - env: Mapping[str, str] | None = None, ) -> subprocess.CompletedProcess: """Run *cmd* as a subprocess and return the completed process.""" - return subprocess.run(cmd, 
check=False, capture_output=capture_output, text=True, env=env) + return subprocess.run(cmd, check=False, capture_output=capture_output, text=True) def run_gh( args: list[str], *, capture_output: bool = True, - env: Mapping[str, str] | None = None, ) -> subprocess.CompletedProcess: """Run a ``gh`` CLI command and return the completed process.""" cmd = ["gh"] + args try: - return run_cmd(cmd, capture_output=capture_output, env=env) + return run_cmd(cmd, capture_output=capture_output) except FileNotFoundError as exc: logging.error("gh CLI not found. Install and authenticate gh.") raise SystemExit(1) from exc diff --git a/src/core/github/projects.py b/src/core/github/projects.py index 7e16f04..6d73c9c 100644 --- a/src/core/github/projects.py +++ b/src/core/github/projects.py @@ -21,8 +21,6 @@ import json import logging -import os -from collections.abc import Mapping from dataclasses import dataclass from typing import Any @@ -52,21 +50,11 @@ class ProjectPriorityField: def _run_graphql(query: str, variables: dict[str, Any] | None = None) -> dict[str, Any] | None: - """Execute a GraphQL query via ``gh api graphql`` and return parsed JSON. - - When ``GH_PROJECT_ONLY_TOKEN`` is set in the environment the GraphQL call is made - with that token instead of the default ``GH_TOKEN``. This allows cross-org - project access while the rest of the pipeline continues to use the scoped - ``github.token``. 
- """ + """Execute a GraphQL query via ``gh api graphql`` and return parsed JSON.""" args = ["api", "graphql", "-f", f"query={query}"] for k, v in (variables or {}).items(): args += ["-F", f"{k}={v}"] - env: Mapping[str, str] | None = None - project_token = os.environ.get("GH_PROJECT_ONLY_TOKEN", "") - if project_token: - env = {**os.environ, "GH_TOKEN": project_token} - res = run_gh(args, env=env) + res = run_gh(args) if res.returncode != 0: logging.warning(f"GraphQL call failed: {res.stderr}") return None diff --git a/src/security/constants.py b/src/security/constants.py index ed32aed..ae85d38 100644 --- a/src/security/constants.py +++ b/src/security/constants.py @@ -21,9 +21,6 @@ LABEL_EPIC = "epic" LABEL_SEC_ADEPT_TO_CLOSE = "sec:adept-to-close" -SEC_EVENT_OPEN = "open" -SEC_EVENT_REOPEN = "reopen" - SECMETA_TYPE_PARENT = "parent" SECMETA_TYPE_CHILD = "child" diff --git a/src/security/issues/sec_events.py b/src/security/issues/sec_events.py deleted file mode 100644 index 54625f4..0000000 --- a/src/security/issues/sec_events.py +++ /dev/null @@ -1,72 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -"""``[sec-event]`` comment blocks – parsing, rendering, and stripping -structured lifecycle-event blocks from issue bodies. 
-""" - -import re - -from security.issues.secmeta import render_kv_lines - - -def parse_sec_event_fields(raw: str) -> dict[str, str]: - """Parse ``key=value`` lines from a raw sec-event block.""" - fields: dict[str, str] = {} - for line in (raw or "").splitlines(): - s = line.strip() - if not s or "=" not in s: - continue - k, v = s.split("=", 1) - fields[k.strip()] = v.strip() - return fields - - -def render_sec_event(fields: dict[str, str]) -> str: - """Render a structured ``[sec-event]`` comment block from *fields*.""" - preferred_order = [ - "action", - "seen_at", - "source", - "gh_alert_number", - "occurrence_fp", - "commit_sha", - "path", - "start_line", - "end_line", - ] - lines = ["[sec-event]"] - lines.extend(render_kv_lines(fields, preferred_order, skip_empty=True)) - lines.append("[/sec-event]") - return "\n".join(lines) + "\n" - - -def strip_sec_events_from_body(body: str) -> str: - """Remove any legacy sec-event content from an issue body. - - - Drops a dedicated '## Security Events' section if present (from previous versions). - - Removes any inline [sec-event] blocks. - """ - - text = body or "" - # Drop everything from the header onward (the section was intended to be last). - m = re.search(r"\n##\s+Security\s+Events\s*\n", text, flags=re.IGNORECASE) - if m: - text = text[: m.start()].rstrip() + "\n" - # Remove any inline blocks. 
- text = re.compile(r"\[sec-event\]\s*(.*?)\s*\[/sec-event\]", re.S).sub("", text) - text = re.sub(r"\n{3,}", "\n\n", text).strip() + "\n" - return text diff --git a/src/security/issues/sync.py b/src/security/issues/sync.py index f414f0f..c0d076c 100644 --- a/src/security/issues/sync.py +++ b/src/security/issues/sync.py @@ -24,11 +24,10 @@ import logging -from core.helpers import iso_date, normalize_path, utc_today +from core.helpers import iso_date, normalize_path from core.github.issues import ( gh_issue_add_labels, gh_issue_add_sub_issue_by_number, - gh_issue_comment, gh_issue_create, gh_issue_edit_body, gh_issue_edit_state, @@ -47,8 +46,6 @@ LABEL_SEC_ADEPT_TO_CLOSE, LABEL_TYPE_TECH_DEBT, NOT_AVAILABLE, - SEC_EVENT_OPEN, - SEC_EVENT_REOPEN, SECMETA_TYPE_CHILD, SECMETA_TYPE_PARENT, ) @@ -61,7 +58,6 @@ classify_category, ) from .models import AlertContext, IssueIndex, NotifiedIssue, SeverityChange, SyncContext, SyncResult -from .sec_events import render_sec_event, strip_sec_events_from_body from .secmeta import json_list, load_secmeta, parse_json_list, render_secmeta from .templates import PARENT_BODY_TEMPLATE @@ -127,30 +123,11 @@ def maybe_reopen_parent_issue( context, child_issue_number or "", ) - logging.info( - "DRY-RUN: would comment parent reopen sec-event on issue #%d (rule_id=%s)", - parent_issue.number, - rule_id, - ) parent_issue.state = "open" return if gh_issue_edit_state(repo, parent_issue.number, "open"): parent_issue.state = "open" - gh_issue_comment( - repo, - parent_issue.number, - render_sec_event( - { - "action": SEC_EVENT_REOPEN, - "seen_at": utc_today(), - "source": "code_scanning", - "rule_id": rule_id, - "context": context, - "child_issue": str(child_issue_number) if child_issue_number else "", - } - ), - ) def _close_resolved_parent_issues( @@ -288,7 +265,6 @@ def ensure_parent_issue( ).strip() + "\n" ) - rebuilt = strip_sec_events_from_body(rebuilt) # Snapshot the original body on first encounter so we can # defer the API call until all 
alerts have been processed. @@ -330,24 +306,6 @@ def ensure_parent_issue( if num is None: return None - # Parent lifecycle event (human visible): opened/created. - if dry_run: - logging.info("DRY-RUN: would comment parent open sec-event on issue #%d (rule_id=%s)", num, rule_id) - else: - gh_issue_comment( - repo_full, - num, - render_sec_event( - { - "action": SEC_EVENT_OPEN, - "seen_at": iso_date(alert.metadata.created_at), - "source": "code_scanning", - "rule_id": rule_id, - "severity": alert.metadata.severity, - } - ), - ) - created = Issue(number=num, state="open", title=title, body=body) issues[num] = created index.parent_by_rule_id[rule_id] = created @@ -504,24 +462,6 @@ def _handle_new_child_issue( logging.info("Add sub-issue link parent=#%d child=#%d (alert %d)", parent_issue.number, num, ctx.alert_number) gh_issue_add_sub_issue_by_number(ctx.repo, parent_issue.number, num) - gh_issue_comment( - ctx.repo, - num, - render_sec_event( - { - "action": SEC_EVENT_OPEN, - "seen_at": ctx.first_seen, - "source": "code_scanning", - "gh_alert_number": str(ctx.alert_number), - "occurrence_fp": str(ctx.occurrence_fp), - "commit_sha": str(ctx.commit_sha), - "path": str(ctx.path), - "start_line": str(ctx.start_line or ""), - "end_line": str(ctx.end_line or ""), - } - ), - ) - if sync.priority_sync is not None: sync.priority_sync.enqueue(ctx.repo, num, ctx.severity, sync.severity_priority_map) @@ -638,7 +578,6 @@ def _rebuild_and_apply_child_body( """Render a fresh child body from *secmeta* + template and apply if changed.""" human_body = build_child_issue_body(ctx.alert) new_body = render_secmeta(secmeta) + "\n\n" + human_body - new_body = strip_sec_events_from_body(new_body) if new_body != issue.body: if sync.dry_run: @@ -652,32 +591,6 @@ def _rebuild_and_apply_child_body( issue.body = new_body -def _comment_child_event( - *, - ctx: AlertContext, - sync: SyncContext, - issue: Issue, - reopened: bool, -) -> None: - """Post a reopen sec-event comment on the child issue.""" 
- if reopened: - if sync.dry_run: - logging.info("DRY-RUN: would comment reopen event on issue #%d (alert %d)", issue.number, ctx.alert_number) - else: - gh_issue_comment( - ctx.repo, - issue.number, - render_sec_event( - { - "action": SEC_EVENT_REOPEN, - "seen_at": utc_today(), - "source": "code_scanning", - "gh_alert_number": str(ctx.alert_number), - } - ), - ) - - def _sync_child_title_and_labels( *, ctx: AlertContext, @@ -758,10 +671,9 @@ def _handle_existing_child_issue( if parent_issue is None and ctx.rule_id: parent_issue = find_parent_issue(sync.index, rule_id=ctx.rule_id) - reopened = _maybe_reopen_child(ctx=ctx, sync=sync, issue=issue, parent_issue=parent_issue) + _maybe_reopen_child(ctx=ctx, sync=sync, issue=issue, parent_issue=parent_issue) secmeta, _ = _merge_child_secmeta(ctx=ctx, issue=issue) _rebuild_and_apply_child_body(ctx=ctx, sync=sync, issue=issue, secmeta=secmeta) - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=reopened) _sync_child_title_and_labels(ctx=ctx, sync=sync, issue=issue) if parent_issue is not None: diff --git a/tests/security/issues/test_sec_events.py b/tests/security/issues/test_sec_events.py deleted file mode 100644 index 849bc29..0000000 --- a/tests/security/issues/test_sec_events.py +++ /dev/null @@ -1,163 +0,0 @@ -# -# Copyright 2026 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -"""Unit tests for ``utils.sec_events``.""" - -import pytest - -from security.issues.sec_events import ( - parse_sec_event_fields, - render_sec_event, - strip_sec_events_from_body, -) - - -# ===================================================================== -# parse_sec_event_fields -# ===================================================================== - - -def test_basic_kv() -> None: - raw = "action=created\nseen_at=2026-01-01\nsource=aquasec" - result = parse_sec_event_fields(raw) - assert result == { - "action": "created", - "seen_at": "2026-01-01", - "source": "aquasec", - } - -def test_ignores_blank_and_no_equals() -> None: - raw = "action=created\n\njust-text\nfoo=bar" - result = parse_sec_event_fields(raw) - assert result == {"action": "created", "foo": "bar"} - -def test_equals_in_value() -> None: - raw = "path=a=b" - result = parse_sec_event_fields(raw) - assert result == {"path": "a=b"} - -def test_empty_string() -> None: - assert parse_sec_event_fields("") == {} - -def test_none_input() -> None: - assert parse_sec_event_fields(None) == {} - -def test_strips_whitespace() -> None: - raw = " action = created \n seen_at = 2026-01-01 " - result = parse_sec_event_fields(raw) - assert result == {"action": "created", "seen_at": "2026-01-01"} - - -# ===================================================================== -# render_sec_event -# ===================================================================== - - -def test_renders_fields_in_preferred_order() -> None: - fields = { - "commit_sha": "abc123", - "action": "created", - "seen_at": "2026-01-01", - } - rendered = render_sec_event(fields) - lines = rendered.strip().splitlines() - assert lines[0] == "[sec-event]" - assert lines[-1] == "[/sec-event]" - # action should come before commit_sha (preferred order) - assert lines.index("action=created") < lines.index("commit_sha=abc123") - -def test_includes_non_preferred_keys_sorted() -> None: - fields = {"action": "created", "z_custom": "1", "a_extra": 
"2"} - rendered = render_sec_event(fields) - # extra keys sorted alphabetically after preferred - assert "a_extra=2" in rendered - assert "z_custom=1" in rendered - lines = rendered.strip().splitlines() - idx_a = lines.index("a_extra=2") - idx_z = lines.index("z_custom=1") - assert idx_a < idx_z - -def test_skips_blank_values() -> None: - fields = {"action": "created", "path": " ", "source": ""} - rendered = render_sec_event(fields) - assert "path=" not in rendered - assert "source=" not in rendered - assert "action=created" in rendered - -def test_roundtrip() -> None: - fields = { - "action": "created", - "seen_at": "2026-01-01", - "source": "aquasec", - "gh_alert_number": "42", - "occurrence_fp": "fp123", - } - rendered = render_sec_event(fields) - # Extract inner content (skip opening/closing tags) - inner = "\n".join(rendered.strip().splitlines()[1:-1]) - parsed = parse_sec_event_fields(inner) - assert parsed == fields - - -# ===================================================================== -# strip_sec_events_from_body -# ===================================================================== - - -def test_removes_inline_block() -> None: - body = "Some text\n[sec-event]\naction=created\n[/sec-event]\nMore text" - result = strip_sec_events_from_body(body) - assert "[sec-event]" not in result - assert "Some text" in result - assert "More text" in result - -def test_removes_section_header() -> None: - body = "Intro\n\n## Security Events\nold stuff\n" - result = strip_sec_events_from_body(body) - assert "## Security Events" not in result - assert "Intro" in result - -def test_empty_body() -> None: - result = strip_sec_events_from_body("") - assert result.strip() == "" - -def test_none_body() -> None: - result = strip_sec_events_from_body(None) - assert result.strip() == "" - -def test_no_events() -> None: - body = "Just regular body text\n" - result = strip_sec_events_from_body(body) - assert "Just regular body text" in result - -def 
test_multiple_inline_blocks() -> None: - body = ( - "Text\n" - "[sec-event]\naction=created\n[/sec-event]\n" - "Middle\n" - "[sec-event]\naction=reopened\n[/sec-event]\n" - "End\n" - ) - result = strip_sec_events_from_body(body) - assert "[sec-event]" not in result - assert "Text" in result - assert "Middle" in result - assert "End" in result - -def test_collapses_excessive_newlines() -> None: - body = "A\n\n\n\n\nB\n" - result = strip_sec_events_from_body(body) - assert "\n\n\n" not in result diff --git a/tests/security/issues/test_sync.py b/tests/security/issues/test_sync.py index bd40352..746bdb2 100644 --- a/tests/security/issues/test_sync.py +++ b/tests/security/issues/test_sync.py @@ -27,7 +27,6 @@ from security.issues.sync import ( _append_notification, _close_resolved_parent_issues, - _comment_child_event, _ensure_child_linked_to_parent, _flush_parent_body_updates, _handle_existing_child_issue, @@ -250,19 +249,14 @@ def test_reopen_parent_dry_run() -> None: assert parent.state == "open" def test_reopen_parent_real(mocker: MockerFixture) -> None: - """Non-dry-run reopens issue and posts sec-event comment.""" + """Non-dry-run reopens issue via state edit.""" mock_edit_state = mocker.patch("security.issues.sync.gh_issue_edit_state", return_value=True) - mock_comment = mocker.patch("security.issues.sync.gh_issue_comment") parent = Issue(number=1, state="closed", title="P", body="b") maybe_reopen_parent_issue( "org/repo", parent, rule_id="R1", dry_run=False, context="reopen_child", child_issue_number=5, ) assert parent.state == "open" mock_edit_state.assert_called_once_with("org/repo", 1, "open") - mock_comment.assert_called_once() - comment_body = mock_comment.call_args[0][2] - assert "reopen" in comment_body - assert "R1" in comment_body def test_reopen_parent_gh_failure(mocker: MockerFixture) -> None: """If gh_issue_edit_state fails, state stays closed.""" @@ -444,12 +438,10 @@ def test_rebuild_body_changed(mocker: MockerFixture, sast_alert: Alert) -> None: 
def test_rebuild_body_unchanged(sast_alert: Alert) -> None: """When body is identical, no API call is made.""" from security.issues.builder import build_child_issue_body - from security.issues.sec_events import strip_sec_events_from_body secmeta = {"schema": "1", "type": "child", "fingerprint": "fp1"} human_body = build_child_issue_body(sast_alert) body = render_secmeta(secmeta) + "\n\n" + human_body - body = strip_sec_events_from_body(body) issue = Issue(number=1, state="open", title="T", body=body) ctx = _make_alert_context(alert=sast_alert) sync = _make_sync_context() @@ -464,54 +456,6 @@ def test_rebuild_body_dry_run(sast_alert: Alert) -> None: _rebuild_and_apply_child_body(ctx=ctx, sync=sync, issue=issue, secmeta=secmeta) -# ===================================================================== -# _comment_child_event -# ===================================================================== - - -def test_comment_reopen_event(mocker: MockerFixture) -> None: - """Posts a reopen sec-event comment when reopened=True.""" - mock_comment = mocker.patch("security.issues.sync.gh_issue_comment") - issue = Issue(number=1, state="open", title="T", body="b") - ctx = _make_alert_context() - sync = _make_sync_context() - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=True) - mock_comment.assert_called_once() - comment_body = mock_comment.call_args[0][2] - assert "reopen" in comment_body - -def test_comment_occurrence_event_no_comment(mocker: MockerFixture) -> None: - """No sec-event comment when issue is already open (new_occurrence=True but reopened=False).""" - mock_comment = mocker.patch("security.issues.sync.gh_issue_comment") - issue = Issue(number=1, state="open", title="T", body="b") - ctx = _make_alert_context() - sync = _make_sync_context() - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=False) - mock_comment.assert_not_called() - -def test_comment_no_event() -> None: - """No comment when neither reopened nor new_occurrence.""" - issue 
= Issue(number=1, state="open", title="T", body="b") - ctx = _make_alert_context() - sync = _make_sync_context() - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=False) - -def test_comment_reopen_dry_run() -> None: - """Dry-run mode does not call gh_issue_comment for reopen.""" - issue = Issue(number=1, state="open", title="T", body="b") - ctx = _make_alert_context() - sync = _make_sync_context(dry_run=True) - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=True) - -def test_comment_occurrence_dry_run() -> None: - """No comment in any mode when issue is already open (occurrence-only path).""" - issue = Issue(number=1, state="open", title="T", body="b") - ctx = _make_alert_context() - sync = _make_sync_context(dry_run=True) - # Dry-run should also be silent for already-open issues. - _comment_child_event(ctx=ctx, sync=sync, issue=issue, reopened=False) - - # ===================================================================== # _sync_child_title_and_labels # ===================================================================== @@ -555,7 +499,6 @@ def test_sync_title_dry_run() -> None: def test_handle_new_child_creates_issue(mocker: MockerFixture, sast_alert: Alert) -> None: """Creates a new issue and registers it in the index.""" mock_create = mocker.patch("security.issues.sync.gh_issue_create", return_value=42) - mocker.patch("security.issues.sync.gh_issue_comment") ctx = _make_alert_context(alert=sast_alert, rule_name="sast") issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) @@ -580,7 +523,6 @@ def test_handle_new_child_links_to_parent(mocker: MockerFixture, sast_alert: Ale """When a parent issue exists, the child is linked as a sub-issue.""" mocker.patch("security.issues.sync.gh_issue_create", return_value=42) mock_sub = mocker.patch("security.issues.sync.gh_issue_add_sub_issue_by_number") - mocker.patch("security.issues.sync.gh_issue_comment") parent = Issue(number=1, state="open", 
title="P", body="pb") ctx = _make_alert_context(alert=sast_alert) sync = _make_sync_context(notifications=[]) @@ -672,7 +614,6 @@ def test_ensure_child_linked_api_failure_no_cache_update(mocker: MockerFixture) def test_ensure_parent_creates_new(mocker: MockerFixture, sast_alert: Alert) -> None: """Creates a parent issue when none exists for the rule_id.""" mock_create = mocker.patch("security.issues.sync.gh_issue_create", return_value=99) - mocker.patch("security.issues.sync.gh_issue_comment") issues: dict[int, Issue] = {} index = IssueIndex(by_fingerprint={}, parent_by_rule_id={}) result = ensure_parent_issue(sast_alert, issues, index, dry_run=False) diff --git a/tests/security/test_constants.py b/tests/security/test_constants.py index 22b5fbd..91560d2 100644 --- a/tests/security/test_constants.py +++ b/tests/security/test_constants.py @@ -21,8 +21,6 @@ LABEL_SCOPE_SECURITY, LABEL_SEC_ADEPT_TO_CLOSE, LABEL_TYPE_TECH_DEBT, - SEC_EVENT_OPEN, - SEC_EVENT_REOPEN, SECMETA_TYPE_CHILD, SECMETA_TYPE_PARENT, ) @@ -41,13 +39,6 @@ def test_adept_to_close() -> None: assert LABEL_SEC_ADEPT_TO_CLOSE == "sec:adept-to-close" -def test_open() -> None: - assert SEC_EVENT_OPEN == "open" - -def test_reopen() -> None: - assert SEC_EVENT_REOPEN == "reopen" - - def test_parent() -> None: assert SECMETA_TYPE_PARENT == "parent" From a0a80caa86e46f33cd3d39c82530cf72343516af Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Wed, 22 Apr 2026 10:37:42 +0200 Subject: [PATCH 15/16] Removing daily updating logic. 
--- src/security/alerts/models.py | 2 + src/security/collect_alert.py | 1 + src/security/issues/builder.py | 22 ++++---- src/security/issues/models.py | 4 +- src/security/issues/secmeta.py | 14 +----- src/security/issues/sync.py | 86 ++++++-------------------------- src/security/issues/templates.py | 1 - tests/security/conftest.py | 3 ++ 8 files changed, 34 insertions(+), 99 deletions(-) diff --git a/src/security/alerts/models.py b/src/security/alerts/models.py index 9c56c93..b1a074f 100644 --- a/src/security/alerts/models.py +++ b/src/security/alerts/models.py @@ -34,6 +34,7 @@ class AlertMetadata: alert_url: str = "" rule_id: str = "" rule_name: str = "" + rule_description: str = "" severity: str = "" confidence: str = "" tags: list[str] = field(default_factory=list) @@ -51,6 +52,7 @@ class AlertMetadata: def __post_init__(self) -> None: self.rule_id = (self.rule_id or "").strip() self.rule_name = (self.rule_name or "").strip() + self.rule_description = (self.rule_description or "").strip() self.severity = (self.severity or "").strip() or "unknown" self.state = (self.state or "").lower().strip() self.tool = (self.tool or "").strip() diff --git a/src/security/collect_alert.py b/src/security/collect_alert.py index 6590378..110fec3 100644 --- a/src/security/collect_alert.py +++ b/src/security/collect_alert.py @@ -127,6 +127,7 @@ def _normalise_alert(alert: dict) -> dict: "alert_url": alert.get("html_url"), "rule_id": rule.get("id"), "rule_name": rule.get("name"), + "rule_description": rule.get("description"), "severity": rule.get("security_severity_level"), "confidence": rule.get("severity"), "tags": rule.get("tags") or [], diff --git a/src/security/issues/builder.py b/src/security/issues/builder.py index a2c4cea..29ac998 100644 --- a/src/security/issues/builder.py +++ b/src/security/issues/builder.py @@ -87,7 +87,7 @@ def build_parent_template_values(alert: Alert, *, rule_id: str, severity: str) - return { "category": alert.metadata.rule_name or NOT_AVAILABLE, 
"avd_id": alert.alert_details.vulnerability or rule_id, - "title": rule_id, + "title": alert.metadata.rule_description or rule_id, "severity": severity, "published_date": iso_date(alert.rule_details.published_date or NOT_AVAILABLE), "package_name": alert.rule_details.package_name, @@ -103,16 +103,10 @@ def build_parent_issue_body(alert: Alert) -> str: repo_full = alert.repo secmeta: dict[str, str] = { - "schema": "1", "type": SECMETA_TYPE_PARENT, "repo": repo_full, - "source": "code_scanning", - "tool": alert.metadata.tool, - "severity": severity, "rule_id": rule_id, - "first_seen": iso_date(alert.metadata.created_at), - "last_seen": iso_date(alert.metadata.updated_at), - "postponed_until": "", + "severity": severity, } values = build_parent_template_values(alert, rule_id=rule_id, severity=severity) @@ -120,10 +114,15 @@ def build_parent_issue_body(alert: Alert) -> str: return render_secmeta(secmeta) + "\n\n" + human_body -def build_issue_title(rule_name: str | None, rule_id: str, fingerprint: str) -> str: +def build_issue_title( + rule_description: str | None, + rule_name: str | None, + rule_id: str, + fingerprint: str, +) -> str: """Build the title string for a child issue.""" prefix = fingerprint[:8] if fingerprint else NOT_AVAILABLE - summary = (rule_name or rule_id or "Security finding").strip() or "Security finding" + summary = (rule_description or rule_name or rule_id or "Security finding").strip() or "Security finding" return f"[SEC][FP={prefix}] {summary}" @@ -136,7 +135,7 @@ def build_child_issue_body(alert: Alert) -> str: vulnerability = alert.alert_details.vulnerability avd_id = vulnerability if vulnerability.startswith("AVD-") else NOT_AVAILABLE - title = alert.metadata.rule_id + title = alert.metadata.rule_description or alert.metadata.rule_id scm_file = alert.alert_details.scm_file start_line = alert.metadata.start_line @@ -169,7 +168,6 @@ def build_child_issue_body(alert: Alert) -> str: "installed_version": alert.alert_details.installed_version, 
"fixed_version": alert.rule_details.fixed_version, "reachable": alert.alert_details.reachable, - "scan_date": iso_date(alert.alert_details.scan_date or alert.metadata.updated_at or NOT_AVAILABLE), "first_seen": iso_date(alert.alert_details.first_seen or alert.metadata.created_at or NOT_AVAILABLE), } return render_markdown_template(CHILD_BODY_TEMPLATE, values).strip() + "\n" diff --git a/src/security/issues/models.py b/src/security/issues/models.py index d2c2d63..3bdaa9b 100644 --- a/src/security/issues/models.py +++ b/src/security/issues/models.py @@ -91,13 +91,11 @@ class AlertContext: alert: Alert alert_number: int fingerprint: str - occurrence_fp: str repo: str - first_seen: str - last_seen: str tool: str rule_id: str rule_name: str + rule_description: str severity: str cve: str path: str diff --git a/src/security/issues/secmeta.py b/src/security/issues/secmeta.py index fc1342b..f816ecc 100644 --- a/src/security/issues/secmeta.py +++ b/src/security/issues/secmeta.py @@ -79,22 +79,12 @@ def render_kv_lines( def render_secmeta(secmeta: dict[str, str]) -> str: """Render a secmeta dict as a hidden HTML-comment block for issue bodies.""" preferred_order = [ - "schema", + "type", "fingerprint", "repo", - "source", - "tool", - "severity", - "cve", - "category", "rule_id", - "first_seen", - "last_seen", - "last_seen_commit", - "postponed_until", + "severity", "gh_alert_numbers", - "occurrence_count", - "last_occurrence_fp", ] lines = render_kv_lines(secmeta, preferred_order) return "" diff --git a/src/security/issues/sync.py b/src/security/issues/sync.py index c0d076c..4822b9a 100644 --- a/src/security/issues/sync.py +++ b/src/security/issues/sync.py @@ -24,7 +24,7 @@ import logging -from core.helpers import iso_date, normalize_path +from core.helpers import normalize_path from core.github.issues import ( gh_issue_add_labels, gh_issue_add_sub_issue_by_number, @@ -39,7 +39,6 @@ from core.rendering import render_markdown_template from security.alerts.models import Alert 
-from security.alerts.parser import compute_occurrence_fp from security.constants import ( LABEL_EPIC, LABEL_SCOPE_SECURITY, @@ -209,11 +208,7 @@ def ensure_parent_issue( existing = find_parent_issue(index, rule_id=rule_id) if existing is not None: # Keep parent issues aligned to the template as alerts evolve. - existing_secmeta = load_secmeta(existing.body) or {"schema": "1"} - existing_first = existing_secmeta.get("first_seen") or iso_date(alert.metadata.created_at) - existing_last = existing_secmeta.get("last_seen") or iso_date(alert.metadata.updated_at) - first_seen_final = min(existing_first, iso_date(alert.metadata.created_at)) - last_seen_final = max(existing_last, iso_date(alert.metadata.updated_at)) + existing_secmeta = load_secmeta(existing.body) or {} existing_severity = str(existing_secmeta.get("severity") or "unknown") existing_severity_cmp = existing_severity.lower() @@ -230,7 +225,7 @@ def ensure_parent_issue( ) if dry_run: logging.info( - "DRY-RUN: severity change on parent #%d (rule_id=%s): %s \u2192 %s", + "DRY-RUN: severity change on parent #%d (rule_id=%s): %s - %s", existing.number, rule_id, existing_severity_cmp, @@ -243,16 +238,10 @@ def ensure_parent_issue( existing_secmeta.update( { - "schema": existing_secmeta.get("schema") or "1", "type": SECMETA_TYPE_PARENT, "repo": repo_full, - "source": existing_secmeta.get("source") or "code_scanning", - "tool": alert.metadata.tool or existing_secmeta.get("tool") or "", "severity": severity_stored, "rule_id": rule_id, - "first_seen": first_seen_final, - "last_seen": last_seen_final, - "postponed_until": existing_secmeta.get("postponed_until", ""), } ) @@ -355,29 +344,17 @@ def _handle_new_child_issue( """Create a new child issue for an alert that has no matching issue yet.""" category = classify_category(ctx.alert) secmeta: dict[str, str] = { - "schema": "1", "type": SECMETA_TYPE_CHILD, "fingerprint": ctx.fingerprint, "repo": ctx.repo, - "source": "code_scanning", - "tool": ctx.tool, - "severity": 
ctx.severity, - "category": category, "rule_id": ctx.rule_id, - "first_seen": ctx.first_seen, - "last_seen": ctx.last_seen, - "last_seen_commit": ctx.commit_sha, - "postponed_until": "", + "severity": ctx.severity, "gh_alert_numbers": json_list([str(ctx.alert_number)]), - "occurrence_count": "1", - "last_occurrence_fp": ctx.occurrence_fp, } - if ctx.cve: - secmeta["cve"] = ctx.cve human_body = build_child_issue_body(ctx.alert) body = render_secmeta(secmeta) + "\n\n" + human_body - title = build_issue_title(ctx.rule_name, ctx.rule_id, ctx.fingerprint) + title = build_issue_title(ctx.rule_description, ctx.rule_name, ctx.rule_id, ctx.fingerprint) if sync.dry_run: labels = [LABEL_SCOPE_SECURITY, LABEL_TYPE_TECH_DEBT] @@ -387,7 +364,7 @@ def _handle_new_child_issue( logging.info( "DRY-RUN: create child alert=%d rule_id=%s sev=%s" " fp=%s tool=%s commit=%s loc=%s title=%r labels=[%s]" - " | secmeta:first_seen=%s last_seen=%s occurrence_count=1 gh_alert_numbers=[%d]", + " gh_alert_numbers=[%d]", ctx.alert_number, ctx.rule_id, ctx.severity, @@ -397,8 +374,6 @@ def _handle_new_child_issue( loc, title, ",".join(labels), - ctx.first_seen, - ctx.last_seen, ctx.alert_number, ) if parent_issue is None and ctx.rule_id: @@ -518,12 +493,9 @@ def _merge_child_secmeta( *, ctx: AlertContext, issue: Issue, -) -> tuple[dict[str, str], bool]: - """Merge incoming alert data into the child issue's secmeta. - - Returns ``(updated_secmeta, new_occurrence)``. 
- """ - secmeta = load_secmeta(issue.body) or {"schema": "1"} +) -> dict[str, str]: + """Merge incoming alert data into the child issue's secmeta.""" + secmeta = load_secmeta(issue.body) or {} secmeta.pop("alert_hash", None) existing_alerts = parse_json_list(secmeta.get("gh_alert_numbers")) @@ -532,40 +504,18 @@ def _merge_child_secmeta( if str(ctx.alert_number) not in existing_alerts: existing_alerts.append(str(ctx.alert_number)) - last_occ_fp = secmeta.get("last_occurrence_fp", "") - occurrence_count = int(secmeta.get("occurrence_count") or "0" or 0) - new_occurrence = bool(ctx.occurrence_fp and ctx.occurrence_fp != last_occ_fp) - if occurrence_count <= 0: - occurrence_count = 1 - if new_occurrence: - occurrence_count += 1 - - existing_first = secmeta.get("first_seen") or ctx.first_seen - existing_last = secmeta.get("last_seen") or ctx.last_seen - first_seen_final = min(existing_first, ctx.first_seen) - last_seen_final = max(existing_last, ctx.last_seen) - secmeta.update( { + "type": SECMETA_TYPE_CHILD, "fingerprint": ctx.fingerprint, "repo": ctx.repo, - "source": secmeta.get("source") or "code_scanning", - "tool": ctx.tool or secmeta.get("tool", ""), - "severity": ctx.severity, - "category": classify_category(ctx.alert) or secmeta.get("category", ""), "rule_id": ctx.rule_id or secmeta.get("rule_id", ""), - "first_seen": first_seen_final, - "last_seen": last_seen_final, - "last_seen_commit": ctx.commit_sha or secmeta.get("last_seen_commit", ""), + "severity": ctx.severity, "gh_alert_numbers": json_list(existing_alerts), - "occurrence_count": str(occurrence_count), - "last_occurrence_fp": ctx.occurrence_fp or last_occ_fp, } ) - if ctx.cve: - secmeta["cve"] = ctx.cve - return secmeta, new_occurrence + return secmeta def _rebuild_and_apply_child_body( @@ -598,7 +548,7 @@ def _sync_child_title_and_labels( issue: Issue, ) -> None: """Fix title drift and ensure required labels and priority on the child issue.""" - expected_title = build_issue_title(ctx.rule_name, 
ctx.rule_id, ctx.fingerprint) + expected_title = build_issue_title(ctx.rule_description, ctx.rule_name, ctx.rule_id, ctx.fingerprint) if expected_title != (issue.title or ""): if sync.dry_run: logging.info( @@ -672,7 +622,7 @@ def _handle_existing_child_issue( parent_issue = find_parent_issue(sync.index, rule_id=ctx.rule_id) _maybe_reopen_child(ctx=ctx, sync=sync, issue=issue, parent_issue=parent_issue) - secmeta, _ = _merge_child_secmeta(ctx=ctx, issue=issue) + secmeta = _merge_child_secmeta(ctx=ctx, issue=issue) _rebuild_and_apply_child_body(ctx=ctx, sync=sync, issue=issue, secmeta=secmeta) _sync_child_title_and_labels(ctx=ctx, sync=sync, issue=issue) @@ -713,11 +663,7 @@ def ensure_issue( "Ensure the collector/scanner includes an 'Alert hash: ...' line." ) - occurrence_fp = compute_occurrence_fp(commit_sha, path, start_line, end_line) - repo_full = alert.repo - first_seen = iso_date(alert.metadata.created_at) - last_seen = iso_date(alert.metadata.updated_at) parent_issue = ensure_parent_issue( alert, @@ -738,13 +684,11 @@ def ensure_issue( alert=alert, alert_number=alert_number, fingerprint=fingerprint, - occurrence_fp=occurrence_fp, repo=repo_full, - first_seen=first_seen, - last_seen=last_seen, tool=alert.metadata.tool, rule_id=rule_id, rule_name=alert.metadata.rule_name, + rule_description=alert.metadata.rule_description, severity=alert.metadata.severity, cve=cve, path=path, diff --git a/src/security/issues/templates.py b/src/security/issues/templates.py index f90aa2d..d2d7ede 100644 --- a/src/security/issues/templates.py +++ b/src/security/issues/templates.py @@ -81,6 +81,5 @@ ## Detection Timeline -- **Scan date:** {{ scan_date }} - **First seen:** {{ first_seen }} """ diff --git a/tests/security/conftest.py b/tests/security/conftest.py index a475954..7edd1aa 100644 --- a/tests/security/conftest.py +++ b/tests/security/conftest.py @@ -38,6 +38,7 @@ "alert_url": "https://github.com/test-org/test-repo/security/code-scanning/303", "rule_id": 
"req-with-very-false-aquasec-python", "rule_name": "sast", + "rule_description": "Requests with verify=False", "severity": "high", "confidence": "error", "tags": ["HIGH", "sast", "security"], @@ -108,6 +109,7 @@ "alert_url": "https://github.com/test-org/test-repo/security/code-scanning/312", "rule_id": "CVE-2026-25755", "rule_name": "vulnerabilities", + "rule_description": "jsPDF PDF object injection", "severity": "high", "confidence": "error", "tags": ["HIGH", "security", "vulnerabilities"], @@ -179,6 +181,7 @@ "alert_url": "https://github.com/test-org/test-repo/security/code-scanning/317", "rule_id": "AVD-PIPELINE-0008", "rule_name": "pipelineMisconfigurations", + "rule_description": "Dependency not pinned to commit SHA", "severity": "medium", "confidence": "warning", "tags": ["MEDIUM", "pipelineMisconfigurations", "security"], From e843472848c59102c2c5278964e3d470aec549f2 Mon Sep 17 00:00:00 2001 From: "Tobias.Mikula" Date: Wed, 22 Apr 2026 10:37:51 +0200 Subject: [PATCH 16/16] Removing daily updating logic tests. 
--- tests/security/alerts/test_models.py | 2 + tests/security/issues/test_builder.py | 43 +++++---------- tests/security/issues/test_models.py | 5 +- tests/security/issues/test_secmeta.py | 6 +- tests/security/issues/test_sync.py | 73 ++----------------------- tests/security/issues/test_templates.py | 3 +- tests/security/test_collect_alert.py | 3 + 7 files changed, 32 insertions(+), 103 deletions(-) diff --git a/tests/security/alerts/test_models.py b/tests/security/alerts/test_models.py index 1de2f72..cfd55c3 100644 --- a/tests/security/alerts/test_models.py +++ b/tests/security/alerts/test_models.py @@ -30,12 +30,14 @@ def test_alert_metadata_none_fields_do_not_crash() -> None: severity=None, # type: ignore[arg-type] – mirrors _normalise_alert output rule_id=None, # type: ignore[arg-type] rule_name=None, # type: ignore[arg-type] + rule_description=None, # type: ignore[arg-type] state=None, # type: ignore[arg-type] tool=None, # type: ignore[arg-type] ) assert md.severity == "unknown" assert md.rule_id == "" assert md.rule_name == "" + assert md.rule_description == "" assert md.state == "" assert md.tool == "" diff --git a/tests/security/issues/test_builder.py b/tests/security/issues/test_builder.py index d182e8f..c350ab9 100644 --- a/tests/security/issues/test_builder.py +++ b/tests/security/issues/test_builder.py @@ -278,19 +278,23 @@ def test_contains_confidence(vuln_alert: Alert) -> None: def test_format() -> None: fp = "a1b2c3d4e5f6" - title = build_issue_title("sast", "rule-123", fp) - assert title == "[SEC][FP=a1b2c3d4] sast" + title = build_issue_title("A description", "sast", "rule-123", fp) + assert title == "[SEC][FP=a1b2c3d4] A description" + +def test_fallback_to_rule_name() -> None: + title = build_issue_title(None, "sast", "rule-123", "abcdef12") + assert "sast" in title def test_fallback_to_rule_id() -> None: - title = build_issue_title(None, "rule-123", "abcdef12") + title = build_issue_title(None, None, "rule-123", "abcdef12") assert "rule-123" in 
title def test_fallback_to_default() -> None: - title = build_issue_title(None, "", "abcdef12") + title = build_issue_title(None, None, "", "abcdef12") assert "Security finding" in title def test_empty_fingerprint() -> None: - title = build_issue_title("sast", "rule-123", "") + title = build_issue_title("A description", "sast", "rule-123", "") assert "N/A" in title @@ -303,7 +307,7 @@ def test_empty_fingerprint() -> None: def test_sast_avd_id(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) - assert "req-with-very-false-aquasec-python" in body + assert "Requests with verify=False" in body def test_sast_alert_hash(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) @@ -311,7 +315,7 @@ def test_sast_alert_hash(sast_alert: Alert) -> None: def test_sast_title(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) - assert "sast" in body + assert "Requests with verify=False" in body def test_sast_message_present(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) @@ -333,10 +337,6 @@ def test_sast_reachable_from_msg(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) assert "False" in body -def test_sast_scan_date(sast_alert: Alert) -> None: - body = build_child_issue_body(sast_alert) - assert "2026-02-24" in body - def test_sast_first_seen(sast_alert: Alert) -> None: body = build_child_issue_body(sast_alert) assert "2025-09-17" in body @@ -345,7 +345,7 @@ def test_sast_first_seen(sast_alert: Alert) -> None: def test_vuln_avd_id(vuln_alert: Alert) -> None: body = build_child_issue_body(vuln_alert) - assert "CVE-2026-25755" in body + assert "jsPDF PDF object injection" in body def test_vuln_installed_version(vuln_alert: Alert) -> None: body = build_child_issue_body(vuln_alert) @@ -410,21 +410,6 @@ def test_all_template_sections_rendered(vuln_alert: Alert) -> None: assert "## Detection Timeline" in body -def test_scan_date_falls_back_to_metadata_updated_at() -> None: - """When 
alert_details.scan_date is absent, fall back to metadata.updated_at.""" - alert = Alert.from_dict({ - "metadata": { - "rule_id": "X", - "updated_at": "2026-01-15T10:00:00Z", - "created_at": "2025-12-01T08:00:00Z", - }, - "alert_details": {}, # scan_date absent → defaults to "" - "rule_details": {}, - }) - body = build_child_issue_body(alert) - assert "2026-01-15" in body - - def test_first_seen_falls_back_to_metadata_created_at() -> None: """When alert_details.first_seen is absent, fall back to metadata.created_at.""" alert = Alert.from_dict({ @@ -440,7 +425,7 @@ def test_first_seen_falls_back_to_metadata_created_at() -> None: assert "2025-12-01" in body -def test_scan_date_and_first_seen_yield_na_when_no_fallback() -> None: +def test_first_seen_yields_na_when_no_fallback() -> None: """When neither alert_details nor metadata provide dates, render N/A.""" alert = Alert.from_dict({ "metadata": {"rule_id": "X"}, # no updated_at / created_at @@ -448,4 +433,4 @@ def test_scan_date_and_first_seen_yield_na_when_no_fallback() -> None: "rule_details": {}, }) body = build_child_issue_body(alert) - assert body.count("N/A") >= 2 + assert body.count("N/A") >= 1 diff --git a/tests/security/issues/test_models.py b/tests/security/issues/test_models.py index a0ad502..9df3447 100644 --- a/tests/security/issues/test_models.py +++ b/tests/security/issues/test_models.py @@ -108,9 +108,10 @@ def test_issue_index_creation() -> None: def test_alert_context_creation() -> None: ctx = AlertContext( - alert={}, alert_number=1, fingerprint="fp", occurrence_fp="ofp", - repo="org/repo", first_seen="2026-01-01", last_seen="2026-01-02", + alert={}, alert_number=1, fingerprint="fp", + repo="org/repo", tool="AquaSec", rule_id="R1", rule_name="sast", + rule_description="Test finding description", severity="high", cve="CVE-79", path="src/f.py", start_line=10, end_line=20, commit_sha="abc123", ) diff --git a/tests/security/issues/test_secmeta.py b/tests/security/issues/test_secmeta.py index 
8fc28cc..1ac9017 100644 --- a/tests/security/issues/test_secmeta.py +++ b/tests/security/issues/test_secmeta.py @@ -115,10 +115,10 @@ def test_preferred_order() -> None: } rendered = render_secmeta(data) lines = rendered.strip().split("\n") - # schema should appear before fingerprint - schema_idx = next(i for i, l in enumerate(lines) if "schema=" in l) + # type should appear before fingerprint per preferred_order + type_idx = next(i for i, l in enumerate(lines) if "type=" in l) fp_idx = next(i for i, l in enumerate(lines) if "fingerprint=" in l) - assert schema_idx < fp_idx + assert type_idx < fp_idx def test_secmeta_roundtrip() -> None: """Render then parse should recover the original data.""" diff --git a/tests/security/issues/test_sync.py b/tests/security/issues/test_sync.py index 746bdb2..da42ac1 100644 --- a/tests/security/issues/test_sync.py +++ b/tests/security/issues/test_sync.py @@ -71,13 +71,11 @@ def _make_alert_context(**overrides: Any) -> AlertContext: alert=Alert(), alert_number=1, fingerprint="fp_test_123", - occurrence_fp="occ_fp_test", repo="test-org/test-repo", - first_seen="2026-01-01", - last_seen="2026-01-02", tool="AquaSec", rule_id="CVE-2026-1234", rule_name="sast", + rule_description="Test finding description", severity="high", cve="CVE-2026-1234", path="src/main.py", @@ -302,47 +300,11 @@ def test_merge_new_alert_number() -> None: "type": "child", "fingerprint": "fp1", "gh_alert_numbers": '["100"]', - "occurrence_count": "1", - "last_occurrence_fp": "old_occ", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", }) - ctx = _make_alert_context(alert_number=200, fingerprint="fp1", occurrence_fp="new_occ") - secmeta, new_occurrence = _merge_child_secmeta(ctx=ctx, issue=child) + ctx = _make_alert_context(alert_number=200, fingerprint="fp1") + secmeta = _merge_child_secmeta(ctx=ctx, issue=child) assert "200" in secmeta["gh_alert_numbers"] assert "100" in secmeta["gh_alert_numbers"] - assert new_occurrence is True - assert 
secmeta["occurrence_count"] == "2" - -def test_merge_same_occurrence_fp() -> None: - """Same occurrence_fp means no new occurrence counted.""" - child = _issue_with_secmeta(1, { - "type": "child", - "fingerprint": "fp1", - "gh_alert_numbers": '["100"]', - "occurrence_count": "1", - "last_occurrence_fp": "same_occ", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", - }) - ctx = _make_alert_context(alert_number=100, fingerprint="fp1", occurrence_fp="same_occ") - secmeta, new_occurrence = _merge_child_secmeta(ctx=ctx, issue=child) - assert new_occurrence is False - assert secmeta["occurrence_count"] == "1" - -def test_merge_date_range_expansion() -> None: - """first_seen takes the min, last_seen takes the max.""" - child = _issue_with_secmeta(1, { - "type": "child", - "fingerprint": "fp1", - "first_seen": "2026-02-01", - "last_seen": "2026-02-15", - "occurrence_count": "1", - }) - ctx = _make_alert_context(first_seen="2026-01-15", last_seen="2026-03-01") - secmeta, _ = _merge_child_secmeta(ctx=ctx, issue=child) - assert secmeta["first_seen"] == "2026-01-15" - assert secmeta["last_seen"] == "2026-03-01" def test_merge_removes_alert_hash() -> None: """Legacy alert_hash key is dropped during merge.""" @@ -350,26 +312,11 @@ def test_merge_removes_alert_hash() -> None: "type": "child", "alert_hash": "old_hash", "fingerprint": "fp1", - "occurrence_count": "1", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", }) ctx = _make_alert_context(fingerprint="fp1") - secmeta, _ = _merge_child_secmeta(ctx=ctx, issue=child) + secmeta = _merge_child_secmeta(ctx=ctx, issue=child) assert "alert_hash" not in secmeta -def test_merge_zero_occurrence_count_reset() -> None: - """occurrence_count <= 0 is reset to at least 1.""" - child_secmeta_str = render_secmeta({ - "type": "child", "fingerprint": "fp1", - "occurrence_count": "0", "last_occurrence_fp": "same_fp", - "first_seen": "2026-01-01", "last_seen": "2026-01-01", - }) - child = Issue(number=1, state="open", title="T", 
body=child_secmeta_str + "\nBody\n") - ctx = _make_alert_context(fingerprint="fp1", occurrence_fp="same_fp") - secmeta, _ = _merge_child_secmeta(ctx=ctx, issue=child) - assert int(secmeta["occurrence_count"]) >= 1 - # ===================================================================== # _maybe_reopen_child @@ -476,7 +423,7 @@ def test_sync_title_already_correct(mocker: MockerFixture) -> None: """Title is not updated when it matches the expected format.""" mock_labels = mocker.patch("security.issues.sync.gh_issue_add_labels") from security.issues.builder import build_issue_title - title = build_issue_title("sast", "CVE-2026-1234", "fp_test_123") + title = build_issue_title("Test finding description", "sast", "CVE-2026-1234", "fp_test_123") issue = Issue(number=1, state="open", title=title, body="b") ctx = _make_alert_context(rule_name="sast", rule_id="CVE-2026-1234", fingerprint="fp_test_123") sync = _make_sync_context() @@ -635,8 +582,6 @@ def test_ensure_parent_existing_returns_existing(sast_alert: Alert) -> None: "type": "parent", "rule_id": sast_alert.metadata.rule_id, "severity": "high", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", }) issues = {10: parent} index = build_issue_index(issues) @@ -650,8 +595,6 @@ def test_ensure_parent_severity_change_detected(sast_alert: Alert) -> None: "type": "parent", "rule_id": sast_alert.metadata.rule_id, "severity": "low", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", }) issues = {10: parent} index = build_issue_index(issues) @@ -682,8 +625,6 @@ def test_ensure_parent_body_deferred(sast_alert: Alert) -> None: "type": "parent", "rule_id": sast_alert.metadata.rule_id, "severity": "high", - "first_seen": "2026-01-01", - "last_seen": "2026-01-01", }) original_body = parent.body issues = {10: parent} @@ -700,8 +641,6 @@ def test_ensure_parent_title_drift_corrected(mocker: MockerFixture, sast_alert: "type": "parent", "rule_id": sast_alert.metadata.rule_id, "severity": "high", - "first_seen": 
"2026-01-01", - "last_seen": "2026-01-01", }) parent.title = "Wrong old title" issues = {10: parent} @@ -917,7 +856,7 @@ def test_sync_severity_change_detected(sast_alert: Alert) -> None: """Severity change on existing parent is captured in result.""" parent = _issue_with_secmeta(10, { "type": "parent", "rule_id": sast_alert.metadata.rule_id, - "severity": "low", "first_seen": "2026-01-01", "last_seen": "2026-01-01", + "severity": "low", }) issues = {10: parent} result = sync_alerts_and_issues({303: sast_alert}, issues, dry_run=True) diff --git a/tests/security/issues/test_templates.py b/tests/security/issues/test_templates.py index 97265d2..9399980 100644 --- a/tests/security/issues/test_templates.py +++ b/tests/security/issues/test_templates.py @@ -102,7 +102,7 @@ def test_child_contains_all_placeholders() -> None: "{{ avd_id }}", "{{ alert_hash }}", "{{ title }}", "{{ message }}", "{{ repository_full_name }}", "{{ file_display }}", "{{ file_permalink }}", "{{ package_name }}", "{{ installed_version }}", "{{ fixed_version }}", - "{{ reachable }}", "{{ scan_date }}", "{{ first_seen }}", + "{{ reachable }}", "{{ first_seen }}", ] for ph in expected_placeholders: assert ph in CHILD_BODY_TEMPLATE, f"Missing placeholder: {ph}" @@ -120,7 +120,6 @@ def test_child_renders_without_error() -> None: "installed_version": "1.0", "fixed_version": "2.0", "reachable": "True", - "scan_date": "2026-01-01", "first_seen": "2026-01-01", } result = render_markdown_template(CHILD_BODY_TEMPLATE, values) diff --git a/tests/security/test_collect_alert.py b/tests/security/test_collect_alert.py index 57fce59..bc701b3 100644 --- a/tests/security/test_collect_alert.py +++ b/tests/security/test_collect_alert.py @@ -49,6 +49,7 @@ "rule": { "id": "rule-1", "name": "sast", + "description": "Requests with verify=False", "security_severity_level": "high", "severity": "error", "tags": ["HIGH", "sast"], @@ -263,6 +264,7 @@ def test_normalise_alert_metadata() -> None: assert meta["state"] == "open" 
assert meta["rule_id"] == "rule-1" assert meta["rule_name"] == "sast" + assert meta["rule_description"] == "Requests with verify=False" assert meta["severity"] == "high" assert meta["confidence"] == "error" assert meta["tags"] == ["HIGH", "sast"] @@ -298,6 +300,7 @@ def test_normalise_alert_minimal() -> None: assert meta["alert_number"] is None assert meta["state"] is None assert meta["rule_id"] is None + assert meta["rule_description"] is None assert meta["tool"] is None assert meta["file"] is None assert meta["tags"] == []