diff --git a/github/zap/README.md b/github/zap/README.md new file mode 100644 index 000000000..4dd22f4fc --- /dev/null +++ b/github/zap/README.md @@ -0,0 +1,143 @@ +# OpenTaint + ZAP Security Scan Action + +GitHub Action that combines [OpenTaint](https://github.com/seqra/opentaint) static analysis with [ZAP](https://www.zaproxy.org/) dynamic +testing to identify and validate security vulnerabilities + +## Quick Start + +```yaml +name: Security Scan +on: pull_request + +permissions: + contents: read + security-events: write + +env: + APP_URL: http://localhost:8080 # Your app URL + +jobs: + security-scan: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up JDK + uses: actions/setup-java@v5 + with: + distribution: 'temurin' + java-version: '21' + + - name: Start application + run: # Start your app here + + - name: Run security scan + uses: seqra/opentaint/github/zap@v0 + with: + mode: 'differential' + target: ${{ env.APP_URL }} +``` + +## Inputs + +### Required + +- `target` - Target URL for ZAP dynamic scan (must be a running application) + +### Optional + +- `mode` - Scan mode: `full` (scans current branch) or `differential` (compares PR against base branch). Default: `full` +- `template` - Path to ZAP automation template. Default: `template.yaml` +- `context-name` - Context name from template to use. Default: first context +- `artifact-name` - Name of uploaded artifact. Default: `opentaint-zap-scan-results` +- `upload-sarif` - Upload validated findings to GitHub Code Security. Default: `true` + +### OpenTaint Options + +- `project-root` - Project root path. Default: `.` +- `opentaint-version` - OpenTaint version selector. Default: `v0` +- `rules-path` - Custom rules directories (comma-separated) +- `opentaint-timeout` - Scan timeout. Default: `15m` + +### ZAP Options + +- `zap-docker-image` - ZAP Docker image. 
Default: `ghcr.io/zaproxy/zaproxy:stable` +- `zap-docker-env-vars` - Environment variables for ZAP container +- `zap-cmd-options` - Additional ZAP command line options + +## Template + +The action uses a [ZAP automation framework](https://www.zaproxy.org/docs/desktop/addons/automation-framework/) YAML +file + +### Requirements + +- At least one context in `env.contexts` +- API import job (`openapi` or `graphql`) +- At least one CWE policy with format `policy-CWE-{number}` + +### Details + +The action automatically: + +- Adds a required JSON report if missing +- Normalizes all report directories to `/zap/wrk/zap-output` +- Generates CWE-specific contexts based on OpenTaint findings +- Creates activeScan jobs for matching CWEs + +Policy naming: Use `policy-CWE-{number}` format (e.g., `policy-CWE-89` for SQL Injection, `policy-CWE-79` for XSS). + +### Example + +```yaml +env: + contexts: + - name: default-context + urls: + - http://localhost:8080 + +jobs: + - type: openapi + parameters: + context: default-context + targetUrl: http://localhost:8080 + apiUrl: http://localhost:8080/v3/api-docs + + - type: activeScan-config + parameters: + threadPerHost: 40 + + - type: activeScan-policy + parameters: + name: policy-CWE-89 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 40018 + threshold: MEDIUM +``` + +See [template.yaml](template.yaml) for a complete example + +## Artifacts + +The action uploads an artifact with: + +- `validated.sarif` - sarif with ZAP-confirmed vulnerabilities +- `zap-automation.yaml` - generated YAML automation file +- ZAP reports from `/zap/wrk/zap-output` folder +- Sarif from OpenTaint scan based on mode: + - `full`: `opentaint.sarif` (all OpenTaint findings) + - `differential`: `filtered-opentaint.sarif` (new findings only) + +## Examples + +- [example.yml](examples/example.yml) - Differential scan for pull requests +- [example-full-scan.yml](examples/example-full-scan.yml) - Full scan for the main branch + 
+## Requirements + +- Application must be running and accessible at target URL +- Java/Kotlin projects with Spring frameworks +- OpenAPI or GraphQL schema for API import diff --git a/github/zap/action.yml b/github/zap/action.yml new file mode 100644 index 000000000..ec76afad6 --- /dev/null +++ b/github/zap/action.yml @@ -0,0 +1,206 @@ +name: 'OpenTaint + ZAP Security Scan' +description: 'Run OpenTaint SAST analysis and ZAP dynamic testing with automatic vulnerability confirmation' + +branding: + icon: 'shield' + color: 'blue' + +inputs: + mode: + description: 'Scan mode: "full" (for single branch) or "differential" (for pr)' + required: false + default: 'full' + template: + description: 'Path to ZAP automation template YAML file' + required: false + default: 'template.yaml' + target: + description: 'Target URL for ZAP dynamic scan' + required: true + context-name: + description: 'Context name to use from template (default is first context)' + required: false + artifact-name: + description: 'Name of the uploaded artifact' + required: false + default: 'opentaint-zap-scan-results' + upload-sarif: + description: 'Upload confirmed SARIF to GitHub Security alerts' + required: false + default: 'true' + + # OpenTaint-specific inputs + project-root: + description: 'Relative path under $GITHUB_WORKSPACE to the root of the analyzed project' + required: false + default: '.' + opentaint-version: + description: 'OpenTaint version selector: latest, v<major>, v<major>.<minor>, or exact v<major>.<minor>.<patch>' 
+ required: false + default: 'v0' + rules-path: + description: 'Paths to custom OpenTaint rules directories (comma-separated)' + required: false + default: '' + opentaint-timeout: + description: 'OpenTaint scan timeout' + required: false + default: '15m' + + # ZAP-specific inputs + zap-docker-image: + description: 'The Docker image to be used for ZAP' + required: false + default: 'ghcr.io/zaproxy/zaproxy:stable' + zap-docker-env-vars: + description: 'The env vars that should be passed to the Docker container running ZAP' + required: false + default: '' + zap-cmd-options: + description: 'Additional command line options to start ZAP with' + required: false + default: '' + +runs: + using: 'composite' + steps: + - name: Install uv + uses: astral-sh/setup-uv@v7 + + - name: Create scan results directory + shell: bash + run: | + mkdir -p scan-results/base + mkdir -p scan-results/current + mkdir -p zap-output + chmod 777 zap-output + + - name: Run OpenTaint scan (current branch) + uses: seqra/opentaint/github@github/v0 + with: + project-root: ${{ inputs.project-root }} + upload-sarif: 'false' + artifact-name: 'opentaint-sarif-current' + opentaint-version: ${{ inputs.opentaint-version }} + rules-path: ${{ inputs.rules-path }} + timeout: ${{ inputs.opentaint-timeout }} + verbosity: 'info' + severity: 'warning,error' + + - name: Download OpenTaint SARIF artifact (current) + uses: actions/download-artifact@v4 + with: + name: 'opentaint-sarif-current' + path: scan-results/current + + - name: Checkout base branch + if: inputs.mode == 'differential' + uses: actions/checkout@v4 + with: + ref: ${{ github.base_ref }} + path: base-branch-checkout + + - name: Run OpenTaint scan (base branch) + if: inputs.mode == 'differential' + uses: seqra/opentaint/github@github/v0 + with: + project-root: base-branch-checkout/${{ inputs.project-root }} + upload-sarif: 'false' + artifact-name: 'opentaint-sarif-base' + opentaint-version: ${{ inputs.opentaint-version }} + rules-path: ${{ 
inputs.rules-path }} + timeout: ${{ inputs.opentaint-timeout }} + verbosity: 'info' + severity: 'warning,error' + + - name: Download OpenTaint SARIF artifact (base) + if: inputs.mode == 'differential' + uses: actions/download-artifact@v4 + with: + name: 'opentaint-sarif-base' + path: scan-results/base + + - name: Delete temporary OpenTaint artifacts + uses: geekyeggo/delete-artifact@v5 + with: + name: | + opentaint-sarif-current + opentaint-sarif-base + failOnError: false + + - name: Install python dependencies + shell: bash + run: | + cd ${{ github.action_path }} + uv sync + + - name: Generate ZAP automation configuration + shell: bash + run: | + BASE_SARIF_ARG="" + if [ "${{ inputs.mode }}" = "differential" ]; then + BASE_SARIF_ARG="--base-sarif ${{ github.workspace }}/scan-results/base/opentaint.sarif" + fi + + CONTEXT_ARG="" + if [ -n "${{ inputs.context-name }}" ]; then + CONTEXT_ARG="--context-name ${{ inputs.context-name }}" + fi + + cd ${{ github.action_path }} + uv run python gen_auto.py \ + --sarif ${{ github.workspace }}/scan-results/current/opentaint.sarif \ + $BASE_SARIF_ARG \ + --template ${{ github.workspace }}/${{ inputs.template }} \ + --target ${{ inputs.target }} \ + --output ${{ github.workspace }}/scan-results/zap-automation.yaml \ + $CONTEXT_ARG + + - name: Run ZAP automated scan + uses: zaproxy/action-af@v0.2.0 + with: + plan: 'scan-results/zap-automation.yaml' + docker_name: ${{ inputs.zap-docker-image }} + docker_env_vars: ${{ inputs.zap-docker-env-vars }} + cmd_options: ${{ inputs.zap-cmd-options }} + + - name: Filter SARIF by confirmed vulnerabilities + id: filter-sarif + shell: bash + run: | + cd ${{ github.action_path }} + uv run python filter_sarif.py \ + --sarif ${{ github.workspace }}/scan-results/current/opentaint.sarif \ + --report ${{ github.workspace }}/zap-output/opentaint_zap_scan_results.json \ + --output ${{ github.workspace }}/scan-results/validated.sarif \ + --verbose + + - name: Upload SARIF to GitHub Security + if: 
inputs.upload-sarif == 'true' + uses: github/codeql-action/upload-sarif@v4 + with: + sarif_file: scan-results/validated.sarif + category: opentaint-zap-scan + + - name: Prepare artifact files + if: always() + shell: bash + run: | + mkdir -p artifact-upload + + if [ "${{ inputs.mode }}" = "differential" ]; then + cp scan-results/current/filtered-opentaint.sarif artifact-upload/opentaint.sarif 2>/dev/null || true + else + cp scan-results/current/opentaint.sarif artifact-upload/opentaint.sarif 2>/dev/null || true + fi + cp scan-results/validated.sarif artifact-upload/ 2>/dev/null || true + cp scan-results/zap-automation.yaml artifact-upload/ || true + cp -r zap-output/* artifact-upload/ 2>/dev/null || true + + - name: Upload scan results artifact + uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ inputs.artifact-name }} + path: artifact-upload/ + retention-days: 30 diff --git a/github/zap/examples/example-full-scan.yml b/github/zap/examples/example-full-scan.yml new file mode 100644 index 000000000..0475d2d5c --- /dev/null +++ b/github/zap/examples/example-full-scan.yml @@ -0,0 +1,69 @@ +name: OpenTaint + ZAP Security Scan (Full Mode) + +on: + push: + branches: + - main + +permissions: + contents: read + security-events: write + +jobs: + security-scan: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up JDK 21 + uses: actions/setup-java@v5 + with: + java-version: '21' + distribution: 'temurin' + + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v5 + + - name: Build application + run: ./gradlew build -x test + + - name: Start Spring Boot application + run: | + ./gradlew bootRun > app.log 2>&1 & + echo $! > app.pid + + # Wait for application to be ready + echo "Waiting for application to start..." + for i in {1..30}; do + if curl -s http://localhost:8081/v3/api-docs > /dev/null; then + echo "Application is ready!" + break + fi + echo "Waiting... ($i/30)" + sleep 2 + done + + if ! 
curl -s http://localhost:8081/v3/api-docs > /dev/null; then + echo "Application failed to start" + cat app.log + exit 1 + fi + + - name: Run OpenTaint + ZAP security scan + uses: seqra/opentaint/github/zap@v0 + with: + mode: 'full' + template: 'template.yaml' + target: 'http://localhost:8081' + artifact-name: 'opentaint-zap-scan-results' + upload-sarif: 'true' + zap-cmd-options: '-addonupdate -addoninstall ascanrulesBeta -addoninstall pscanrulesBeta' + + - name: Stop application + if: always() + run: | + if [ -f app.pid ]; then + kill $(cat app.pid) || true + rm app.pid + fi diff --git a/github/zap/examples/example.yml b/github/zap/examples/example.yml new file mode 100644 index 000000000..9e4fa51ec --- /dev/null +++ b/github/zap/examples/example.yml @@ -0,0 +1,68 @@ +name: OpenTaint + ZAP Security Scan + +on: + pull_request + +permissions: + contents: read + pull-requests: write + security-events: write + +jobs: + security-scan: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up JDK 21 + uses: actions/setup-java@v5 + with: + java-version: '21' + distribution: 'temurin' + + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v5 + + - name: Build application + run: ./gradlew build -x test + + - name: Start Spring Boot application + run: | + ./gradlew bootRun > app.log 2>&1 & + echo $! > app.pid + + # Wait for application to be ready + echo "Waiting for application to start..." + for i in {1..30}; do + if curl -s http://localhost:8081/v3/api-docs > /dev/null; then + echo "Application is ready!" + break + fi + echo "Waiting... ($i/30)" + sleep 2 + done + + if ! 
curl -s http://localhost:8081/v3/api-docs > /dev/null; then + echo "Application failed to start" + cat app.log + exit 1 + fi + + - name: Run OpenTaint + ZAP security scan + uses: seqra/opentaint/github/zap@v0 + with: + mode: 'differential' + template: 'template.yaml' + target: 'http://localhost:8081' + artifact-name: 'opentaint-zap-scan-results' + upload-sarif: 'true' + zap-cmd-options: '-addonupdate -addoninstall ascanrulesBeta -addoninstall pscanrulesBeta' + + - name: Stop application + if: always() + run: | + if [ -f app.pid ]; then + kill $(cat app.pid) || true + rm app.pid + fi diff --git a/github/zap/filter_sarif.py b/github/zap/filter_sarif.py new file mode 100644 index 000000000..0e3991c48 --- /dev/null +++ b/github/zap/filter_sarif.py @@ -0,0 +1,120 @@ +import argparse +import copy +import json +import logging +from pathlib import Path +from urllib.parse import urlparse + +logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") +logger = logging.getLogger(__name__) + +TOOL_NAME = "OpenTaint + ZAP" + + +def extract_rule_cwe_mapping(sarif: dict) -> dict[str, int]: + """Extract rule ID to CWE number mapping from SARIF""" + rule_cwe = {} + for rule in sarif["runs"][0]["tool"]["driver"]["rules"]: + rule_id = rule["id"] + for tag in rule["properties"]["tags"]: + if tag.startswith("CWE-"): + cwe_num = int(tag.replace("CWE-", "")) + rule_cwe[rule_id] = cwe_num + break + return rule_cwe + + +def extract_confirmed_endpoints(zap_report: dict) -> set[tuple]: + """Extract confirmed vulnerable endpoints from ZAP report""" + confirmed = set() + for site in zap_report.get("site", []): + for alert in site.get("alerts", []): + cweid = alert.get("cweid") + if not cweid: + continue + cwe_num = int(cweid) + for instance in alert.get("instances", []): + method = instance.get("method", "") + node_name = instance.get("nodeName", "") + if not method or not node_name: + continue + url_part = node_name.split(" ")[0] if " " in node_name else node_name + path = 
urlparse(url_part).path + confirmed.add((method, path, cwe_num)) + + logger.info(f"Found {len(confirmed)} confirmed vulnerable endpoints in ZAP report") + return confirmed + + +def filter_sarif_by_confirmed(sarif: dict, confirmed_endpoints: set[tuple], rule_cwe: dict[str, int]) -> dict: + """Filter SARIF to only include confirmed vulnerabilities""" + filtered_sarif = copy.deepcopy(sarif) + filtered_results = [] + for result in sarif["runs"][0]["results"]: + rule_id = result["ruleId"] + cwe_num = rule_cwe.get(rule_id) + if not cwe_num: + continue + for related in result.get("relatedLocations", []): + for loc in related.get("logicalLocations", []): + fqn = loc.get("fullyQualifiedName", "") + if " " not in fqn: + continue + method, path = fqn.split(" ", 1) + if (method, path, cwe_num) in confirmed_endpoints: + filtered_results.append(result) + break + else: + continue + break + + filtered_sarif["runs"][0]["results"] = filtered_results + logger.info(f"Filtered SARIF: {len(sarif['runs'][0]['results'])} -> {len(filtered_results)} results") + return filtered_sarif + + +def main(): + parser = argparse.ArgumentParser(description="Filter SARIF based on confirmed ZAP vulnerabilities") + + parser.add_argument("--sarif", type=Path, required=True, help="SARIF file to filter") + parser.add_argument("--report", type=Path, required=True, help="ZAP JSON report with confirmed vulnerabilities") + parser.add_argument( + "--output", type=Path, default=Path("scan-results/validated.sarif"), help="Output filtered SARIF file" + ) + parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging") + + args = parser.parse_args() + if args.verbose: + logging.getLogger().setLevel(logging.DEBUG) + if not args.sarif.exists(): + logger.error(f"SARIF file not found: {args.sarif}") + return 1 + if not args.report.exists(): + logger.error(f"Report file not found: {args.report}") + return 1 + + try: + logger.debug(f"Loading SARIF: {args.sarif}") + with open(args.sarif) as f: + 
sarif = json.load(f) + logger.debug(f"Loading ZAP report: {args.report}") + with open(args.report) as f: + zap_report = json.load(f) + + rule_cwe = extract_rule_cwe_mapping(sarif) + logger.debug(f"Extracted {len(rule_cwe)} rule-to-CWE mappings") + confirmed_endpoints = extract_confirmed_endpoints(zap_report) + filtered_sarif = filter_sarif_by_confirmed(sarif, confirmed_endpoints, rule_cwe) + filtered_sarif["runs"][0]["tool"]["driver"]["name"] = TOOL_NAME + args.output.parent.mkdir(parents=True, exist_ok=True) + with open(args.output, "w") as f: + json.dump(filtered_sarif, f, indent=2) + logger.info(f"Saved filtered SARIF to: {args.output}") + return 0 + except Exception as e: + logger.error(f"Failed to filter SARIF: {e}", exc_info=args.verbose) + return 1 + + +if __name__ == "__main__": + exit(main()) diff --git a/github/zap/gen_auto.py b/github/zap/gen_auto.py new file mode 100644 index 000000000..4d1c6363e --- /dev/null +++ b/github/zap/gen_auto.py @@ -0,0 +1,448 @@ +import argparse +import copy +import json +import logging +import re +import subprocess +from collections import defaultdict +from pathlib import Path + +import yaml + +logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") +logger = logging.getLogger(__name__) + + +def load_template(template_path: Path) -> dict: + """Load and validate ZAP automation template""" + with open(template_path) as f: + template = yaml.safe_load(f) + if "env" not in template or "contexts" not in template["env"]: + raise ValueError("Template missing required 'env.contexts' section") + if "jobs" not in template: + raise ValueError("Template missing required 'jobs' section") + return template + + +def validate_template(template: dict) -> None: + """Validate required template fields, raise error if missing""" + has_openapi = False + has_graphql = False + for job in template["jobs"]: + job_type = job.get("type", "") + if job_type == "openapi": + has_openapi = True + elif job_type == "graphql": + has_graphql 
= True + if not has_openapi and not has_graphql: + raise ValueError("Template must contain at least one 'openapi' or 'graphql' job") + if not template["env"]["contexts"]: + raise ValueError("Template must contain at least one context in 'env.contexts'") + + cwe_policies = [] + for job in template["jobs"]: + if job.get("type") == "activeScan-policy": + policy_name = job.get("parameters", {}).get("name", "") + if policy_name.startswith("policy-CWE-"): + cwe_policies.append(policy_name) + + if not cwe_policies: + raise ValueError("Template must contain at least one policy with format 'policy-CWE-X'") + logger.info("Template validation passed") + + +def ensure_report_jobs(template: dict) -> dict: + """Ensure template has required report job and normalize all report directories""" + required_dir = "/zap/wrk/zap-output" + required_report = { + "type": "report", + "parameters": { + "template": "traditional-json", + "reportDir": required_dir, + "reportFile": "opentaint_zap_scan_results", + "reportTitle": "OpenTaint + ZAP Scan Report", + "reportDescription": "Automated security scan results for filtering sarif", + }, + "risks": ["high", "medium"], + "confidences": ["high", "medium", "low"], + } + + has_json_report = False + for job in template["jobs"]: + if job.get("type") == "report": + params = job.get("parameters", {}) + if ( + params.get("template") == "traditional-json" + and params.get("reportFile") == "opentaint_zap_scan_results" + ): + has_json_report = True + logger.info("Found required traditional-json report job") + + current_dir = params.get("reportDir", "") + if current_dir != required_dir and not current_dir.startswith(required_dir + "/"): + logger.warning( + f"Report job with template '{params.get('template', 'unknown')}' has reportDir='{current_dir}'. 
" + f"Replacing with '{required_dir}'" + ) + params["reportDir"] = required_dir + + # Add required JSON report if missing + if not has_json_report: + logger.warning("Template missing required traditional-json report job, adding automatically") + template["jobs"].append(required_report) + logger.info("Added traditional-json report job with reportFile='opentaint_zap_scan_results'") + + return template + + +def check_template_warnings(template: dict) -> None: + """Check template for potential issues and show warnings""" + contexts = template["env"]["contexts"] + if len(contexts) > 1: + logger.warning( + f"Template contains {len(contexts)} contexts. Will use first context by default. " + "Use --context-name to specify a different context." + ) + non_cwe_policies = [] + for job in template["jobs"]: + if job.get("type") == "activeScan-policy": + policy_name = job.get("parameters", {}).get("name", "") + if not policy_name.startswith("policy-CWE-"): + non_cwe_policies.append(policy_name) + if non_cwe_policies: + logger.warning( + f"Template contains {len(non_cwe_policies)} non-CWE policies that will be kept in output: " + f"{', '.join(non_cwe_policies)}" + ) + existing_scans = [] + for job in template["jobs"]: + if job.get("type") == "activeScan": + ctx = job.get("parameters", {}).get("context", "unknown") + existing_scans.append(ctx) + if existing_scans: + logger.warning( + f"Template contains {len(existing_scans)} existing activeScan jobs that will be removed: " + f"{', '.join(existing_scans)}" + ) + + +def check_policy_rules(template: dict) -> None: + """Warn if CWE policies have no rules defined""" + for job in template["jobs"]: + if job.get("type") == "activeScan-policy": + policy_name = job.get("parameters", {}).get("name", "") + if policy_name.startswith("policy-CWE-"): + policy_def = job.get("policyDefinition", {}) + rules = policy_def.get("rules", []) + if not rules: + logger.warning(f"Policy '{policy_name}' has no rules defined") + + +def select_context(template: 
dict, context_name: str | None) -> tuple[dict, int]: + """Select context by name or use first one""" + contexts = template["env"]["contexts"] + if context_name: + for idx, ctx in enumerate(contexts): + if ctx.get("name") == context_name: + logger.info(f"Using context: {context_name}") + if len(contexts) > 1: + logger.warning(f"Other {len(contexts) - 1} context(s) will be ignored") + return ctx, idx + raise ValueError(f"Context '{context_name}' not found in template") + logger.info(f"Using first context: {contexts[0].get('name', 'unnamed')}") + return contexts[0], 0 + + +def extract_path_filters(context: dict) -> dict: + """Extract and compile path filter patterns from context""" + filters = {"urls": [], "include": [], "exclude": []} + for url in context.get("urls", []): + pattern = re.escape(url) + ".*" + filters["urls"].append(re.compile(pattern)) + for pattern in context.get("includePaths", []): + try: + filters["include"].append(re.compile(pattern)) + except re.error as e: + logger.warning(f"Invalid includePath regex '{pattern}': {e}") + for pattern in context.get("excludePaths", []): + try: + filters["exclude"].append(re.compile(pattern)) + except re.error as e: + logger.warning(f"Invalid excludePath regex '{pattern}': {e}") + return filters + + +def should_include_path(path: str, filters: dict, target_url: str) -> bool: + """Check if path should be included based on context filters""" + full_path = f"{target_url}{path}" + if filters["urls"] and not any(pattern.match(full_path) for pattern in filters["urls"]): + return False + + if filters["include"] and not any(pattern.match(full_path) for pattern in filters["include"]): + return False + + return not (filters["exclude"] and any(pattern.match(full_path) for pattern in filters["exclude"])) + + +def filter_cwe_paths(cwe_paths: dict[int, list[str]], filters: dict, target_url: str) -> dict[int, list[str]]: + """Apply path filters to CWE paths mapping""" + filtered = {} + total_before = sum(len(paths) for paths in 
cwe_paths.values()) + total_after = 0 + for cwe_num, paths in cwe_paths.items(): + filtered_paths = [p for p in paths if should_include_path(p, filters, target_url)] + if filtered_paths: + filtered[cwe_num] = filtered_paths + total_after += len(filtered_paths) + if len(filtered_paths) < len(paths): + logger.debug(f"CWE-{cwe_num}: filtered {len(paths)} -> {len(filtered_paths)} paths") + if total_before > total_after: + logger.info( + f"Path filtering: {total_before} -> {total_after} paths ({total_before - total_after} filtered out)" + ) + return filtered + + +def extract_base_context_config(context: dict) -> dict: + """Extract inheritable config from context""" + config = {} + for key in ["authentication", "sessionManagement", "technology", "structure", "users"]: + if key in context: + config[key] = context[key] + return config + + +def extract_cwe_policies(template: dict) -> dict[int, str]: + """Extract CWE number to policy name mapping from template""" + policies = {} + for job in template["jobs"]: + if job.get("type") == "activeScan-policy": + policy_name = job["parameters"]["name"] + if policy_name.startswith("policy-CWE-"): + cwe_num = int(policy_name.replace("policy-CWE-", "")) + policies[cwe_num] = policy_name + + logger.info(f"Found {len(policies)} CWE policies in template") + return policies + + +def report_policy_coverage(policies: dict[int, str], cwe_paths: dict[int, list[str]]) -> None: + """Report which policies have matching SARIF paths""" + for cwe_num, policy_name in sorted(policies.items()): + if cwe_num in cwe_paths: + logger.info(f"{policy_name}: {len(cwe_paths[cwe_num])} paths found in SARIF") + else: + logger.info(f"{policy_name}: no paths found in SARIF") + + +def parse_sarif_for_cwe_paths(sarif_path: Path, available_cwes: set[int]) -> dict[int, list[str]]: + """Parse SARIF and return CWE to paths mapping""" + with open(sarif_path) as f: + data = json.load(f) + + rule_metadata = {} + for rule in data["runs"][0]["tool"]["driver"]["rules"]: + 
rule_cwes = set() + for tag in rule["properties"]["tags"]: + if tag.startswith("CWE-"): + cwe_num = int(tag.replace("CWE-", "")) + if cwe_num in available_cwes: + rule_cwes.add(cwe_num) + + if rule_cwes: + rule_metadata[rule["id"]] = rule_cwes + + cwe_paths = defaultdict(list) + for result in data["runs"][0]["results"]: + rule_id = result["ruleId"] + if rule_id not in rule_metadata: + continue + + cwes = rule_metadata[rule_id] + + for related in result.get("relatedLocations", []): + for loc in related.get("logicalLocations", []): + fqn = loc.get("fullyQualifiedName", "") + if " " not in fqn: + continue + + _, path = fqn.split(" ", 1) + for cwe in cwes: + if path not in cwe_paths[cwe]: + cwe_paths[cwe].append(path) + + logger.info(f"Parsed {len(cwe_paths)} CWE categories from SARIF") + return dict(cwe_paths) + + +def create_cwe_context(cwe_num: int, paths: list[str], target_url: str, base_config: dict) -> dict: + """Create context definition for a CWE with inherited config""" + context = { + "name": f"context-CWE-{cwe_num}", + "urls": [f"{target_url}/NONEXISTENT"], + "includePaths": [f"{target_url}{path}$" for path in paths], + "excludePaths": [], + } + + context.update(base_config) + return context + + +def create_activescan_job(cwe_num: int, policy_name: str) -> dict: + """Create activeScan job for CWE context""" + return {"type": "activeScan", "parameters": {"context": f"context-CWE-{cwe_num}", "policy": policy_name}} + + +def run_opentaint_scan(project_path: Path) -> Path: + """Run OpenTaint scan and return SARIF path""" + project_name = project_path.name + output_sarif = Path(f"scan-results/{project_name}.sarif") + output_sarif = output_sarif.resolve() + logger.info(f"Running OpenTaint scan on: {project_path}") + output_sarif.parent.mkdir(parents=True, exist_ok=True) + cmd = ["opentaint", "scan", "--output", str(output_sarif), str(project_path.resolve())] + try: + result = subprocess.run(cmd, capture_output=True, text=True, check=True) + 
logger.debug(f"OpenTaint output:\n{result.stdout}") + if not output_sarif.exists(): + raise RuntimeError(f"OpenTaint scan completed but SARIF file not found at: {output_sarif}") + logger.info(f"OpenTaint scan completed: {output_sarif}") + return output_sarif + except subprocess.CalledProcessError as e: + logger.error(f"OpenTaint scan failed with exit code {e.returncode}") + if e.stderr: + logger.error(f"stderr: {e.stderr}") + raise RuntimeError(f"opentaint scan failed with exit code {e.returncode}") from e + except FileNotFoundError as e: + raise RuntimeError("opentaint command not found. Make sure opentaint is installed and in PATH") from e + + +def filter_sarif_by_base(base_sarif_path: Path, new_sarif_path: Path) -> Path: + """Filter new SARIF to only include results not in base SARIF""" + logger.info(f"Filtering SARIF against base: {base_sarif_path}") + with open(base_sarif_path) as f: + base_sarif = json.load(f) + with open(new_sarif_path) as f: + new_sarif = json.load(f) + + base_hashes = set() + for result in base_sarif["runs"][0]["results"]: + base_hashes.add(result["partialFingerprints"]["vulnerabilityWithTraceHash/v1"]) + filtered_sarif = copy.deepcopy(new_sarif) + filtered_results = [] + for result in new_sarif["runs"][0]["results"]: + hash_value = result["partialFingerprints"]["vulnerabilityWithTraceHash/v1"] + if hash_value not in base_hashes: + filtered_results.append(result) + filtered_sarif["runs"][0]["results"] = filtered_results + output_path = new_sarif_path.parent / f"filtered-{new_sarif_path.name}" + output_path.parent.mkdir(parents=True, exist_ok=True) + with open(output_path, "w") as f: + json.dump(filtered_sarif, f, indent=2) + logger.info(f"Filtered SARIF: {len(new_sarif['runs'][0]['results'])} -> {len(filtered_results)} results") + logger.info(f"Saved filtered SARIF to: {output_path}") + return output_path + + +def generate_automation_yaml( + template_path: Path, sarif_path: Path, output_path: Path, target_url: str, context_name: str | None 
+) -> None: + """Generate ZAP automation YAML from template and SARIF""" + logger.info(f"Loading template: {template_path}") + template = load_template(template_path) + validate_template(template) + template = ensure_report_jobs(template) + check_template_warnings(template) + check_policy_rules(template) + selected_context, _ = select_context(template, context_name) + base_config = extract_base_context_config(selected_context) + path_filters = extract_path_filters(selected_context) + policies = extract_cwe_policies(template) + available_cwes = set(policies.keys()) + logger.info(f"Parsing SARIF: {sarif_path}") + cwe_paths = parse_sarif_for_cwe_paths(sarif_path, available_cwes) + report_policy_coverage(policies, cwe_paths) + filtered_cwe_paths = filter_cwe_paths(cwe_paths, path_filters, target_url) + if not filtered_cwe_paths: + logger.warning("No CWE contexts will be generated (no paths matched filters or found in SARIF)") + + cwe_contexts = [] + scan_jobs = [] + for cwe_num in sorted(filtered_cwe_paths.keys()): + if cwe_num not in policies: + logger.warning(f"No policy found for CWE-{cwe_num}, skipping") + continue + paths = filtered_cwe_paths[cwe_num] + context = create_cwe_context(cwe_num, paths, target_url, base_config) + cwe_contexts.append(context) + job = create_activescan_job(cwe_num, policies[cwe_num]) + scan_jobs.append(job) + logger.debug(f"Created context and job for CWE-{cwe_num} with {len(paths)} paths") + + output_contexts = [selected_context, *cwe_contexts] + jobs_without_activescan = [job for job in template["jobs"] if job.get("type") != "activeScan"] + insert_idx = len(jobs_without_activescan) + for i in range(len(jobs_without_activescan) - 1, -1, -1): + if jobs_without_activescan[i].get("type") == "activeScan-policy": + insert_idx = i + 1 + break + + output_jobs = jobs_without_activescan[:insert_idx] + scan_jobs + jobs_without_activescan[insert_idx:] + output = { + "env": {"contexts": output_contexts, **{k: v for k, v in template["env"].items() if 
k != "contexts"}}, + "jobs": output_jobs, + } + output_path.parent.mkdir(parents=True, exist_ok=True) + with open(output_path, "w") as f: + yaml.dump(output, f, default_flow_style=False, sort_keys=False) + logger.info(f"Generated automation YAML: {output_path}") + logger.info(f"Total contexts: {len(output_contexts)} (1 base + {len(cwe_contexts)} CWE)") + logger.info(f"Total jobs: {len(output_jobs)} ({len(scan_jobs)} activeScan jobs added)") + + +def main(): + parser = argparse.ArgumentParser(description="Generate ZAP Automation Framework YAML from SARIF") + + input_group = parser.add_mutually_exclusive_group(required=True) + input_group.add_argument("--sarif", type=Path, help="SARIF file path") + input_group.add_argument("--project-path", type=Path, help="Project path to scan with OpenTaint") + + parser.add_argument("--template", type=Path, required=True, help="ZAP automation template with policies") + parser.add_argument("--target", required=True, help="Target base URL") + parser.add_argument("--output", type=Path, default=Path("zap-automation.yaml"), help="Output YAML file path") + parser.add_argument("--base-sarif", type=Path, help="Base SARIF for differential scanning (e.g., from main branch)") + parser.add_argument("--context-name", help="Context name to use from template (default: first context)") + parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging") + + args = parser.parse_args() + if args.verbose: + logging.getLogger().setLevel(logging.DEBUG) + if not args.template.exists(): + logger.error(f"Template file not found: {args.template}") + return 1 + sarif_path = run_opentaint_scan(args.project_path) if args.project_path else args.sarif + if not sarif_path.exists(): + logger.error(f"SARIF file not found: {sarif_path}") + return 1 + if args.base_sarif: + if not args.base_sarif.exists(): + logger.error(f"Base SARIF file not found: {args.base_sarif}") + return 1 + sarif_path = filter_sarif_by_base(args.base_sarif, sarif_path) 
+ + try: + generate_automation_yaml( + template_path=args.template, + sarif_path=sarif_path, + output_path=args.output, + target_url=args.target, + context_name=args.context_name, + ) + return 0 + except Exception as e: + logger.error(f"Failed to generate automation YAML: {e}", exc_info=args.verbose) + return 1 + + +if __name__ == "__main__": + exit(main()) diff --git a/github/zap/pyproject.toml b/github/zap/pyproject.toml new file mode 100644 index 000000000..3ca775590 --- /dev/null +++ b/github/zap/pyproject.toml @@ -0,0 +1,42 @@ +[project] +name = "opentaint-zap-action" +version = "0.1.0" +description = "Action for OpenTaint and ZAP integration" +readme = "README.md" +requires-python = ">=3.12" +dependencies = [ + "pyyaml>=6.0.3", +] + +[dependency-groups] +dev = [ + "ruff>=0.11.0", +] + +[tool.ruff] +line-length = 120 +target-version = "py312" + +[tool.ruff.lint] +select = [ + "E", + "W", + "F", + "I", + "B", + "C4", + "UP", + "SIM", + "S", + "RUF", +] +ignore = [ + "E501", + "S603", + "S607", +] + +[tool.ruff.format] +quote-style = "double" +indent-style = "space" +line-ending = "auto" diff --git a/github/zap/template.yaml b/github/zap/template.yaml new file mode 100644 index 000000000..f2ce8b1f1 --- /dev/null +++ b/github/zap/template.yaml @@ -0,0 +1,306 @@ +# ZAP Automation Framework Template +# This template defines the base configuration for ZAP scans +# The action will automatically generate CWE-specific contexts and scan jobs based on OpenTaint findings + +env: + # Define application contexts (at least one required) + # The first context will be used as the base for generated CWE contexts + contexts: + - name: openapi-import + urls: + - http://localhost:8081 + includePaths: [ ] + excludePaths: [ ] + + # Global parameters for the automation framework + parameters: + failOnError: false + failOnWarning: false + continueOnFailure: true + progressToStdout: true + +jobs: + # 1. 
API Import (required) + # Import your API definition to discover endpoints + - type: openapi + parameters: + context: openapi-import + targetUrl: http://localhost:8081 + apiUrl: http://localhost:8081/v3/api-docs + + # 2. Active Scan Configuration (optional but recommended) + - type: activeScan-config + parameters: + maxRuleDurationInMins: 0 + maxScanDurationInMins: 0 + maxAlertsPerRule: 0 + threadPerHost: 40 + handleAntiCSRFTokens: true + injectPluginIdInHeader: true + inputVectors: + urlQueryStringAndDataDrivenNodes: + enabled: true + addParam: false + odata: true + postData: + enabled: true + multiPartFormData: true + xml: true + json: + enabled: true + scanNullValues: false + googleWebToolkit: false + directWebRemoting: false + urlPath: true + httpHeaders: + enabled: true + allRequests: false + cookieData: + enabled: true + encodeCookieValues: false + scripts: true + + # 3. CWE-Specific Policies (at least one required) + # Define policies for each CWE you want to scan + # Policy names MUST follow format: policy-CWE-{number} + # The action will only scan for CWEs that have matching policies + - type: activeScan-policy + parameters: + name: policy-CWE-22 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 6 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-78 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 10048 + threshold: MEDIUM + strength: INSANE + - id: 40045 + threshold: MEDIUM + strength: INSANE + - id: 40048 + threshold: MEDIUM + strength: INSANE + - id: 90020 + threshold: MEDIUM + strength: INSANE + - id: 90037 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-79 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 40012 + threshold: MEDIUM + strength: INSANE + - id: 40026 + threshold: MEDIUM + strength: INSANE + - id: 40031 + threshold: MEDIUM 
+ strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-89 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 40018 + threshold: MEDIUM + strength: INSANE + - id: 40019 + threshold: MEDIUM + strength: INSANE + - id: 40020 + threshold: MEDIUM + strength: INSANE + - id: 40021 + threshold: MEDIUM + strength: INSANE + - id: 40022 + threshold: MEDIUM + strength: INSANE + - id: 40027 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-94 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 40028 + threshold: MEDIUM + strength: INSANE + - id: 40032 + threshold: MEDIUM + strength: INSANE + - id: 90019 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-113 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 40003 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-117 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 40043 + threshold: MEDIUM + strength: INSANE + - id: 40047 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-352 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 20012 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-601 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 20019 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-611 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 90023 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-643 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 90021 + 
threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-917 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 90025 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-918 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 40046 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-943 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 40033 + threshold: MEDIUM + strength: INSANE + - id: 90039 + threshold: MEDIUM + strength: INSANE + - type: activeScan-policy + parameters: + name: policy-CWE-1336 + policyDefinition: + defaultStrength: INSANE + defaultThreshold: 'OFF' + rules: + - id: 90035 + threshold: MEDIUM + strength: INSANE + - id: 90036 + threshold: MEDIUM + strength: INSANE + # 4. Report Jobs (optional - will be added automatically if missing) + # Reports are used for vulnerability confirmation and artifact upload + # reportDir will be automatically normalized to /zap/wrk/zap-output + + # JSON report (required for SARIF filtering) + - type: report + parameters: + template: traditional-json + reportDir: /zap/wrk/zap-output + reportFile: 'opentaint_zap_scan_results' + reportTitle: OpenTaint + ZAP Scan Report + reportDescription: Automated security scan results for filtering sarif + risks: + - high + - medium + confidences: + - high + - medium + - low + + - type: report + parameters: + template: traditional-html + reportDir: /zap/wrk/zap-output + reportFile: '{{yyyy-MM-dd}}-ZAP-Report-[[site]]' + reportTitle: OpenTaint + ZAP scan results + risks: + - high + - medium + confidences: + - high + - medium + - low + + # 5. 
exitStatus job + # Force the ZAP container to always exit with code 0, even on errors + # Without this, a non-zero code would fail the entire GitHub Action + - type: exitStatus + parameters: + okExitValue: 0 + errorExitValue: 0 + warnExitValue: 0 diff --git a/github/zap/uv.lock b/github/zap/uv.lock new file mode 100644 index 000000000..49bf6dca2 --- /dev/null +++ b/github/zap/uv.lock @@ -0,0 +1,93 @@ +version = 1 +revision = 3 +requires-python = ">=3.12" + +[[package]] +name = "opentaint-zap-action" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "pyyaml" }, +] + +[package.dev-dependencies] +dev = [ + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [{ name = "pyyaml", specifier = ">=6.0.3" }] + +[package.metadata.requires-dev] +dev = [{ name = "ruff", specifier = ">=0.11.0" }] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.5" +source = { registry 
= "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/77/9b/840e0039e65fcf12758adf684d2289024d6140cde9268cc59887dc55189c/ruff-0.15.5.tar.gz", hash = "sha256:7c3601d3b6d76dce18c5c824fc8d06f4eef33d6df0c21ec7799510cde0f159a2", size = 4574214, upload-time = "2026-03-05T20:06:34.946Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/20/5369c3ce21588c708bcbe517a8fbe1a8dfdb5dfd5137e14790b1da71612c/ruff-0.15.5-py3-none-linux_armv6l.whl", hash = "sha256:4ae44c42281f42e3b06b988e442d344a5b9b72450ff3c892e30d11b29a96a57c", size = 10478185, upload-time = "2026-03-05T20:06:29.093Z" }, + { url = "https://files.pythonhosted.org/packages/44/ed/e81dd668547da281e5dce710cf0bc60193f8d3d43833e8241d006720e42b/ruff-0.15.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6edd3792d408ebcf61adabc01822da687579a1a023f297618ac27a5b51ef0080", size = 10859201, upload-time = "2026-03-05T20:06:32.632Z" }, + { url = "https://files.pythonhosted.org/packages/c4/8f/533075f00aaf19b07c5cd6aa6e5d89424b06b3b3f4583bfa9c640a079059/ruff-0.15.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:89f463f7c8205a9f8dea9d658d59eff49db05f88f89cc3047fb1a02d9f344010", size = 10184752, upload-time = "2026-03-05T20:06:40.312Z" }, + { url = "https://files.pythonhosted.org/packages/66/0e/ba49e2c3fa0395b3152bad634c7432f7edfc509c133b8f4529053ff024fb/ruff-0.15.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba786a8295c6574c1116704cf0b9e6563de3432ac888d8f83685654fe528fd65", size = 10534857, upload-time = "2026-03-05T20:06:19.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/71/39234440f27a226475a0659561adb0d784b4d247dfe7f43ffc12dd02e288/ruff-0.15.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd4b801e57955fe9f02b31d20375ab3a5c4415f2e5105b79fb94cf2642c91440", size = 10309120, upload-time = "2026-03-05T20:06:00.435Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/87/4140aa86a93df032156982b726f4952aaec4a883bb98cb6ef73c347da253/ruff-0.15.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391f7c73388f3d8c11b794dbbc2959a5b5afe66642c142a6effa90b45f6f5204", size = 11047428, upload-time = "2026-03-05T20:05:51.867Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f7/4953e7e3287676f78fbe85e3a0ca414c5ca81237b7575bdadc00229ac240/ruff-0.15.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc18f30302e379fe1e998548b0f5e9f4dff907f52f73ad6da419ea9c19d66c8", size = 11914251, upload-time = "2026-03-05T20:06:22.887Z" }, + { url = "https://files.pythonhosted.org/packages/77/46/0f7c865c10cf896ccf5a939c3e84e1cfaeed608ff5249584799a74d33835/ruff-0.15.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc6e7f90087e2d27f98dc34ed1b3ab7c8f0d273cc5431415454e22c0bd2a681", size = 11333801, upload-time = "2026-03-05T20:05:57.168Z" }, + { url = "https://files.pythonhosted.org/packages/d3/01/a10fe54b653061585e655f5286c2662ebddb68831ed3eaebfb0eb08c0a16/ruff-0.15.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cb7169f53c1ddb06e71a9aebd7e98fc0fea936b39afb36d8e86d36ecc2636a", size = 11206821, upload-time = "2026-03-05T20:06:03.441Z" }, + { url = "https://files.pythonhosted.org/packages/7a/0d/2132ceaf20c5e8699aa83da2706ecb5c5dcdf78b453f77edca7fb70f8a93/ruff-0.15.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9b037924500a31ee17389b5c8c4d88874cc6ea8e42f12e9c61a3d754ff72f1ca", size = 11133326, upload-time = "2026-03-05T20:06:25.655Z" }, + { url = "https://files.pythonhosted.org/packages/72/cb/2e5259a7eb2a0f87c08c0fe5bf5825a1e4b90883a52685524596bfc93072/ruff-0.15.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:65bb414e5b4eadd95a8c1e4804f6772bbe8995889f203a01f77ddf2d790929dd", size = 10510820, upload-time = "2026-03-05T20:06:37.79Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/20/b67ce78f9e6c59ffbdb5b4503d0090e749b5f2d31b599b554698a80d861c/ruff-0.15.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d20aa469ae3b57033519c559e9bc9cd9e782842e39be05b50e852c7c981fa01d", size = 10302395, upload-time = "2026-03-05T20:05:54.504Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e5/719f1acccd31b720d477751558ed74e9c88134adcc377e5e886af89d3072/ruff-0.15.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:15388dd28c9161cdb8eda68993533acc870aa4e646a0a277aa166de9ad5a8752", size = 10754069, upload-time = "2026-03-05T20:06:06.422Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/d1db14469e32d98f3ca27079dbd30b7b44dbb5317d06ab36718dee3baf03/ruff-0.15.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b30da330cbd03bed0c21420b6b953158f60c74c54c5f4c1dabbdf3a57bf355d2", size = 11304315, upload-time = "2026-03-05T20:06:10.867Z" }, + { url = "https://files.pythonhosted.org/packages/28/3a/950367aee7c69027f4f422059227b290ed780366b6aecee5de5039d50fa8/ruff-0.15.5-py3-none-win32.whl", hash = "sha256:732e5ee1f98ba5b3679029989a06ca39a950cced52143a0ea82a2102cb592b74", size = 10551676, upload-time = "2026-03-05T20:06:13.705Z" }, + { url = "https://files.pythonhosted.org/packages/b8/00/bf077a505b4e649bdd3c47ff8ec967735ce2544c8e4a43aba42ee9bf935d/ruff-0.15.5-py3-none-win_amd64.whl", hash = "sha256:821d41c5fa9e19117616c35eaa3f4b75046ec76c65e7ae20a333e9a8696bc7fe", size = 11678972, upload-time = "2026-03-05T20:06:45.379Z" }, + { url = "https://files.pythonhosted.org/packages/fe/4e/cd76eca6db6115604b7626668e891c9dd03330384082e33662fb0f113614/ruff-0.15.5-py3-none-win_arm64.whl", hash = "sha256:b498d1c60d2fe5c10c45ec3f698901065772730b411f164ae270bb6bfcc4740b", size = 10965572, upload-time = "2026-03-05T20:06:16.984Z" }, +]