Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 46 additions & 15 deletions .github/workflows/soc-packs-pr-gate.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,16 @@ name: SOC Packs — PR Gate
# ─────────────────────────────────────────────────────────────────
# Runs on every PR targeting main. All jobs must pass before merge.
#
# Job 1 · scan — no customer data in the diff
# Job 2 · detect — find packs with version bumps
# Job 3 · validate — demisto-sdk normalize + validate
# Job 4 · preflight — xsoar_config.json format + doc URL checks
# Job 5 · prerelease — build zip + upload as PR prerelease tag
# e.g. SocFrameworkProofPointTap-v1.1.1-pr647
# Also uploads a modified xsoar_config.json
# as a release asset with the prerelease zip URL.
# Job 6 · deploy-dev — deploy to test tenant using prerelease assets
# Job 1 · scan — no customer data in the diff
# Job 2 · detect — find packs with version bumps
# Job 3 · json-integrity — pack_catalog.json schema + xsoar_config.json validity
# Job 4 · validate — demisto-sdk normalize + validate
# Job 5 · preflight — xsoar_config.json format + doc URL checks
# Job 6 · prerelease — build zip + upload as PR prerelease tag
# e.g. SocFrameworkProofPointTap-v1.1.1-pr647
# Also uploads a modified xsoar_config.json
# as a release asset with the prerelease zip URL.
# Job 7 · deploy-dev — deploy to test tenant using prerelease assets
#
# After merge, soc-packs-release.yml creates the real immutable tag
# and the prerelease is superseded.
Expand Down Expand Up @@ -140,10 +141,40 @@ jobs:
fh.write(f"has_changes={'true' if changed else 'false'}\n")
PY

# ── JOB 3: VALIDATE ─────────────────────────────────────────────
# ── JOB 3: JSON INTEGRITY ───────────────────────────────────────
# Runs on every PR regardless of version bumps.
# pack_catalog.json is a repo-level file — always checked.
# xsoar_config.json is scoped to changed packs when available,
# otherwise all packs are checked as a safety net.
json-integrity:
name: JSON integrity — catalog + xsoar_config
needs: detect
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- uses: actions/setup-python@v5
with:
python-version: "3.10"

- name: Validate pack_catalog.json
run: |
python tools/validate_pack_catalog.py

- name: Validate xsoar_config.json files
env:
CHANGED_PACKS: ${{ needs.detect.outputs.packs }}
run: |
if [ -n "$CHANGED_PACKS" ]; then
python tools/validate_xsoar_configs.py --packs "$CHANGED_PACKS"
else
python tools/validate_xsoar_configs.py
fi

# ── JOB 4: VALIDATE ─────────────────────────────────────────────
validate:
name: Validate — demisto-sdk
needs: detect
needs: [detect, json-integrity]
if: needs.detect.outputs.has_changes == 'true'
runs-on: ubuntu-latest
steps:
Expand Down Expand Up @@ -188,10 +219,10 @@ jobs:
done
exit $FAILED

# ── JOB 4: PREFLIGHT xsoar_config.json ──────────────────────────
# ── JOB 5: PREFLIGHT xsoar_config.json ──────────────────────────
preflight:
name: Preflight — xsoar_config.json
needs: [detect, validate]
needs: [detect, json-integrity, validate]
if: needs.detect.outputs.has_changes == 'true'
runs-on: ubuntu-latest
steps:
Expand All @@ -207,7 +238,7 @@ jobs:
run: |
python tools/preflight_xsoar_config.py --packs "$CHANGED_PACKS"

# ── JOB 5: BUILD PRERELEASE ZIP ─────────────────────────────────
# ── JOB 6: BUILD PRERELEASE ZIP ─────────────────────────────────
prerelease:
name: Build prerelease zip
needs: [detect, preflight]
Expand Down Expand Up @@ -332,7 +363,7 @@ jobs:
fh.write(f"config_url={config_url}\n")
PY

# ── JOB 6: DEPLOY TO TEST TENANT ────────────────────────────────
# ── JOB 7: DEPLOY TO TEST TENANT ────────────────────────────────
deploy-dev:
name: Deploy — dev tenant (pre-merge)
needs: [detect, prerelease]
Expand Down
39 changes: 29 additions & 10 deletions tools/pack_prep.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,34 +14,53 @@ def main():
print("Usage: python3 tools/pack_prep.py Packs/<PackName>")
sys.exit(1)

pack = sys.argv[1]
pack_path = Path(sys.argv[1])

if not Path(pack).exists():
print(f"Error: Pack path not found: {pack}")
if not pack_path.exists():
print(f"Error: Pack path not found: {pack_path}")
sys.exit(1)

print(f"\n=== Normalizing rule IDs: {pack} ===\n")
pack_name = pack_path.name
failed = False

# ── Step 1: Normalize rule IDs and adopted flags ─────────────────────────
print(f"\n=== Normalizing rule IDs: {pack_path} ===\n")
subprocess.run(
[sys.executable, "tools/normalize_ruleid_adopted.py", "--root", pack, "--fix"]
[sys.executable, "tools/normalize_ruleid_adopted.py", "--root", str(pack_path), "--fix"]
)

# ── Step 2: Validate xsoar_config.json (if present) ──────────────────────
config_path = pack_path / "xsoar_config.json"
if config_path.exists():
print(f"\n=== Checking xsoar_config.json: {config_path} ===\n")
rc = subprocess.run(
[sys.executable, "tools/validate_xsoar_configs.py", "--packs", pack_name]
).returncode
if rc != 0:
print(f"xsoar_config.json is invalid — fix before uploading.")
failed = True
else:
print(f"\n--- No xsoar_config.json in {pack_path} — skipping config check ---")

# ── Step 3: demisto-sdk validate ─────────────────────────────────────────
output_dir = Path("output")
output_dir.mkdir(exist_ok=True)
error_log = output_dir / "sdk_errors.txt"

print(f"\n=== Validating: {pack} (output → {error_log}) ===\n")
print(f"\n=== Validating: {pack_path} (output → {error_log}) ===\n")
with open(error_log, "a") as log:
rc = subprocess.run(
["demisto-sdk", "validate", "-i", pack],
["demisto-sdk", "validate", "-i", str(pack_path)],
stdout=log, stderr=log
).returncode

if rc == 0:
print("Validation passed.")
print("SDK validation passed.")
else:
print(f"Validation errors written to {error_log}")
print(f"SDK validation errors written to {error_log}")
failed = True

sys.exit(rc)
sys.exit(1 if failed else 0)


if __name__ == "__main__":
Expand Down
101 changes: 101 additions & 0 deletions tools/validate_pack_catalog.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
#!/usr/bin/env python3
"""
validate_pack_catalog.py — CI gate for pack_catalog.json

Rules enforced:
1. File is valid JSON.
2. Top-level structure is {"packs": [...]}.
3. Every pack entry contains all required fields.
4. No required field value is null or empty string.
5. Extra/unknown fields are silently allowed (additive-safe).

Required fields (from canonical schema):
id, display_name, category, version, path, visible, xsoar_config

Exit 0 on success, 1 on any failure.
"""

import json
import sys
from pathlib import Path

# Default catalog location; CI runs from the repo root, where the file lives.
CATALOG_PATH = Path("pack_catalog.json")

# Canonical schema: every pack entry must carry all of these fields.
REQUIRED_FIELDS = [
    "id",
    "display_name",
    "category",
    "version",
    "path",
    "visible",
    "xsoar_config",
]

# Fields where an empty string is NOT acceptable (bool/str must be non-empty)
NON_EMPTY_FIELDS = {"id", "display_name", "category", "version", "path", "xsoar_config"}


def main(catalog_path: Path = CATALOG_PATH) -> int:
    """Validate *catalog_path* against the canonical pack-catalog schema.

    Checks, in order: the file exists and parses as JSON; the top level is
    ``{"packs": [...]}``; every entry is an object carrying all
    REQUIRED_FIELDS; no required value is null; and no NON_EMPTY_FIELDS
    value is an empty (or whitespace-only) string.  Unknown extra fields
    are silently allowed so the schema stays additive-safe.

    Args:
        catalog_path: Catalog file to check.  Defaults to
            ``pack_catalog.json`` in the working directory, matching the
            existing CI invocation, so ``main()`` behaves as before.

    Returns:
        0 when every rule passes, 1 on the first structural failure or when
        any per-entry errors were collected.
    """
    if not catalog_path.exists():
        print(f"ERROR: {catalog_path} not found.")
        return 1

    # ── Rule 1: valid JSON ───────────────────────────────────────────────────
    try:
        data = json.loads(catalog_path.read_text(encoding="utf-8"))
    except json.JSONDecodeError as exc:
        print(f"ERROR: {catalog_path} is not valid JSON — {exc}")
        return 1

    print(f"OK JSON is valid ({catalog_path})")

    # ── Rule 2: top-level shape ──────────────────────────────────────────────
    if not isinstance(data, dict) or "packs" not in data:
        print('ERROR: top-level structure must be {"packs": [...]}')
        return 1

    packs = data["packs"]
    if not isinstance(packs, list):
        print('ERROR: "packs" must be a JSON array.')
        return 1

    print(f"OK packs array present ({len(packs)} entries)")

    # ── Rules 3 & 4: per-entry field checks ─────────────────────────────────
    # Collect every problem (rather than failing fast) so one CI run reports
    # the full list of fixes needed.
    errors: list[str] = []

    for idx, entry in enumerate(packs):
        if not isinstance(entry, dict):
            errors.append(f"  pack[{idx}]: entry is not an object")
            continue

        # Use the pack id for readable messages; fall back to the index when
        # the id field itself is absent.
        pack_id = entry.get("id", f"<index {idx}>")
        prefix = f"  pack '{pack_id}'"

        for field in REQUIRED_FIELDS:
            if field not in entry:
                errors.append(f"{prefix}: missing required field '{field}'")
                continue

            value = entry[field]

            if value is None:
                errors.append(f"{prefix}: field '{field}' is null")
                continue

            # Empty strings are rejected only where a non-empty value is
            # required; booleans (e.g. "visible") are exempt by design.
            if field in NON_EMPTY_FIELDS and isinstance(value, str) and value.strip() == "":
                errors.append(f"{prefix}: field '{field}' is empty string")

    if errors:
        print(f"\nFAIL {len(errors)} error(s) found in {catalog_path}:")
        for e in errors:
            print(e)
        return 1

    print(f"OK all {len(packs)} pack entries pass required-field checks")
    print("\nPASS pack_catalog.json is valid.")
    return 0


if __name__ == "__main__":
    sys.exit(main())
109 changes: 109 additions & 0 deletions tools/validate_xsoar_configs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
#!/usr/bin/env python3
"""
validate_xsoar_configs.py — CI gate for xsoar_config.json in every pack

Rules enforced:
1. Every pack directory that contains an xsoar_config.json must be valid JSON.
2. Optionally scope to a comma-separated list of pack names via --packs.
When --packs is omitted, ALL packs under PACKS_DIR are checked.

Exit 0 on success, 1 if any file fails to parse.

Usage:
# All packs
python tools/validate_xsoar_configs.py

# Specific packs (CI: pass changed packs from detect job)
python tools/validate_xsoar_configs.py --packs soc-optimization-unified,soc-framework-nist-ir
"""

import argparse
import json
import os
import sys
from pathlib import Path

PACKS_DIR = Path(os.environ.get("PACKS_DIR", "Packs"))


def validate_file(path: Path) -> str | None:
"""Return an error string, or None if the file is valid JSON."""
try:
json.loads(path.read_text(encoding="utf-8"))
return None
except json.JSONDecodeError as exc:
return str(exc)
except OSError as exc:
return f"cannot read file — {exc}"


def collect_pack_dirs(packs_filter: list[str] | None) -> list[Path]:
    """Resolve the pack directories to inspect.

    With a filter, each named pack is looked up under PACKS_DIR; names whose
    directory is missing produce a warning and are dropped.  Without a
    filter, every subdirectory of PACKS_DIR is returned in sorted order.
    Exits the process with status 1 when PACKS_DIR itself does not exist.
    """
    if not PACKS_DIR.is_dir():
        print(f"ERROR: packs directory '{PACKS_DIR}' not found.")
        sys.exit(1)

    if not packs_filter:
        return [entry for entry in sorted(PACKS_DIR.iterdir()) if entry.is_dir()]

    selected: list[Path] = []
    for pack_name in packs_filter:
        candidate = PACKS_DIR / pack_name
        if candidate.is_dir():
            selected.append(candidate)
        else:
            # Warn but do not fail — pack may have been deleted
            print(f"WARN: pack directory '{candidate}' not found, skipping.")
    return selected


def main() -> int:
    """CLI entry point: validate every in-scope xsoar_config.json.

    Returns 0 when all found configs parse (or none exist), 1 when at
    least one file is invalid.
    """
    parser = argparse.ArgumentParser(description="Validate xsoar_config.json files.")
    parser.add_argument(
        "--packs",
        default="",
        help="Comma-separated list of pack names to check (default: all).",
    )
    args = parser.parse_args()

    # An empty --packs value (or one that is all blanks) means "check all".
    names = [token.strip() for token in args.packs.split(",") if token.strip()]
    pack_dirs = collect_pack_dirs(names or None)

    if not pack_dirs:
        print("No pack directories found — nothing to validate.")
        return 0

    seen = 0
    failures: list[tuple[Path, str]] = []

    for pack_dir in pack_dirs:
        config_path = pack_dir / "xsoar_config.json"
        if not config_path.exists():
            # Not every pack is required to have one; skip silently.
            continue

        seen += 1
        problem = validate_file(config_path)
        if problem is None:
            print(f"OK {config_path}")
        else:
            failures.append((config_path, problem))
            print(f"FAIL {config_path}: {problem}")

    if seen == 0:
        print("No xsoar_config.json files found — nothing to validate.")
        return 0

    print(f"\n{seen} file(s) checked.")

    if failures:
        print(f"\nFAIL {len(failures)} invalid xsoar_config.json file(s):")
        for bad_path, message in failures:
            print(f"  {bad_path}: {message}")
        return 1

    print("PASS all xsoar_config.json files are valid JSON.")
    return 0


if __name__ == "__main__":
    sys.exit(main())
Loading