Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -98,19 +98,32 @@ conviso --help
- Vulnerabilities: `python -m conviso.app vulns list --company-id 443 --severities HIGH,CRITICAL --asset-tags cloud --all`
- Vulnerabilities (last 7 days): `python -m conviso.app vulns list --company-id 443 --days-back 7 --severities HIGH,CRITICAL --all`
- Vulnerabilities by author: `python -m conviso.app vulns list --company-id 443 --author "Fernando" --all`
- Vulnerabilities with local free-text search: `python -m conviso.app vulns list --company-id 443 --all --grep "jwt"`
- Vulnerabilities with local field filter (auto deep for deep fields): `python -m conviso.app vulns list --company-id 443 --all --contains codeSnippet=eval( --contains fileName=app.py`
- Vulnerabilities (DAST/WEB) search by request/response: `python -m conviso.app vulns list --company-id 443 --types DAST_FINDING,WEB_VULNERABILITY --all --contains request=Authorization --contains response=stacktrace`
- Vulnerabilities with forced deep local search: `python -m conviso.app vulns list --company-id 443 --all --contains codeSnippet=eval( --deep-search --workers 8`
- Vulnerability timeline (by vulnerability ID): `python -m conviso.app vulns timeline --id 12345`
- Vulnerabilities timeline by project: `python -m conviso.app vulns timeline --company-id 443 --project-id 26102`
- Last user who changed vuln status: `python -m conviso.app vulns timeline --id 12345 --last-status-change-only`
- Last user who changed status per vuln in a project: `python -m conviso.app vulns timeline --company-id 443 --project-id 26102 --last-status-change-only`
- Last user who changed vuln status to ANALYSIS: `python -m conviso.app vulns timeline --id 12345 --status ANALYSIS --last-status-change-only`

Output options: `--format table|json|csv`, `--output path` to save JSON/CSV.
Global performance option: `--workers <N>` sets default parallel workers for all commands (e.g. `python -m conviso.app --workers 16 vulns list ...`).
Global output options:
- `--repeat-header <N>` repeats table headers every N rows (helps when scrolling long outputs).
- `--columns <col1,col2,...>` selects columns for table/csv output across commands (unknown columns are ignored).

Notes:
- `projects list --filter` supports `assignee=<email-or-name>` to filter by allocated analyst.
- GraphQL errors return exit code 1.
- Use `--all` on list commands to fetch every page.
- `--quiet` silences info logs; `--verbose` shows per-page requests when paginating.
- `--workers` controls default parallel workers across commands; command-level `--workers` (when available) overrides it.
- `--repeat-header` and `--columns` are global and apply to all commands using table/csv output.
- In `vulns list`, `--contains` for deep fields (`codeSnippet`, `fileName`, `vulnerableLine`, `request`, `response`, `url`, `method`, `parameters`) auto-enables deep search.
- `--deep-search` still exists as a manual override; `--resolve-snippet-urls` applies when deep search is active (manual or auto).
- When `--grep` or `--contains` is used, the list output includes a `Matched In` column indicating which fields triggered the match.
- On startup the CLI checks for a newer version (via https://raw.githubusercontent.com/convisolabs/conviso-cli/main/VERSION). Set `CONVISO_CLI_SKIP_UPDATE_CHECK=1` to skip.
- When offline, the check warns and you can force the comparison by setting `CONVISO_CLI_REMOTE_VERSION` (manual override).
- Upgrade: `python -m conviso.app upgrade` (equivalent to `conviso upgrade`) runs `git pull --ff-only` in the repository directory; if installed via pip, run `pip install .` after the pull.
Expand Down
2 changes: 1 addition & 1 deletion src/conviso/VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
0.3.3
0.3.4
16 changes: 16 additions & 0 deletions src/conviso/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,13 @@
from conviso.commands import sbom
from conviso.commands import tasks
from conviso.core.logger import log, set_verbosity
from conviso.core.concurrency import set_default_workers
from conviso.core.output_prefs import set_output_preferences
from conviso.core.notifier import info, warning
from conviso.core.version import check_for_updates, DEFAULT_REMOTE_URL, read_local_version
import subprocess
import os
from typing import Optional

app = typer.Typer(help="Conviso Platform CLI")

Expand All @@ -27,8 +30,21 @@ def main(
ctx: typer.Context,
quiet: bool = typer.Option(False, "--quiet", help="Silence non-error output."),
verbose: bool = typer.Option(False, "--verbose", help="Show verbose logs (GraphQL requests, etc.)."),
workers: int = typer.Option(8, "--workers", help="Default worker threads for parallel operations across commands."),
repeat_header_every: int = typer.Option(
0,
"--repeat-header",
help="Repeat table headers every N rows (global output option). 0 disables.",
),
columns: Optional[str] = typer.Option(
None,
"--columns",
help="Comma-separated columns for table/csv output (global output option). Example: --columns id,title,status",
),
):
set_verbosity(quiet=quiet, verbose=verbose)
set_default_workers(workers)
set_output_preferences(repeat_header_every=repeat_header_every, columns=columns)

if ctx.resilient_parsing:
return
Expand Down
15 changes: 13 additions & 2 deletions src/conviso/clients/client_graphql.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,17 @@
API_KEY = os.getenv("CONVISO_API_KEY")
DEFAULT_TIMEOUT = float(os.getenv("CONVISO_API_TIMEOUT", "30"))
DEFAULT_RETRIES = int(os.getenv("CONVISO_API_RETRIES", "2"))
POOL_CONNECTIONS = int(os.getenv("CONVISO_API_POOL_CONNECTIONS", "32"))
POOL_MAXSIZE = int(os.getenv("CONVISO_API_POOL_MAXSIZE", "64"))

SESSION = requests.Session()
ADAPTER = requests.adapters.HTTPAdapter(
pool_connections=POOL_CONNECTIONS,
pool_maxsize=POOL_MAXSIZE,
max_retries=0,
)
SESSION.mount("https://", ADAPTER)
SESSION.mount("http://", ADAPTER)


def graphql_request(query: str, variables: dict = None, log_request: bool = True, verbose_only: bool = False) -> dict:
Expand All @@ -39,7 +50,7 @@ def graphql_request(query: str, variables: dict = None, log_request: bool = True
last_exc = None
for attempt in range(DEFAULT_RETRIES + 1):
try:
response = requests.post(API_URL, json=payload, headers=headers, timeout=DEFAULT_TIMEOUT)
response = SESSION.post(API_URL, json=payload, headers=headers, timeout=DEFAULT_TIMEOUT)
response.raise_for_status()
data = response.json()
if "errors" in data:
Expand Down Expand Up @@ -92,7 +103,7 @@ def graphql_request_upload(

with open(file_path, "rb") as f:
files = {"0": f}
response = requests.post(
response = SESSION.post(
API_URL,
data={"operations": json.dumps(operations), "map": json.dumps(map_part)},
files=files,
Expand Down
108 changes: 49 additions & 59 deletions src/conviso/commands/assets.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,10 @@
"""

import typer
import time
from typing import Optional
from conviso.core.notifier import info, success, error, summary, warning
from conviso.core.notifier import info, success, error, summary, warning, timed_summary
from conviso.core.validators import validate_choice, validate_csv_choices
from conviso.clients.client_graphql import graphql_request
from conviso.schemas.assets_schema import schema
from conviso.core.output_manager import export_data
Expand All @@ -34,6 +36,8 @@ def list_assets(
):
"""List all assets for a specific company."""
info(f"Listing assets for company {company_id} (page {page}, limit {limit})...")
started_at = time.perf_counter()
fmt_lower = fmt.lower()

query = """
query Assets($companyId: ID!, $limit: Int, $page: Int, $search: AssetsSearch) {
Expand Down Expand Up @@ -62,6 +66,8 @@ def list_assets(

BUSINESS_IMPACT_ALLOWED = {"LOW", "MEDIUM", "HIGH", "NOT_DEFINED"}
ATTACK_SURFACE_ALLOWED = {"INTERNET_FACING", "INTERNAL", "NOT_DEFINED"}
THREAT_ALLOWED = {"CRITICAL", "HIGH", "MEDIUM", "LOW", "NOTIFICATION"}
DATA_CLASS_ALLOWED = {"PII", "PAYMENT_CARD_INDUSTRY", "NON_SENSITIVE", "NOT_DEFINED"}

def _split_list(value: Optional[str], upper: bool = False, allowed: Optional[set] = None, label: str = ""):
if not value:
Expand All @@ -78,12 +84,21 @@ def _split_list(value: Optional[str], upper: bool = False, allowed: Optional[set
items.append(v)
return items or None

try:
validated_business = validate_csv_choices(business_impact, BUSINESS_IMPACT_ALLOWED, "--business-impact")
validated_data_class = validate_csv_choices(data_classification, DATA_CLASS_ALLOWED, "--data-classification")
validated_attack_surface = validate_csv_choices(attack_surface, ATTACK_SURFACE_ALLOWED, "--attack-surface")
validated_threat = validate_csv_choices(threat, THREAT_ALLOWED, "--threat")
except ValueError as exc:
error(str(exc))
raise typer.Exit(code=1)

search_filters = {
"tags": _split_list(tags),
"businessImpact": _split_list(business_impact, upper=True, allowed=BUSINESS_IMPACT_ALLOWED, label="business impact"),
"dataClassification": _split_list(data_classification),
"exploitability": _split_list(attack_surface, upper=True, allowed=ATTACK_SURFACE_ALLOWED, label="attack surface"),
"threat": _split_list(threat, upper=True),
"businessImpact": validated_business,
"dataClassification": validated_data_class,
"exploitability": validated_attack_surface,
"threat": validated_threat,
}
if env_compromised:
search_filters["environmentCompromised"] = True
Expand Down Expand Up @@ -137,7 +152,9 @@ def _split_list(value: Optional[str], upper: bool = False, allowed: Optional[set
"LOW": "green",
"NOT_DEFINED": "dim",
}.get(str(impact).upper())
impact_display = f"[{impact_color}]{impact}[/{impact_color}]" if impact_color else impact
impact_display = impact
if fmt_lower == "table" and impact_color:
impact_display = f"[{impact_color}]{impact}[/{impact_color}]"

rows.append({
"id": a.get("id") or "",
Expand Down Expand Up @@ -165,7 +182,8 @@ def _split_list(value: Optional[str], upper: bool = False, allowed: Optional[set
title=f"Assets (Company {company_id}) - Page {page}/{total_pages or '?'}",
)

summary(f"{len(rows)} asset(s) listed out of {total_count or len(rows)} total.\n")
elapsed = time.perf_counter() - started_at
timed_summary(f"{len(rows)} asset(s) listed out of {total_count or len(rows)} total", elapsed)

except Exception as e:
error(f"Error listing assets: {e}")
Expand All @@ -187,33 +205,19 @@ def create_asset(
info(f"Creating new asset '{name}' for company {company_id}...")

BUSINESS_IMPACT_ALLOWED = {"LOW", "MEDIUM", "HIGH", "NOT_DEFINED"}
DATA_CLASS_ALLOWED = {"PERSONALLY_IDENTIFIABLE_INFORMATION", "PAYMENT_CARD_INDUSTRY", "NON_SENSITIVE", "NOT_DEFINED"}

def _parse_business(value: Optional[str]):
if not value:
return None
up = value.strip().upper()
if up not in BUSINESS_IMPACT_ALLOWED:
warning(f"Ignoring invalid business impact: {value}")
return None
return up

def _parse_data_class(value: Optional[str]):
if not value:
return None
vals = []
for raw in value.split(","):
v = raw.strip().upper()
if not v:
continue
if v not in DATA_CLASS_ALLOWED:
warning(f"Ignoring invalid data classification: {raw}")
continue
vals.append(v)
return vals or None
DATA_CLASS_ALLOWED = {"PII", "PERSONALLY_IDENTIFIABLE_INFORMATION", "PAYMENT_CARD_INDUSTRY", "NON_SENSITIVE", "NOT_DEFINED"}

parsed_business = _parse_business(business_impact)
parsed_data_class = _parse_data_class(data_classification)
try:
parsed_business = validate_choice(business_impact, BUSINESS_IMPACT_ALLOWED, "--business-impact")
parsed_data_class = validate_csv_choices(data_classification, DATA_CLASS_ALLOWED, "--data-classification")
except ValueError as exc:
error(str(exc))
raise typer.Exit(code=1)
if parsed_data_class:
parsed_data_class = [
"PERSONALLY_IDENTIFIABLE_INFORMATION" if v == "PII" else v
for v in parsed_data_class
]

mutation = """
mutation CreateAsset($input: CreateAssetInput!) {
Expand Down Expand Up @@ -266,33 +270,19 @@ def update_asset(
info(f"Updating asset ID {asset_id} in company {company_id}...")

BUSINESS_IMPACT_ALLOWED = {"LOW", "MEDIUM", "HIGH", "NOT_DEFINED"}
DATA_CLASS_ALLOWED = {"PERSONALLY_IDENTIFIABLE_INFORMATION", "PAYMENT_CARD_INDUSTRY", "NON_SENSITIVE", "NOT_DEFINED"}
DATA_CLASS_ALLOWED = {"PII", "PERSONALLY_IDENTIFIABLE_INFORMATION", "PAYMENT_CARD_INDUSTRY", "NON_SENSITIVE", "NOT_DEFINED"}

def _parse_business(value: Optional[str]):
if value is None:
return None
up = value.strip().upper()
if up not in BUSINESS_IMPACT_ALLOWED:
warning(f"Ignoring invalid business impact: {value}")
return None
return up

def _parse_data_class(value: Optional[str]):
if value is None:
return None
vals = []
for raw in value.split(","):
v = raw.strip().upper()
if not v:
continue
if v not in DATA_CLASS_ALLOWED:
warning(f"Ignoring invalid data classification: {raw}")
continue
vals.append(v)
return vals or None

parsed_business = _parse_business(business_impact)
parsed_data_class = _parse_data_class(data_classification)
try:
parsed_business = validate_choice(business_impact, BUSINESS_IMPACT_ALLOWED, "--business-impact")
parsed_data_class = validate_csv_choices(data_classification, DATA_CLASS_ALLOWED, "--data-classification")
except ValueError as exc:
error(str(exc))
raise typer.Exit(code=1)
if parsed_data_class:
parsed_data_class = [
"PERSONALLY_IDENTIFIABLE_INFORMATION" if v == "PII" else v
for v in parsed_data_class
]

mutation = """
mutation UpdateAsset($input: UpdateAssetInput!) {
Expand Down
12 changes: 7 additions & 5 deletions src/conviso/commands/bulk.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from rich.table import Table
from conviso.core.notifier import info, success, error, warning
from conviso.core.bulk_loader import load_csv, bulk_process, SkipRow, BulkResult
from conviso.core.concurrency import parallel_map
from conviso.clients.client_graphql import graphql_request
from conviso.core.logger import VERBOSE
from conviso.core.output_manager import console
Expand Down Expand Up @@ -575,11 +576,12 @@ def _create_asset(name: str) -> Optional[int]:
missing_assets = [nm for nm in asset_names if not _resolve_asset_by_name(nm)]
info(f"Assets referenced in SARIF: {len(asset_names)}. Resolved by name: {len(resolved_assets)}. Missing: {len(missing_assets)}.")

# Auto-create missing assets by name
for nm in missing_assets:
created = _create_asset(nm)
if created:
resolved_assets[nm] = created
# Auto-create missing assets by name (parallel fan-out)
if missing_assets:
created_ids = parallel_map(_create_asset, missing_assets)
for nm, created in zip(missing_assets, created_ids):
if created:
resolved_assets[nm] = created
# Recompute missing after creation attempts
missing_assets = [nm for nm in asset_names if not _resolve_asset_by_name(nm)]

Expand Down
Loading