diff --git a/roar/application/run/execution.py b/roar/application/run/execution.py index 1101f0a6..bae25460 100644 --- a/roar/application/run/execution.py +++ b/roar/application/run/execution.py @@ -135,8 +135,17 @@ def execute_and_report( return 1 presenter = ConsolePresenter() - report = RunReportPresenter(presenter) - report.show_report(result, command, quiet) + report = RunReportPresenter(presenter, quiet=quiet) + + # The coordinator already emitted the lifecycle lines (trace start/end, + # hashing spinner, lineage captured). Here we emit the summary block and + # the final "done" line. + report.summary(result, command) + report.done( + exit_code=result.exit_code, + trace_duration=result.duration, + post_duration=result.post_duration, + ) if result.stale_upstream or result.stale_downstream: report.show_stale_warnings( diff --git a/roar/core/models/run.py b/roar/core/models/run.py index 502bcece..e789fdb8 100644 --- a/roar/core/models/run.py +++ b/roar/core/models/run.py @@ -69,6 +69,7 @@ class TracerResult(ImmutableModel): tracer_log_path: Annotated[str, Field(min_length=1)] inject_log_path: str interrupted: bool = False + backend: str | None = None # "ebpf" | "preload" | "ptrace" class RunContext(RoarBaseModel): @@ -109,6 +110,22 @@ class RunResult(ImmutableModel): is_build: bool = False stale_upstream: list[int] = Field(default_factory=list) stale_downstream: list[int] = Field(default_factory=list) + # UX metadata for the new run presenter. Optional so callers that build + # RunResult without these fields (older code paths, error cases) still work. 
+ backend: str | None = None + post_duration: float = 0.0 + proxy_active: bool = False + pip_count: int = 0 + dpkg_count: int = 0 + env_count: int = 0 + git_branch: str | None = None + git_short_commit: str | None = None + git_clean: bool = True + total_hash_bytes: int = 0 + hash_duration: float = 0.0 + dag_jobs: int = 0 + dag_artifacts: int = 0 + dag_depth: int = 0 @computed_field # type: ignore[prop-decorator] @property diff --git a/roar/execution/runtime/coordinator.py b/roar/execution/runtime/coordinator.py index 094f2aa5..86ebd837 100644 --- a/roar/execution/runtime/coordinator.py +++ b/roar/execution/runtime/coordinator.py @@ -9,6 +9,7 @@ import os import secrets +import subprocess import sys import time from typing import TYPE_CHECKING, Any @@ -109,6 +110,12 @@ def execute(self, ctx: RunContext) -> RunResult: # Backup previous outputs if reversibility is enabled self._backup_previous_outputs(ctx) + # UX presenter for lifecycle output (stderr). Pipes through to existing + # IPresenter for any legacy print_error calls. + from ...presenters.run_report import RunReportPresenter + + run_presenter = RunReportPresenter(self.presenter, quiet=ctx.quiet) + # Start proxy if configured proxy_handle = None extra_env: dict[str, str] = { @@ -151,6 +158,25 @@ def stop_proxy_if_running() -> list: # Execute via tracer from ...core.exceptions import TracerNotFoundError + # Resolve the backend name before execution so the trace_starting + # line can show the actual tracer, not "auto". Mirror the same + # resolution logic that execute() uses (config β†’ override β†’ auto). 
+ proxy_active = proxy_handle is not None + resolved_mode: str | None = None + try: + mode = ctx.tracer_mode or self._tracer._get_tracer_mode() + fallback = ( + ctx.tracer_fallback + if ctx.tracer_fallback is not None + else self._tracer._get_fallback_enabled() + ) + candidates = self._tracer._get_tracer_candidates(mode, fallback) + if candidates: + resolved_mode = candidates[0][0] + except Exception: + pass + run_presenter.trace_starting(resolved_mode or ctx.tracer_mode, proxy_active) + self.logger.debug("Starting tracer execution") try: tracer_result = self._tracer.execute( @@ -162,6 +188,7 @@ def stop_proxy_if_running() -> list: tracer_mode_override=ctx.tracer_mode, fallback_enabled_override=ctx.tracer_fallback, ) + run_presenter.trace_ended(tracer_result.duration, tracer_result.exit_code) self.logger.debug( "Tracer completed: exit_code=%d, duration=%.2fs, interrupted=%s", tracer_result.exit_code, @@ -220,39 +247,35 @@ def stop_proxy_if_running() -> list: is_build=is_build, ) - # Post-processing with progress spinner - from ...presenters.spinner import Spinner - - with Spinner("Sniffing out provenance...", quiet=ctx.quiet) as spin: - # Collect provenance - self.logger.debug("Collecting provenance data") - inject_log = ( - tracer_result.inject_log_path - if os.path.exists(tracer_result.inject_log_path) - else None - ) - roar_dir = os.path.join(ctx.repo_root, ".roar") - provenance_service = ProvenanceService(cache_dir=roar_dir) - t_prov_start = time.perf_counter() - prov = provenance_service.collect( - ctx.repo_root, - tracer_result.tracer_log_path, - inject_log, - config, - ) - t_prov_end = time.perf_counter() - self.logger.debug( - "Provenance collected: read_files=%d, written_files=%d", - len(prov.get("data", {}).get("read_files", [])), - len(prov.get("data", {}).get("written_files", [])), - ) - - # Stop proxy and collect S3 entries before DB recording. 
- s3_entries = stop_proxy_if_running() + # Collect provenance first (fast) so we know total file count for the + # hashing spinner. + self.logger.debug("Collecting provenance data") + inject_log = ( + tracer_result.inject_log_path if os.path.exists(tracer_result.inject_log_path) else None + ) + roar_dir = os.path.join(ctx.repo_root, ".roar") + provenance_service = ProvenanceService(cache_dir=roar_dir) + t_prov_start = time.perf_counter() + prov = provenance_service.collect( + ctx.repo_root, + tracer_result.tracer_log_path, + inject_log, + config, + ) + t_prov_end = time.perf_counter() + n_read = len(prov.get("data", {}).get("read_files", [])) + n_written = len(prov.get("data", {}).get("written_files", [])) + self.logger.debug( + "Provenance collected: read_files=%d, written_files=%d", + n_read, + n_written, + ) - spin.update("Hashing files and TReqing outputs...") + # Stop proxy and collect S3 entries before DB recording. + s3_entries = stop_proxy_if_running() - # Record in database + total_files = n_read + n_written + with run_presenter.hashing(total=total_files or None): self.logger.debug("Recording job in database") t_record_start = time.perf_counter() job_id, job_uid, read_file_info, written_file_info, stale_upstream, stale_downstream = ( @@ -275,12 +298,20 @@ def stop_proxy_if_running() -> list: len(written_file_info), ) - spin.update("Auto-lineaging done, tidying up...") - # Cleanup temp files self.logger.debug("Cleaning up temporary log files") self._cleanup_logs(tracer_result.tracer_log_path, tracer_result.inject_log_path) + # "hashed" throughput line. 
+ total_hash_bytes = sum(f.get("size") or 0 for f in written_file_info) + hash_duration = t_record_end - t_record_start + run_presenter.hashed( + n_artifacts=len(read_file_info) + len(written_file_info), + total_bytes=total_hash_bytes, + duration=hash_duration, + ) + run_presenter.lineage_captured() + t_postrun_end = time.perf_counter() if emit_timing: @@ -304,6 +335,82 @@ def stop_proxy_if_running() -> list: tracer_result.exit_code, tracer_result.duration, ) + # UX metadata for the new run presenter. + exec_packages = prov.get("executables", {}).get("packages", {}) or {} + pip_count = len(exec_packages.get("pip", {}) or {}) + dpkg_count = len(exec_packages.get("dpkg", {}) or {}) + env_count = len(prov.get("runtime", {}).get("env_vars", {}) or {}) + post_duration = t_postrun_end - t_postrun_start + backend_name = getattr(tracer_result, "backend", None) + if not isinstance(backend_name, str): + backend_name = None + + # Git info (best-effort, never fail the run for this). + git_branch, git_short_commit, git_clean = None, None, True + try: + git_branch = ( + subprocess.check_output( + ["git", "rev-parse", "--abbrev-ref", "HEAD"], + stderr=subprocess.DEVNULL, + text=True, + cwd=ctx.repo_root, + ).strip() + or None + ) + git_short_commit = ( + subprocess.check_output( + ["git", "rev-parse", "--short", "HEAD"], + stderr=subprocess.DEVNULL, + text=True, + cwd=ctx.repo_root, + ).strip() + or None + ) + except Exception: + pass + + # DAG stats + parent job lookup (best-effort, never fail the run). 
+ dag_jobs, dag_artifacts, dag_depth = 0, 0, 0 + try: + from ...db.context import create_database_context as _create_db_ctx + from ...presenters.dag_data_builder import DagDataBuilder + + with _create_db_ctx(ctx.roar_dir) as db_ctx: + session = db_ctx.sessions.get_active() + if session: + builder = DagDataBuilder(db_ctx, int(session["id"])) + dag_data = builder.build(expanded=False) + dag_jobs = len(dag_data.get("nodes", [])) + dag_artifacts = len(dag_data.get("artifacts", [])) + # Compute depth: longest dependency chain. + nodes = dag_data.get("nodes", []) + if nodes: + step_deps = {n["step_number"]: n.get("dependencies", []) for n in nodes} + all_steps = set(step_deps) + memo: dict[int, int] = {} + + def _depth(s: int) -> int: + if s in memo: + return memo[s] + children = [x for x in all_steps if s in step_deps.get(x, [])] + d = 1 + max((_depth(ch) for ch in children), default=0) + memo[s] = d + return d + + roots = [s for s in all_steps if not step_deps.get(s)] + dag_depth = max((_depth(r) for r in roots), default=1) if roots else 1 + + # Parent job UIDs for input artifacts. 
+ for inp in read_file_info: + aid = inp.get("artifact_id") + if aid: + jobs_info = db_ctx.artifacts.get_jobs(aid) + producers = jobs_info.get("produced_by", []) + if producers: + inp["parent_job_uid"] = producers[0].get("job_uid") + except Exception: + pass + return RunResult( exit_code=tracer_result.exit_code, job_id=job_id, @@ -315,6 +422,20 @@ def stop_proxy_if_running() -> list: is_build=is_build, stale_upstream=stale_upstream, stale_downstream=stale_downstream, + backend=backend_name, + post_duration=post_duration, + proxy_active=proxy_active, + pip_count=pip_count, + dpkg_count=dpkg_count, + env_count=env_count, + git_branch=git_branch, + git_short_commit=git_short_commit, + git_clean=git_clean, + total_hash_bytes=total_hash_bytes, + hash_duration=hash_duration, + dag_jobs=dag_jobs, + dag_artifacts=dag_artifacts, + dag_depth=dag_depth, ) def _record_job( diff --git a/roar/execution/runtime/tracer.py b/roar/execution/runtime/tracer.py index 0688d390..8d3aaeb0 100644 --- a/roar/execution/runtime/tracer.py +++ b/roar/execution/runtime/tracer.py @@ -300,8 +300,10 @@ def execute( signal_handler.install() exit_code = 1 + selected_backend: str | None = None try: for idx, (backend, tracer_path) in enumerate(candidates): + selected_backend = backend # Ensure stale files from previous attempts don't mask failures. for log_file in (tracer_log_file, inject_log_file): try: @@ -368,6 +370,7 @@ def execute( tracer_log_path=tracer_log_file, inject_log_path=inject_log_file, interrupted=signal_handler.is_interrupted(), + backend=selected_backend, ) def get_log_paths(self, roar_dir: Path) -> tuple: diff --git a/roar/presenters/run_report.py b/roar/presenters/run_report.py index ba7c4dae..801caab8 100644 --- a/roar/presenters/run_report.py +++ b/roar/presenters/run_report.py @@ -1,108 +1,177 @@ -""" -Run report presenter for displaying run completion reports. +"""Run report presenter - minimalist narration-style output. + +Every status line is narrated by πŸ¦–. 
The lineage detail block uses +``Β·`` (middle dot) as a line prefix with 3-char category labels. -Handles all output formatting for run/build results. -Follows SRP: only handles report presentation. +Color tokens (all in terminal.py, no raw ANSI here): + status_green - πŸ¦– lines, ``exit 0``, ``clean`` + warn_amber - ``dirty`` git, non-zero exit + command_blue - suggested command text + dim - prefixes, labels, flags, comments, timing + bold - current job hash (no hue) """ -import os -import shlex -from typing import Any +from __future__ import annotations + +import sys +from contextlib import contextmanager +from typing import IO from ..core.interfaces.presenter import IPresenter from ..core.models.run import RunResult +from .spinner import BRAILLE_FRAMES, CLOCK_FRAMES, Spinner +from .terminal import TerminalCaps, detect, style +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- -def format_size(size_bytes: int | None) -> str: - """Format file size in human-readable format.""" - if size_bytes is None: - return "?" - size: float = float(size_bytes) - for unit in ["B", "KB", "MB", "GB"]: - if abs(size) < 1024: - return f"{size:.1f}{unit}" if unit != "B" else f"{int(size)}{unit}" - size /= 1024 - return f"{size:.1f}TB" +_SMALL_RUN = 5 # skip transient hashing progress below this count -class RunReportPresenter: - """ - Formats and displays run completion reports. +def _plural(n: int, singular: str, plural: str | None = None) -> str: + return f"{n} {singular}" if n == 1 else f"{n} {plural or singular + 's'}" - Follows SRP: only handles report presentation. - """ - def __init__(self, presenter: IPresenter) -> None: - """ - Initialize report presenter. 
+# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- - Args: - presenter: Base presenter for output - """ - self._out = presenter - def show_report( +class _NullProgress: + def advance(self, delta: int = 1) -> None: + pass + + def set_count(self, count: int) -> None: + pass + + def update(self, message: str) -> None: + pass + + +# --------------------------------------------------------------------------- +# Presenter +# --------------------------------------------------------------------------- + + +class RunReportPresenter: + """Minimalist narration-style output for ``roar run``.""" + + def __init__( self, - result: RunResult, - command: list[str], + presenter: IPresenter | None = None, + *, + stream: IO | None = None, + caps: TerminalCaps | None = None, quiet: bool = False, ) -> None: - """ - Display run completion report. - - Args: - result: Run result with execution details - command: Original command that was executed - quiet: If True, suppress output - """ - if quiet: - return - - self._out.print("") - self._out.print("=" * 60) + self._out = presenter + self._stream = stream if stream is not None else sys.stderr + self._caps = caps if caps is not None else detect(self._stream) + self._quiet = quiet - step_type = "Build" if result.is_build else "Run" - if result.interrupted: - self._out.print(f"ROAR {step_type} Interrupted") - else: - self._out.print(f"ROAR {step_type} Complete") + # ---- lifecycle events ------------------------------------------------- - self._out.print("=" * 60) - self._out.print(f"Command: {shlex.join(command)}") - self._out.print(f"Duration: {result.duration:.1f}s") - self._out.print(f"Exit code: {result.exit_code}") + def trace_starting(self, backend: str | None, proxy_active: bool) -> None: + if self._quiet or self._caps.pipe_mode: + return + c = self._caps.can_color + flags = style( + f"tracer:{backend or 'auto'} 
proxy:{'on' if proxy_active else 'off'} sync:off", + "dim", + enabled=c, + ) + self._trex(f"tracing {self._dim_sep()}{flags}") - if result.interrupted: - self._out.print("Status: interrupted") + def trace_ended(self, duration: float, exit_code: int) -> None: + if self._quiet or self._caps.pipe_mode: + return + c = self._caps.can_color + parts = ["trace done"] + parts.append(self._fmt_dur(duration)) + exit_s = f"exit {exit_code}" + if exit_code == 0: + exit_s = style(exit_s, "status_green", enabled=c) + else: + exit_s = style(exit_s, "warn_amber", "bold", enabled=c) + parts.append(exit_s) + self._trex(f" {self._dim_sep()}".join(parts)) + + @contextmanager + def hashing(self, total: int | None = None): + if self._quiet or self._caps.pipe_mode: + yield _NullProgress() + return + # Skip transient spinner for small runs. + if total is not None and total < _SMALL_RUN: + yield _NullProgress() + return + prefix = "πŸ¦– " if self._caps.can_emoji else "roar: " + frames = CLOCK_FRAMES if self._caps.can_emoji else BRAILLE_FRAMES + with Spinner("hashing", prefix=prefix, frames=frames, interval=0.1) as sp: + yield sp - self._out.print("") + def hashed(self, n_artifacts: int, total_bytes: int, duration: float) -> None: + if self._quiet or self._caps.pipe_mode: + return + c = self._caps.can_color + text = f"hashed {_plural(n_artifacts, 'artifact')}" + if duration > 0 and total_bytes > 0: + mbps = (total_bytes / 1024 / 1024) / duration + text += f" {self._dim_sep()}{style(f'{mbps:.1f} MB/s', 'dim', enabled=c)}" + self._trex(text) + + def lineage_captured(self) -> None: + if self._quiet or self._caps.pipe_mode: + return + self._trex("lineage captured:") - if result.inputs: - self._out.print("Read files:") - for f in result.inputs: - self._print_file(f) - self._out.print("") + def summary(self, result: RunResult, command: list[str]) -> None: + if self._quiet or self._caps.pipe_mode: + return + self._render_summary(result) - if result.outputs: - self._out.print("Written files:") - for 
f in result.outputs: - self._print_file(f) - self._out.print("") + def done(self, *, exit_code: int, trace_duration: float, post_duration: float) -> None: + if self._quiet: + return + if self._caps.pipe_mode: + total = trace_duration + post_duration + self._print( + f"roar: done Β· {self._fmt_dur(total)} " + f"(trace {self._fmt_dur(trace_duration)} + " + f"post {self._fmt_dur(post_duration)}, exit {exit_code})" + ) + return + c = self._caps.can_color + timing = style( + f"trace {self._fmt_dur(trace_duration)} + post {self._fmt_dur(post_duration)}", + "dim", + enabled=c, + ) + self._trex(f"done {self._dim_sep()}{timing}") - self._out.print(f"Job: {result.job_uid}") + # ---- backward-compat one-shot ---------------------------------------- - if result.interrupted and result.outputs: - self._out.print("") - self._out.print("Note: Run was interrupted. Output files may be incomplete.") - self._out.print("Use 'roar pop' to remove this job and delete safe written files.") + def show_report(self, result: RunResult, command: list[str], quiet: bool = False) -> None: + if quiet or self._quiet: + return + if self._caps.pipe_mode: + self.done( + exit_code=result.exit_code, + trace_duration=result.duration, + post_duration=result.post_duration, + ) + return + self.trace_ended(result.duration, result.exit_code) + self.lineage_captured() + self._render_summary(result) + self.done( + exit_code=result.exit_code, + trace_duration=result.duration, + post_duration=result.post_duration, + ) - self._out.print("") - self._out.print("Next:") - self._out.print(f" roar show --job {result.job_uid}") - if result.interrupted and result.outputs: - self._out.print(" roar pop") - else: - self._out.print(" roar dag") + # ---- stale warnings (unchanged) -------------------------------------- def show_stale_warnings( self, @@ -110,21 +179,14 @@ def show_stale_warnings( stale_downstream: list[int], is_build: bool = False, ) -> None: - """ - Display stale step warnings. 
- - Args: - stale_upstream: List of stale upstream step numbers - stale_downstream: List of stale downstream step numbers - is_build: Whether this is a build step - """ + if not (stale_upstream or stale_downstream) or self._out is None: + return if stale_upstream: self._out.print("") step_refs = ", ".join(f"@{s}" for s in stale_upstream) self._out.print(f"Warning: This job consumed stale inputs from: {step_refs}") self._out.print("The upstream steps were re-run but this step used old outputs.") self._out.print("Consider re-running this step after updating upstream.") - if stale_downstream: self._out.print("") step_prefix = "B" if is_build else "" @@ -132,49 +194,105 @@ def show_stale_warnings( self._out.print(f"Warning: Downstream steps are stale: {step_refs}") self._out.print("Run these steps to update them, or use 'roar dag' to see full status.") - def show_upstream_stale_warning( - self, - step_num: int, - upstream_stale: list[int], - ) -> bool: - """ - Show warning about stale upstream steps and ask for confirmation. - - Args: - step_num: Current step number - upstream_stale: List of stale upstream step numbers - - Returns: - True if user wants to proceed, False otherwise - """ + def show_upstream_stale_warning(self, step_num: int, upstream_stale: list[int]) -> bool: + if self._out is None: + return True step_refs = ", ".join(f"@{s}" for s in upstream_stale) self._out.print(f"Warning: Step @{step_num} depends on stale upstream steps: {step_refs}") self._out.print( "The upstream steps have been re-run more recently than their outputs were consumed." 
) self._out.print("") - return self._out.confirm("Run anyway?", default=False) - def _print_file(self, f: dict[str, Any]) -> None: - """Print file info with path, size, and hashes.""" - path = f["path"] - size = format_size(f.get("size")) - - # Make path relative if possible - try: - rel_path = os.path.relpath(path) - if not rel_path.startswith(".."): - path = rel_path - except ValueError: - pass - - self._out.print(f" {path}") - - # Show all hashes - hashes = f.get("hashes", []) - if hashes: - hash_strs = [f"{h['algorithm']}: {h['digest'][:12]}..." for h in hashes] - self._out.print(f" size: {size} {', '.join(hash_strs)}") - else: - self._out.print(f" size: {size}") + # ---- internal -------------------------------------------------------- + + def _print(self, line: str = "") -> None: + print(line, file=self._stream, flush=True) + + def _trex(self, text: str) -> None: + """Emit a πŸ¦–-prefixed status line in STATUS_GREEN.""" + c = self._caps.can_color + prefix = "πŸ¦–" if self._caps.can_emoji else "roar:" + self._print( + f"{style(prefix, 'status_green', enabled=c)} {style(text, 'status_green', enabled=c)}" + ) + + def _detail(self, label: str, content: str) -> None: + """Emit a ``Β· label content`` detail line.""" + c = self._caps.can_color + prefix = style("Β·", "dim", enabled=c) + lbl = style(f"{label:<3}", "dim", enabled=c) + self._print(f"{prefix} {lbl} {content}") + + def _detail_blank(self) -> None: + c = self._caps.can_color + self._print(style("Β·", "dim", enabled=c)) + + def _dim_sep(self) -> str: + return style("Β· ", "dim", enabled=self._caps.can_color) + + def _fmt_dur(self, seconds: float) -> str: + if seconds < 0.1: + return f"{max(1, round(seconds * 1000))}ms" + return f"{seconds:.1f}s" + + # ---- summary block --------------------------------------------------- + + def _render_summary(self, result: RunResult) -> None: + c = self._caps.can_color + + # i/o line: "2 inputs ← 2 prior jobs Β· 1 output" + n_in = len(result.inputs) + n_out = 
len(result.outputs) + io_parts = [] + if n_in: + io_parts.append(_plural(n_in, "input")) + if n_out: + io_parts.append(_plural(n_out, "output")) + if io_parts: + self._detail("i/o", f" {self._dim_sep()}".join(io_parts)) + + # job line β€” bold hash, no hue. + job_hash = style(result.job_uid, "bold", enabled=c) + self._detail("job", job_hash) + + # git line. + if result.git_branch or result.git_short_commit: + branch = result.git_branch or "?" + commit = result.git_short_commit or "?" + if result.git_clean: + state = style("clean", "status_green", enabled=c) + else: + state = style("dirty", "warn_amber", enabled=c) + self._detail("git", f"{branch} @ {commit} {self._dim_sep()}{state}") + + # env line β€” pip/dpkg/vars are category labels, not countable nouns. + env_parts = [] + if result.pip_count: + env_parts.append(f"{result.pip_count} pip") + if result.dpkg_count: + env_parts.append(f"{result.dpkg_count} dpkg") + if result.env_count: + env_parts.append(_plural(result.env_count, "var")) + if env_parts: + self._detail("env", f" {self._dim_sep()}".join(env_parts)) + + # dag line. + if result.dag_jobs or result.dag_artifacts: + dag_parts = [] + if result.dag_jobs: + dag_parts.append(_plural(result.dag_jobs, "job")) + if result.dag_artifacts: + dag_parts.append(_plural(result.dag_artifacts, "artifact")) + if result.dag_depth: + dag_parts.append(f"depth {result.dag_depth}") + self._detail("dag", f" {self._dim_sep()}".join(dag_parts)) + + # Blank separator + suggested command. 
+ self._detail_blank() + cmd_text = style(f"roar show --job {result.job_uid}", "command_blue", enabled=c) + comment = style("# details", "dim", enabled=c) + self._print( + f"{style('Β·', 'dim', enabled=c)} {style('$', 'dim', enabled=c)} {cmd_text} {comment}" + ) diff --git a/roar/presenters/spinner.py b/roar/presenters/spinner.py index e7776000..2cbbd563 100644 --- a/roar/presenters/spinner.py +++ b/roar/presenters/spinner.py @@ -1,26 +1,54 @@ -"""Simple threaded spinner for post-command processing feedback.""" +"""Threaded spinner for post-command processing feedback. + +Supports two frame sets β€” clock emojis (for UTF-8 terminals) and the classic +ANSI braille cycle β€” and an optional live "N/M" counter appended to the label. +All output goes to stderr; no-ops when stderr isn't a TTY or when quiet. +""" from __future__ import annotations import sys import threading -_FRAMES = "⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏" -_INTERVAL = 0.08 +# Full 12-position hour clock cycle. +CLOCK_FRAMES = "πŸ•›πŸ•πŸ•‘πŸ•’πŸ•“πŸ•”πŸ••πŸ•–πŸ•—πŸ•˜πŸ•™πŸ•š" +BRAILLE_FRAMES = "⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏" +_DEFAULT_INTERVAL = 0.1 class Spinner: """Context-manager spinner that writes to stderr. - No-ops when stderr is not a TTY or when *quiet* is True. + Keyword arguments: + message -- status text shown after the frame + frames -- iterable of single-char-wide frames (emoji OK) + interval -- seconds between frames + prefix -- text shown before the frame (e.g. 
"πŸ¦– " or "roar: ") + quiet -- if True, becomes a no-op + total -- if set, renders a live "N/total" counter after the message """ - def __init__(self, message: str = "", *, quiet: bool = False) -> None: + def __init__( + self, + message: str = "", + *, + frames: str = BRAILLE_FRAMES, + interval: float = _DEFAULT_INTERVAL, + prefix: str = "", + quiet: bool = False, + total: int | None = None, + ) -> None: self._message = message + self._frames = frames + self._interval = interval + self._prefix = prefix + self._total = total + self._count = 0 self._active = not quiet and hasattr(sys.stderr, "isatty") and sys.stderr.isatty() self._stop_event = threading.Event() self._thread: threading.Thread | None = None self._lock = threading.Lock() + self._last_line_len = 0 # -- context manager ----------------------------------------------------- @@ -35,8 +63,8 @@ def __exit__(self, *_exc) -> None: if self._thread is not None: self._thread.join() if self._active: - # clear the spinner line - sys.stderr.write("\r" + " " * (len(self._message) + 4) + "\r") + # Clear the spinner line. 
+ sys.stderr.write("\r" + " " * self._last_line_len + "\r") sys.stderr.flush() # -- public api ---------------------------------------------------------- @@ -46,6 +74,15 @@ def update(self, message: str) -> None: with self._lock: self._message = message + def advance(self, delta: int = 1) -> None: + """Increment the N of the N/total counter.""" + with self._lock: + self._count += delta + + def set_count(self, count: int) -> None: + with self._lock: + self._count = count + # -- internals ----------------------------------------------------------- def _spin(self) -> None: @@ -53,8 +90,15 @@ def _spin(self) -> None: while not self._stop_event.is_set(): with self._lock: msg = self._message - frame = _FRAMES[idx % len(_FRAMES)] - sys.stderr.write(f"\r{frame} {msg}") + count = self._count + total = self._total + frame = self._frames[idx % len(self._frames)] + suffix = f" {count}/{total}" if total is not None else "" + line = f"{self._prefix}{msg} {frame}{suffix}" + # Pad to clear any residue from a longer previous line. + pad = max(0, self._last_line_len - len(line)) + sys.stderr.write(f"\r{line}{' ' * pad}") sys.stderr.flush() + self._last_line_len = len(line) idx += 1 - self._stop_event.wait(_INTERVAL) + self._stop_event.wait(self._interval) diff --git a/roar/presenters/terminal.py b/roar/presenters/terminal.py new file mode 100644 index 00000000..0634920d --- /dev/null +++ b/roar/presenters/terminal.py @@ -0,0 +1,81 @@ +"""Centralized terminal capability detection for roar presenters. + +Single source of truth for "is this stream a TTY", "should we use color", +"can we print emoji", and "how wide is the terminal". Used by the run +presenter, spinner, and anyone else that wants to render nicely. 
+""" + +from __future__ import annotations + +import os +import shutil +import sys +from dataclasses import dataclass +from typing import IO + + +@dataclass(frozen=True) +class TerminalCaps: + """What the current terminal can do, as seen through `stream`.""" + + is_tty: bool + can_color: bool + can_emoji: bool + width: int + + @property + def pipe_mode(self) -> bool: + """True when the stream is not a TTY (piped, redirected, captured).""" + return not self.is_tty + + +def detect(stream: IO | None = None) -> TerminalCaps: + """Sniff capabilities of *stream* (defaults to stderr).""" + s = stream if stream is not None else sys.stderr + is_tty = bool(getattr(s, "isatty", lambda: False)()) + + # Standard NO_COLOR convention (https://no-color.org/). + no_color_env = os.environ.get("NO_COLOR", "") != "" + can_color = is_tty and not no_color_env + + enc = (getattr(s, "encoding", "") or "").lower() + lang = os.environ.get("LC_ALL") or os.environ.get("LANG") or "" + can_emoji = is_tty and ("utf" in enc or "UTF-8" in lang.upper()) + + try: + width = shutil.get_terminal_size((80, 24)).columns + except OSError: + width = 80 + width = max(40, width) + + return TerminalCaps(is_tty=is_tty, can_color=can_color, can_emoji=can_emoji, width=width) + + +# ---- ANSI helpers ----------------------------------------------------------- + +_ANSI = { + "reset": "\033[0m", + "bold": "\033[1m", + "dim": "\033[2m", + "italic": "\033[3m", + "red": "\033[31m", + "green": "\033[32m", + "yellow": "\033[33m", + "blue": "\033[34m", + "magenta": "\033[35m", + "cyan": "\033[36m", + "gray": "\033[90m", + # Named semantic tokens β€” 256-color palette entries that read well on + # both dark and light terminal backgrounds. 
+ "status_green": "\033[38;5;35m", # ANSI-256 #35 β€” muted green + "warn_amber": "\033[38;5;172m", # ANSI-256 #172 β€” amber/orange + "command_blue": "\033[38;5;74m", # ANSI-256 #74 β€” steel blue +} + + +def style(text: str, *codes: str, enabled: bool = True) -> str: + """Wrap *text* in the given ANSI styles, or return it unchanged if disabled.""" + if not enabled or not codes: + return text + prefix = "".join(_ANSI.get(c, "") for c in codes) + return f"{prefix}{text}{_ANSI['reset']}" if prefix else text diff --git a/tests/unit/test_run_report.py b/tests/unit/test_run_report.py index 2537b91a..254ac644 100644 --- a/tests/unit/test_run_report.py +++ b/tests/unit/test_run_report.py @@ -1,9 +1,28 @@ +"""Tests for RunReportPresenter β€” minimalist narration-style output.""" + from __future__ import annotations +import io +import re from typing import Any from roar.core.models.run import RunResult from roar.presenters.run_report import RunReportPresenter +from roar.presenters.terminal import TerminalCaps + +ANSI_RE = re.compile(r"\x1b\[[0-9;]*m") + + +def _strip(s: str) -> str: + return ANSI_RE.sub("", s) + + +def _tty_caps(width: int = 120) -> TerminalCaps: + return TerminalCaps(is_tty=True, can_color=False, can_emoji=False, width=width) + + +def _pipe_caps() -> TerminalCaps: + return TerminalCaps(is_tty=False, can_color=False, can_emoji=False, width=80) class _CapturePresenter: @@ -25,60 +44,205 @@ def print_job(self, job: dict[str, Any], verbose: bool = False) -> None: def print_artifact(self, artifact: dict[str, Any]) -> None: return None - def print_dag( - self, - summary: dict[str, Any], - stale_steps: set[int] | None = None, - ) -> None: + def print_dag(self, summary: dict[str, Any], stale_steps: set[int] | None = None) -> None: return None def confirm(self, message: str, default: bool = False) -> bool: return default -def test_interrupted_report_references_pop_not_clean() -> None: - presenter = _CapturePresenter() - report = RunReportPresenter(presenter) - - 
report.show_report( - RunResult( - exit_code=130, - job_id=1, - job_uid="job12345", - duration=0.5, - inputs=[], - outputs=[{"path": "/tmp/out.txt", "size": 1, "hashes": []}], - interrupted=True, - is_build=False, +def _make_result(**overrides: Any) -> RunResult: + defaults: dict[str, Any] = { + "exit_code": 0, + "job_id": 1, + "job_uid": "f3fba717", + "duration": 1.0, + "inputs": [], + "outputs": [], + } + defaults.update(overrides) + return RunResult(**defaults) + + +# ---- summary detail lines ------------------------------------------------- + + +def test_io_line_counts_inputs_and_outputs() -> None: + buf = io.StringIO() + report = RunReportPresenter(stream=buf, caps=_tty_caps()) + report.summary( + _make_result( + inputs=[ + {"path": "/a/in1.txt", "hashes": []}, + {"path": "/a/in2.txt", "hashes": []}, + ], + outputs=[{"path": "/a/out.txt", "hashes": []}], ), - ["python", "train.py"], + [], ) - - rendered = "\n".join(presenter.messages) - assert "roar pop" in rendered - assert "roar clean" not in rendered - assert "roar show --job job12345" in rendered - - -def test_successful_report_suggests_show_and_dag() -> None: - presenter = _CapturePresenter() - report = RunReportPresenter(presenter) - - report.show_report( - RunResult( - exit_code=0, - job_id=2, - job_uid="job67890", - duration=1.0, - inputs=[], - outputs=[], - interrupted=False, - is_build=False, - ), - ["python", "train.py"], + out = _strip(buf.getvalue()) + assert "i/o" in out + assert "2 inputs" in out + assert "1 output" in out + + +def test_job_line_shows_bold_hash() -> None: + buf = io.StringIO() + caps = TerminalCaps(is_tty=True, can_color=True, can_emoji=False, width=80) + report = RunReportPresenter(stream=buf, caps=caps) + report.summary(_make_result(job_uid="f3fba717"), []) + raw = buf.getvalue() + assert "f3fba717" in _strip(raw) + # Bold ANSI code wraps the hash. 
+ assert "\x1b[1m" in raw + + +def test_git_line_clean() -> None: + buf = io.StringIO() + report = RunReportPresenter(stream=buf, caps=_tty_caps()) + report.summary( + _make_result(git_branch="main", git_short_commit="10c570b", git_clean=True), + [], ) - - rendered = "\n".join(presenter.messages) - assert "Next:" in rendered - assert "roar show --job job67890" in rendered - assert "roar dag" in rendered + out = _strip(buf.getvalue()) + assert "git" in out + assert "main @ 10c570b" in out + assert "clean" in out + + +def test_git_line_dirty_uses_amber() -> None: + buf = io.StringIO() + caps = TerminalCaps(is_tty=True, can_color=True, can_emoji=False, width=80) + report = RunReportPresenter(stream=buf, caps=caps) + report.summary( + _make_result(git_branch="main", git_short_commit="abc", git_clean=False), + [], + ) + raw = buf.getvalue() + assert "dirty" in _strip(raw) + # warn_amber = ANSI 256-color 172 + assert "\x1b[38;5;172m" in raw + + +def test_env_line() -> None: + buf = io.StringIO() + report = RunReportPresenter(stream=buf, caps=_tty_caps()) + report.summary(_make_result(pip_count=9, dpkg_count=10, env_count=3), []) + out = _strip(buf.getvalue()) + assert "env" in out + assert "9 pip" in out + assert "10 dpkg" in out + assert "3 var" in out + + +def test_dag_line_singular() -> None: + buf = io.StringIO() + report = RunReportPresenter(stream=buf, caps=_tty_caps()) + report.summary(_make_result(dag_jobs=1, dag_artifacts=1, dag_depth=1), []) + out = _strip(buf.getvalue()) + assert "1 job" in out + assert "1 artifact" in out + assert "artifacts" not in out + + +def test_suggested_command() -> None: + buf = io.StringIO() + report = RunReportPresenter(stream=buf, caps=_tty_caps()) + report.summary(_make_result(job_uid="f3fba717"), []) + out = _strip(buf.getvalue()) + assert "$ roar show --job f3fba717" in out + assert "# details" in out + + +# ---- lifecycle lines ------------------------------------------------------- + + +def test_trace_starting() -> None: + buf 
= io.StringIO() + report = RunReportPresenter(stream=buf, caps=_tty_caps()) + report.trace_starting(backend="preload", proxy_active=False) + out = _strip(buf.getvalue()) + assert "tracing" in out + assert "tracer:preload" in out + assert "sync:off" in out + + +def test_trace_ended_success() -> None: + buf = io.StringIO() + report = RunReportPresenter(stream=buf, caps=_tty_caps()) + report.trace_ended(duration=11.2, exit_code=0) + out = _strip(buf.getvalue()) + assert "trace done" in out + assert "11.2s" in out + assert "exit 0" in out + + +def test_trace_ended_nonzero_exit() -> None: + buf = io.StringIO() + caps = TerminalCaps(is_tty=True, can_color=True, can_emoji=False, width=80) + report = RunReportPresenter(stream=buf, caps=caps) + report.trace_ended(duration=1.0, exit_code=1) + raw = buf.getvalue() + assert "exit 1" in _strip(raw) + # warn_amber for non-zero exit + assert "\x1b[38;5;172m" in raw + + +def test_hashed_singular() -> None: + buf = io.StringIO() + report = RunReportPresenter(stream=buf, caps=_tty_caps()) + report.hashed(n_artifacts=1, total_bytes=1024 * 1024, duration=0.5) + out = _strip(buf.getvalue()) + assert "1 artifact" in out + assert "artifacts" not in out + assert "MB/s" in out + + +def test_done_shows_timing_breakdown() -> None: + buf = io.StringIO() + report = RunReportPresenter(stream=buf, caps=_tty_caps()) + report.done(exit_code=0, trace_duration=11.2, post_duration=0.6) + out = _strip(buf.getvalue()) + assert "done" in out + assert "trace 11.2s" in out + assert "post 0.6s" in out + + +# ---- quiet + pipe modes --------------------------------------------------- + + +def test_quiet_mode_emits_nothing() -> None: + buf = io.StringIO() + report = RunReportPresenter(stream=buf, caps=_tty_caps(), quiet=True) + report.trace_starting(backend="preload", proxy_active=False) + report.trace_ended(duration=0.5, exit_code=0) + report.lineage_captured() + report.summary(_make_result(), []) + report.done(exit_code=0, trace_duration=0.5, 
post_duration=0.1) + assert buf.getvalue() == "" + + +def test_pipe_mode_emits_only_done_line() -> None: + buf = io.StringIO() + report = RunReportPresenter(stream=buf, caps=_pipe_caps()) + report.trace_starting(backend="preload", proxy_active=False) + report.trace_ended(duration=0.5, exit_code=0) + report.lineage_captured() + report.summary(_make_result(), []) + report.done(exit_code=0, trace_duration=0.5, post_duration=0.1) + out = buf.getvalue() + assert out.count("\n") == 1 + assert out.startswith("roar: done") + + +# ---- legacy one-shot ------------------------------------------------------- + + +def test_show_report_legacy() -> None: + buf = io.StringIO() + report = RunReportPresenter(_CapturePresenter(), stream=buf, caps=_tty_caps()) + report.show_report(_make_result(post_duration=0.2), []) + out = _strip(buf.getvalue()) + assert "roar show --job f3fba717" in out + assert "trace done" in out + assert "done" in out diff --git a/tests/unit/test_terminal_caps.py b/tests/unit/test_terminal_caps.py new file mode 100644 index 00000000..269e8104 --- /dev/null +++ b/tests/unit/test_terminal_caps.py @@ -0,0 +1,71 @@ +"""Tests for presenters.terminal capability detection and styling.""" + +from __future__ import annotations + +import pytest + +from roar.presenters.terminal import detect, style + + +class _FakeStream: + def __init__(self, tty: bool, encoding: str = "utf-8"): + self.encoding = encoding + self._tty = tty + + def isatty(self) -> bool: + return self._tty + + +class TestDetect: + def test_tty_utf8_enables_everything(self, monkeypatch: pytest.MonkeyPatch): + monkeypatch.setenv("LANG", "en_US.UTF-8") + monkeypatch.delenv("NO_COLOR", raising=False) + caps = detect(_FakeStream(tty=True, encoding="utf-8")) + assert caps.is_tty + assert caps.can_color + assert caps.can_emoji + + def test_non_tty_disables_everything(self): + caps = detect(_FakeStream(tty=False, encoding="utf-8")) + assert not caps.is_tty + assert not caps.can_color + assert not caps.can_emoji + 
assert caps.pipe_mode is True
+
+    def test_no_color_env_disables_color_only(self, monkeypatch: pytest.MonkeyPatch):
+        monkeypatch.setenv("NO_COLOR", "1")
+        monkeypatch.setenv("LANG", "en_US.UTF-8")
+        caps = detect(_FakeStream(tty=True, encoding="utf-8"))
+        assert caps.is_tty
+        assert not caps.can_color
+        assert caps.can_emoji  # emoji is still fine without color
+
+    def test_ascii_encoding_disables_emoji(self, monkeypatch: pytest.MonkeyPatch):
+        monkeypatch.setenv("LANG", "C")
+        monkeypatch.delenv("LC_ALL", raising=False)
+        caps = detect(_FakeStream(tty=True, encoding="ascii"))
+        assert caps.is_tty
+        assert not caps.can_emoji
+
+    def test_width_has_sensible_floor(self):
+        caps = detect(_FakeStream(tty=True, encoding="utf-8"))
+        assert caps.width >= 40
+
+
+class TestStyle:
+    def test_disabled_returns_plain_text(self):
+        assert style("hello", "bold", "red", enabled=False) == "hello"
+
+    def test_no_codes_returns_plain_text(self):
+        assert style("hello") == "hello"
+
+    def test_wraps_with_ansi(self):
+        out = style("hi", "bold", enabled=True)
+        assert "\x1b[1m" in out
+        assert "\x1b[0m" in out
+        assert "hi" in out
+
+    def test_unknown_code_is_ignored(self):
+        out = style("hi", "nonexistent", enabled=True)
+        # Unknown codes build no ANSI prefix → wrapping (and the trailing reset) is skipped.
+        assert out == "hi"