Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions .changelog/024.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
name: __ROOT__
ts: 2026-02-17 21:33:13.968849+00:00
type: fix
author: Espen Albert
changelog_message: 'fix(git_ops): include PR URL in log message after updating PR body'
message: 'fix(git_ops): include PR URL in log message after updating PR body'
short_sha: ee7549
---
name: copy
ts: 2026-02-17 21:33:20.845532+00:00
type: fix
author: Espen Albert
changelog_message: 'fix(cmd_copy): enhance sync output with destination header and adjust summary formatting'
message: 'fix(cmd_copy): enhance sync output with destination header and adjust summary formatting'
short_sha: a3126a
1 change: 1 addition & 0 deletions docs/copy/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ Copy files from SRC to DEST repositories.

| Version | Change |
|---------|--------|
| unreleased | fix(cmd_copy): enhance sync output with destination header and adjust summary formatting |
| 0.7.4 | fix(copy): skip PR close when running with --local/--skip-commit |
| 0.7.1 | fix(cmd_copy): add warning for unknown comment prefix in sync process |
| 0.6.0 | fix(copy): apply skip_sections when creating new files with sections |
Expand Down
38 changes: 28 additions & 10 deletions path_sync/_internal/auto_merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,15 +70,16 @@ def pending_checks(self) -> list[CheckRun]:
return [c for c in self.checks if c.pending]


def enable_auto_merge(repo_path: Path, pr_ref: str, config: AutoMergeConfig, dest_name: str = "") -> None:
    """Enable GitHub auto-merge for a PR via the ``gh`` CLI.

    Args:
        repo_path: Working directory for the ``gh`` invocation.
        pr_ref: Branch name or URL identifying the PR, passed to ``gh pr merge``.
        config: Supplies the merge method (e.g. squash/merge/rebase) and the
            delete-branch flag.
        dest_name: Optional display name for log lines; falls back to pr_ref.

    Failures are logged as warnings rather than raised, so one PR cannot
    abort the whole auto-merge pass.
    """
    label = dest_name or pr_ref
    cmd = ["gh", "pr", "merge", "--auto", f"--{config.method}", pr_ref]
    if config.delete_branch:
        cmd.append("--delete-branch")
    result = subprocess.run(cmd, cwd=repo_path, capture_output=True, text=True)
    if result.returncode != 0:
        logger.warning(f"  {label}: auto-merge enable failed: {result.stderr.strip()}")
    else:
        logger.info(f"  {label}: enabled auto-merge ({config.method})")


def get_pr_checks(repo_path: Path, pr_ref: str) -> list[CheckRun]:
def get_pr_url(repo_path: Path, pr_ref: str) -> str:
    """Return the web URL of a PR via ``gh pr view``; fall back to *pr_ref*.

    Args:
        repo_path: Working directory for the ``gh`` invocation.
        pr_ref: Branch name or URL identifying the PR.

    Returns:
        The PR's URL, or *pr_ref* unchanged when ``gh`` fails or prints nothing.
    """
    cmd = ["gh", "pr", "view", pr_ref, "--json", "url", "-q", ".url"]
    result = subprocess.run(cmd, cwd=repo_path, capture_output=True, text=True)
    if result.returncode != 0:
        logger.warning(f"Failed to get PR URL for {pr_ref}: {result.stderr.strip()}")
        return pr_ref
    # gh can exit 0 with empty output; still fall back to the ref in that case.
    return result.stdout.strip() or pr_ref


def wait_for_merge(repo_path: Path, pr_ref: str, config: AutoMergeConfig, dest_name: str = "") -> PRMergeResult:
    """Poll a PR until it is merged, closed, or the configured timeout expires.

    Args:
        repo_path: Working directory for the ``gh`` invocations.
        pr_ref: Branch name or URL identifying the PR.
        config: Supplies timeout_seconds and poll_interval_seconds.
        dest_name: Optional display name for log lines; falls back to pr_ref.

    Returns:
        A PRMergeResult whose state is MERGED, CLOSED (with check results), or
        OPEN when the timeout elapsed before the PR resolved.
    """
    pr_url = get_pr_url(repo_path, pr_ref)
    deadline = time.monotonic() + config.timeout_seconds
    label = dest_name or pr_ref
    poll_count = 0

    while time.monotonic() < deadline:
        state = get_pr_state(repo_path, pr_ref)
        if state == PRState.MERGED:
            logger.info(f"{label}: merged ({pr_url})")
            return PRMergeResult(dest_name=dest_name, pr_url=pr_url, branch=pr_ref, state=PRState.MERGED)
        if state == PRState.CLOSED:
            # Fetch check runs so the summary can show why the PR was abandoned.
            checks = get_pr_checks(repo_path, pr_ref)
            logger.warning(f"{label}: PR closed without merging ({pr_url})")
            return PRMergeResult(dest_name=dest_name, pr_url=pr_url, branch=pr_ref, state=PRState.CLOSED, checks=checks)
        poll_count += 1
        elapsed = int(config.timeout_seconds - (deadline - time.monotonic()))
        logger.info(f"{label}: still open (poll #{poll_count}, {elapsed}s elapsed)")
        time.sleep(config.poll_interval_seconds)

    # Timed out: report the PR as still OPEN together with its current checks.
    checks = get_pr_checks(repo_path, pr_ref)
    logger.warning(f"Timeout waiting for {label} after {config.timeout_seconds}s ({pr_url})")
    return PRMergeResult(dest_name=dest_name, pr_url=pr_url, branch=pr_ref, state=PRState.OPEN, checks=checks)


# Width of the "─" divider printed around the Auto-merge section header.
SEPARATOR_WIDTH = 40

def handle_auto_merge(
pr_refs: list[PRRef],
config: AutoMergeConfig,
Expand All @@ -133,22 +145,27 @@ def handle_auto_merge(
if not pr_refs:
return []

line = "─" * SEPARATOR_WIDTH
logger.info(f"\n{line}")
logger.info(" Auto-merge")
logger.info(line)

pending_refs: list[PRRef] = []
for ref in pr_refs:
state = get_pr_state(ref.repo_path, ref.branch_or_url)
if state == PRState.MERGED:
logger.info(f"{ref.dest_name}: already merged")
logger.info(f" {ref.dest_name}: already merged")
continue
enable_auto_merge(ref.repo_path, ref.branch_or_url, config)
enable_auto_merge(ref.repo_path, ref.branch_or_url, config, dest_name=ref.dest_name)
pending_refs.append(ref)

if no_wait:
logger.info("--no-wait: skipping merge polling")
logger.info(" --no-wait: skipping merge polling")
return []

results: list[PRMergeResult] = []
for ref in pending_refs:
logger.info(f"Waiting for {ref.dest_name} to merge...")
logger.info(f" Waiting for {ref.dest_name} to merge...")
result = wait_for_merge(ref.repo_path, ref.branch_or_url, config, dest_name=ref.dest_name)
results.append(result)

def _log_summary(results: list[PRMergeResult]) -> None:
    """Log an aligned summary table: repo name, PR URL, state, failed checks."""
    if not results:
        return
    # Column widths adapt to the longest destination name / PR URL present.
    max_name = max(len(r.dest_name) for r in results)
    max_url = max(len(r.pr_url) for r in results)
    # Pad 'State' to 8 so the header lines up with the {r.state:<8} row field.
    header = f"{'Repo':<{max_name}} {'PR':<{max_url}} {'State':<8} Failed Checks"
    logger.info(header)
    logger.info("-" * len(header))
    for r in results:
        failed = ", ".join(c.name for c in r.failed_checks)
        logger.info(f"{r.dest_name:<{max_name}} {r.pr_url:<{max_url}} {r.state:<8} {failed}")
21 changes: 16 additions & 5 deletions path_sync/_internal/cmd_copy.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,8 +258,10 @@ def _sync_destination(
dest_root = resolve_repo_path(dest, src_root, opts.work_dir)
dest_repo = ensure_repo(dest, dest_root, dry_run=opts.dry_run)
copy_branch = dest.resolved_copy_branch(config.name)
_print_dest_header(dest)

if _skip_already_synced(dest.name, dest_root, copy_branch, commit_ts, opts, config):
typer.echo(" (already synced, skipped)", err=True)
return 0, None

if not opts.no_checkout and prompt_utils.prompt_confirm(f"Switch {dest.name} to {copy_branch}?", opts.no_prompt):
Expand All @@ -270,10 +272,10 @@ def _sync_destination(
from_default=opts.checkout_from_default,
)
result = _sync_paths(config, dest, src_root, dest_root, opts)
_print_sync_summary(dest, result)
_print_sync_summary(result)

if result.total == 0:
logger.info(f"{dest.name}: No changes")
typer.echo(" No changes", err=True)
_close_stale_pr(dest_root, copy_branch, opts, config)
return 0, None

Expand Down Expand Up @@ -304,14 +306,23 @@ def _sync_destination(
return result.total, pr_ref


def _print_sync_summary(dest: Destination, result: SyncResult) -> None:
typer.echo(f"\nSyncing to {dest.name}...", err=True)
# Width of the "─" rule used for per-destination section headers in sync output.
SEPARATOR_WIDTH = 40


def _print_dest_header(dest: Destination) -> None:
    """Echo a boxed section header naming *dest* to stderr before its sync output."""
    line = "─" * SEPARATOR_WIDTH
    typer.echo(f"\n{line}", err=True)
    typer.echo(f" {dest.name}", err=True)
    typer.echo(line, err=True)


def _print_sync_summary(result: SyncResult) -> None:
    """Echo an indented summary of sync changes to stderr.

    Prints nothing when the corresponding counter is zero, so a no-op sync
    stays silent here (the caller prints "No changes" separately).
    """
    if result.content_changes > 0:
        typer.echo(f" [{result.content_changes} files synced]", err=True)
    if result.orphans_deleted > 0:
        typer.echo(f" [-] {result.orphans_deleted} orphans deleted", err=True)
    if result.total > 0:
        typer.echo(f" {result.total} changes ready.", err=True)


def _sync_paths(
Expand Down
3 changes: 2 additions & 1 deletion path_sync/_internal/git_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,8 @@ def update_pr_body(repo_path: Path, branch: str, body: str) -> bool:
if result.returncode != 0:
logger.warning(f"Failed to update PR body: {result.stderr}")
return False
logger.info("Updated PR body")
pr_url = f"https://github.com/{repo_full}/pull/{pr_number}"
logger.info(f"Updated PR body: {pr_url}")
return True


Expand Down