diff --git a/.gitignore b/.gitignore index 35432e6..42b2d1d 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,5 @@ aps.catalog.yaml .cursor +.claude _bmad* diff --git a/.trunk/trunk.yaml b/.trunk/trunk.yaml index a65fd09..535382a 100644 --- a/.trunk/trunk.yaml +++ b/.trunk/trunk.yaml @@ -22,7 +22,7 @@ lint: - CHANGELOG.md # Auto-generated by release-please - AGENTS.md enabled: - - checkov@3.2.497 + - checkov@3.2.500 - clippy@1.83.0 - git-diff-check - markdownlint@0.47.0 @@ -30,7 +30,7 @@ lint: - prettier@3.8.0 - rustfmt@1.83.0 - taplo@0.10.0 - - trufflehog@3.92.5 + - trufflehog@3.93.0 - yamllint@1.38.0 actions: enabled: diff --git a/README.md b/README.md index 07094c5..a52b8b8 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,7 @@ - **Composable AGENTS.md** - Merge multiple AGENTS.md files from local or remote sources into one - **Safe installs** - Automatic conflict detection and backup creation - **Deterministic lockfile** - Idempotent syncs that only update when needed +- **Optimized Git Operations** - Smart caching reuses clones and skips unchanged commits for fast syncs ## Installation @@ -78,17 +79,23 @@ cargo build --release This creates a `aps.yaml` manifest file with an example entry. -2. **Add skills directly from GitHub URLs:** +2. 
**Add assets directly from git repositories:** ```bash # Add a skill from a GitHub URL - automatically syncs the skill - aps add https://github.com/hashicorp/agent-skills/blob/main/terraform/module-generation/skills/refactor-module/SKILL.md + aps add agent_skill https://github.com/hashicorp/agent-skills/blob/main/terraform/module-generation/skills/refactor-module/SKILL.md # Or use the folder URL (SKILL.md is auto-detected) - aps add https://github.com/hashicorp/agent-skills/tree/main/terraform/module-generation/skills/refactor-module + aps add agent_skill https://github.com/hashicorp/agent-skills/tree/main/terraform/module-generation/skills/refactor-module + + # Add Cursor rules from a private repo (SSH) + aps add cursor_rules git@github.com:org/private-rules.git + + # Add a private skill with an explicit path + aps add agent_skill git@github.com:org/private-skills.git --path skills/refactor-module ``` - This parses the GitHub URL, adds an entry to `aps.yaml`, and syncs **only that skill** immediately (other entries are not affected). + This parses the repository identifier, adds an entry to `aps.yaml`, and syncs **only that asset** immediately (other entries are not affected). 3. 
**Or manually edit the manifest** to define your assets: @@ -120,7 +127,7 @@ cargo build --release | Command | Description | | -------------- | ------------------------------------------------- | | `aps init` | Create a new manifest file and update .gitignore | -| `aps add` | Add a skill from a GitHub URL and sync it | +| `aps add` | Add an asset from a git repo and sync it | | `aps sync` | Sync all entries from manifest and install assets | | `aps validate` | Validate manifest schema and check sources | | `aps status` | Display last sync information from lockfile | @@ -133,8 +140,10 @@ cargo build --release ### Add Options -- `--id ` - Custom entry ID (defaults to skill folder name) -- `--kind ` - Asset kind: `agent-skill`, `cursor-rules`, `cursor-skills-root`, `agents-md` (default: `agent-skill`) +- `aps add ` - Asset types: `agent_skill`, `cursor_rules`, `cursor_skills_root`, `agents_md` +- `--id ` - Custom entry ID (defaults to repo or path name) +- `--path ` - Path within the repository (required for `agent_skill` when using SSH/HTTPS repo URLs) +- `--ref ` - Git ref (branch/tag/commit) to use - `--no-sync` - Only add to manifest, don't sync immediately - `--all` - Add all discovered skills without prompting (for repo-level URLs or directories) - `--yes` / `-y` - Skip confirmation prompts @@ -207,12 +216,35 @@ aps add --yes https://github.com/anthropics/skills When you run `aps sync`: -1. **Entries are synced** - Each entry in `aps.yaml` is installed to its destination -2. **Stale entries are cleaned** - Entries in the lockfile that no longer exist in `aps.yaml` are automatically removed -3. **Lockfile is saved** - The updated lockfile is written to disk +1. **Entries are synced** - Each entry in your manifest (by default `aps.yaml`) is installed to its destination (git sources reuse cached clones for speed) +2. **Stale entries are cleaned** - Entries in the lockfile that no longer exist in the manifest are automatically removed +3. 
**Lockfile is saved** - The updated lockfile is written to disk, next to the manifest Note: Stale entry cleanup only happens during a full sync. When using `--only ` to sync specific entries, other lockfile entries are preserved. +### Example Sync Output + +When syncing a manifest with multiple entries, `aps sync` now shows per-entry progress so you can see work as it happens: + +```bash +$ aps sync + +(1/3) Syncing my-agents from filesystem $HOME/agents-md-partials (AGENTS.md)... +(1/3) Finished my-agents in 0.12s [synced] +(2/3) Syncing composite-agents from 3 composite source(s)... +(2/3) Finished composite-agents in 0.45s [copied] +(3/3) Syncing company-rules from git repo git@github.com:your-username/dotfiles.git... +(3/3) Finished company-rules in 1.23s [synced] + +Syncing from aps.yaml + + ✓ my-agents → ./AGENTS.md [synced] + · composite-agents → ./AGENTS.md [current] + ✓ company-rules → ./.cursor/rules/ [synced] + +2 synced, 1 current +``` + ## Configuration ### Manifest File (`aps.yaml`) @@ -346,9 +378,19 @@ Key features: - **Order preserved**: Files are merged in the order specified in `sources` - **Auto-generated header**: Output includes a comment indicating it was composed by aps -### Lockfile (`aps.lock.yaml`) +### Lockfile (manifest-based) + +The lockfile tracks installed assets and is automatically created/updated by `aps sync`. **This file should be committed to version control** to ensure reproducible installations across your team. -The lockfile tracks installed assets and is automatically created/updated by `aps sync`. **This file should be committed to version control** to ensure reproducible installations across your team. It stores: +The lockfile name is derived from the manifest filename: + +- `aps.yaml` → `aps.lock.yaml` +- `aps-rules.yaml` → `aps-rules.lock.yaml` +- `custom` → `custom.lock.yaml` + +This means you can keep multiple manifests in the same repository, each with its own lockfile, without conflicts. 
+ +Each lockfile stores: - APS version that generated/modified the lockfile - Source information @@ -358,6 +400,8 @@ The lockfile tracks installed assets and is automatically created/updated by `ap **Environment Variables Are Preserved**: Unlike other package managers (npm, uv, bundler) that expand environment variables to concrete paths, `aps` preserves shell variables like `$HOME` in the lockfile. This makes lockfiles portable across different machines and users who have the same relative directory structure. +**Legacy lockfiles**: For backward compatibility, `aps` still recognizes historical lockfile names (`aps.lock.yaml`, `aps.manifest.lock`) when loading. On the next successful `aps sync`, these legacy files are migrated to the manifest-based name and the old files are cleaned up. + ## Examples ### Non-interactive sync for CI/CD diff --git a/aps.lock.yaml b/aps.lock.yaml index b1584ee..7e7f4be 100644 --- a/aps.lock.yaml +++ b/aps.lock.yaml @@ -1,6 +1,18 @@ version: 1 aps_version: 0.1.11 entries: + skill-creator: + source: https://github.com/anthropics/skills.git + dest: .claude/skills/skill-creator/ + resolved_ref: main + commit: 1ed29a03dc852d30fa6ef2ca53a67dc2c2c2c563 + checksum: sha256:bdd5389ea8390277b8310b83eaa561a703c2fa08bbcf770e6394d42571f7445f + anthropic-skills: + source: https://github.com/anthropics/skills.git + dest: ./.claude/skills/ + resolved_ref: main + commit: 1ed29a03dc852d30fa6ef2ca53a67dc2c2c2c563 + checksum: sha256:3bd37a1929706db495f2609910316bb1a385a057556deb151f968e877aef39e9 composite-agents-md: source: composite: @@ -8,21 +20,9 @@ entries: - https://github.com/westonplatter/agentically.git:agents-md-partials/AGENTS.pull-requests.md dest: ./AGENTS.md checksum: sha256:11eedede54fa5217e041f613adc9f692b985972e8487261e720fc553d707bdbf - anthropic-skills: - source: https://github.com/anthropics/skills.git - dest: ./.claude/skills/ - resolved_ref: main - commit: 1ed29a03dc852d30fa6ef2ca53a67dc2c2c2c563 - checksum: 
sha256:3bd37a1929706db495f2609910316bb1a385a057556deb151f968e877aef39e9 internal-comms: source: https://github.com/anthropics/skills.git dest: .claude/skills/internal-comms/ resolved_ref: main commit: 1ed29a03dc852d30fa6ef2ca53a67dc2c2c2c563 checksum: sha256:b353d2d5223c9b5154f52a1d1b87c65bd0f2ed9eac5e541e88e72b4c4a614743 - skill-creator: - source: https://github.com/anthropics/skills.git - dest: .claude/skills/skill-creator/ - resolved_ref: main - commit: 1ed29a03dc852d30fa6ef2ca53a67dc2c2c2c563 - checksum: sha256:bdd5389ea8390277b8310b83eaa561a703c2fa08bbcf770e6394d42571f7445f diff --git a/docs/architecture.md b/docs/architecture.md index 1d11080..81249ee 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -120,6 +120,12 @@ pub struct GitSource { 4. If mismatch → clone and install ``` +**Per-run clone reuse:** + +- During `aps sync`, git clones are cached in-memory per run. +- Entries that share the same repo/ref/shallow (or repo/commit in locked mode) reuse the same temporary clone. +- The cache is dropped at the end of the run, so temp directories are cleaned up as usual. + ### Enum-to-Adapter Bridge The manifest uses a `Source` enum for YAML serialization, which bridges to the trait implementations: diff --git a/src/cli.rs b/src/cli.rs index 55aec61..6790dff 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -55,20 +55,22 @@ pub struct InitArgs { #[derive(Parser, Debug)] pub struct AddArgs { - /// GitHub URL or local filesystem path to a skill folder or repository. - /// Supports: GitHub URLs (https://github.com/owner/repo/...) and local - /// paths ($HOME/skills, ~/skills, ./skills). For repo-level URLs or - /// directories without SKILL.md, discovers skills and prompts for selection. - #[arg(value_name = "URL_OR_PATH")] - pub url: String, - - /// Custom entry ID (defaults to skill folder name) + /// Optional asset type (agent_skill, cursor_rules, cursor_skills_root, agents_md). + /// If omitted, defaults to agent_skill. 
When given, must be followed by URL or path. + #[arg(value_name = "ASSET_TYPE_OR_URL", num_args = 1..=2, required = true)] + pub positionals: Vec, + + /// Custom entry ID (defaults to repo or path name) #[arg(long)] pub id: Option, - /// Asset kind (defaults to agent_skill) - #[arg(long, value_enum, default_value = "agent-skill")] - pub kind: AddAssetKind, + /// Path within the repository (overrides any path from a GitHub URL) + #[arg(long)] + pub path: Option, + + /// Git ref (branch/tag/commit) to use (defaults to auto for repo URLs) + #[arg(long = "ref")] + pub git_ref: Option, /// Path to the manifest file #[arg(long)] @@ -87,19 +89,6 @@ pub struct AddArgs { pub yes: bool, } -#[derive(ValueEnum, Clone, Debug, Default)] -pub enum AddAssetKind { - #[default] - #[value(name = "agent-skill")] - AgentSkill, - #[value(name = "cursor-rules")] - CursorRules, - #[value(name = "cursor-skills-root")] - CursorSkillsRoot, - #[value(name = "agents-md")] - AgentsMd, -} - #[derive(ValueEnum, Clone, Debug, Default)] pub enum ManifestFormat { #[default] diff --git a/src/commands.rs b/src/commands.rs index 8064d5a..b11878d 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -7,7 +7,7 @@ use crate::discover::{ discover_skills_in_local_dir, discover_skills_in_repo, prompt_skill_selection, }; use crate::error::{ApsError, Result}; -use crate::github_url::parse_github_url; +use crate::github_url::{parse_github_url, parse_repo_identifier}; use crate::hooks::validate_cursor_hooks; use crate::install::{install_composite_entry, install_entry, InstallOptions, InstallResult}; use crate::lockfile::{display_status, Lockfile}; @@ -16,6 +16,7 @@ use crate::manifest::{ validate_manifest, AssetKind, Entry, Manifest, Source, DEFAULT_MANIFEST_NAME, }; use crate::orphan::{detect_orphaned_paths, prompt_and_cleanup_orphans}; +use crate::sources::GitCloneCache; use crate::sync_output::{print_sync_results, print_sync_summary, SyncDisplayItem, SyncStatus}; use console::{style, Style}; use std::fs; @@ 
-80,7 +81,11 @@ fn is_local_path(input: &str) -> bool { } /// Parse the add target into a typed enum for routing. -fn parse_add_target(url_or_path: &str, all_flag: bool) -> Result { +fn parse_add_target( + url_or_path: &str, + all_flag: bool, + asset_kind: &AssetKind, +) -> Result { if is_local_path(url_or_path) { // Check if it contains a SKILL.md (single-skill) or not (discovery) let expanded = shellexpand::full(url_or_path) @@ -117,19 +122,72 @@ fn parse_add_target(url_or_path: &str, all_flag: bool) -> Result Result Result<()> { Ok(()) } +/// Parse add positionals: 1 arg => (AgentSkill, url), 2 args => (parse first as kind, second as url). +fn parse_add_positionals(positionals: &[String]) -> Result<(AssetKind, String)> { + match positionals.len() { + 1 => Ok((AssetKind::AgentSkill, positionals[0].clone())), + 2 => { + let kind = parse_asset_kind(&positionals[0])?; + Ok((kind, positionals[1].clone())) + } + _ => Err(ApsError::InvalidAddArguments { + message: "Expected either or ".to_string(), + }), + } +} + /// Execute the `aps add` command pub fn cmd_add(args: AddArgs) -> Result<()> { - let target = parse_add_target(&args.url, args.all)?; + let (asset_kind, url) = parse_add_positionals(&args.positionals)?; + let mut target = parse_add_target(&url, args.all, &asset_kind)?; + + // --path overrides: treat repo-level as single entry with that path (no discovery/clone on add) + if let ParsedAddTarget::GitHubDiscovery { + repo_url, + git_ref, + search_path, + } = &target + { + if let Some(ref path) = args.path { + target = ParsedAddTarget::GitHubSkill { + repo_url: repo_url.clone(), + git_ref: git_ref.clone(), + skill_path: path.clone(), + skill_name: path + .rsplit('/') + .next() + .filter(|s| !s.is_empty()) + .map(String::from), + }; + } else if search_path.is_empty() && asset_kind == AssetKind::AgentSkill { + // Repo-level agent_skill with no path: keep discovery (will clone to find skills) + } + } match target { ParsedAddTarget::GitHubSkill { @@ -251,32 +346,29 @@ 
pub fn cmd_add(args: AddArgs) -> Result<()> { git_ref, skill_path, skill_name, - } => cmd_add_single_git(args, &repo_url, &git_ref, &skill_path, skill_name), + } => cmd_add_single_git( + args, + asset_kind, + &repo_url, + &git_ref, + &skill_path, + skill_name, + ), ParsedAddTarget::GitHubDiscovery { repo_url, git_ref, search_path, - } => cmd_add_discover_git(args, &repo_url, &git_ref, &search_path), + } => cmd_add_discover_git(args, asset_kind, &repo_url, &git_ref, &search_path), ParsedAddTarget::FilesystemSkill { original_path, skill_name, - } => cmd_add_single_filesystem(args, &original_path, &skill_name), + } => cmd_add_single_filesystem(args, asset_kind, &original_path, &skill_name), ParsedAddTarget::FilesystemDiscovery { original_path } => { - cmd_add_discover_filesystem(args, &original_path) + cmd_add_discover_filesystem(args, asset_kind, &original_path) } } } -/// Convert CLI asset kind to manifest asset kind. -fn resolve_asset_kind(kind: &AddAssetKind) -> AssetKind { - match kind { - AddAssetKind::AgentSkill => AssetKind::AgentSkill, - AddAssetKind::CursorRules => AssetKind::CursorRules, - AddAssetKind::CursorSkillsRoot => AssetKind::CursorSkillsRoot, - AddAssetKind::AgentsMd => AssetKind::AgentsMd, - } -} - /// Compute the destination path for a skill entry. fn skill_dest(asset_kind: &AssetKind, entry_id: &str) -> String { format!( @@ -399,10 +491,7 @@ fn maybe_sync( upgrade: false, })?; } else { - println!( - "Run `aps sync` to install the skill{}.", - if entry_ids.len() > 1 { "s" } else { "" } - ); + println!("Run `aps sync` to install."); } Ok(()) @@ -415,6 +504,7 @@ fn maybe_sync( /// Add a single skill from a GitHub URL. 
fn cmd_add_single_git( args: AddArgs, + asset_kind: AssetKind, repo_url: &str, git_ref: &str, skill_path: &str, @@ -427,8 +517,6 @@ fn cmd_add_single_git( // For single-skill adds, check for duplicate ID upfront check_duplicate_id(&entry_id, args.manifest.as_deref())?; - let asset_kind = resolve_asset_kind(&args.kind); - let entry = Entry { id: entry_id.clone(), kind: asset_kind.clone(), @@ -460,6 +548,7 @@ fn cmd_add_single_git( /// Discover and add skills from a GitHub repository. fn cmd_add_discover_git( args: AddArgs, + asset_kind: AssetKind, repo_url: &str, git_ref: &str, search_path: &str, @@ -472,7 +561,7 @@ fn cmd_add_discover_git( shallow: true, path: Some(skill.repo_path.clone()), }; - cmd_add_discovered(args, skills, source_builder, repo_url) + cmd_add_discovered(args, skills, source_builder, repo_url, asset_kind) } // ============================================================================ @@ -480,13 +569,16 @@ fn cmd_add_discover_git( // ============================================================================ /// Add a single skill from a local filesystem path. -fn cmd_add_single_filesystem(args: AddArgs, original_path: &str, skill_name: &str) -> Result<()> { +fn cmd_add_single_filesystem( + args: AddArgs, + asset_kind: AssetKind, + original_path: &str, + skill_name: &str, +) -> Result<()> { let entry_id = args.id.unwrap_or_else(|| skill_name.to_string()); check_duplicate_id(&entry_id, args.manifest.as_deref())?; - let asset_kind = resolve_asset_kind(&args.kind); - let entry = Entry { id: entry_id.clone(), kind: asset_kind.clone(), @@ -515,7 +607,11 @@ fn cmd_add_single_filesystem(args: AddArgs, original_path: &str, skill_name: &st } /// Discover and add skills from a local filesystem directory. 
-fn cmd_add_discover_filesystem(args: AddArgs, original_path: &str) -> Result<()> { +fn cmd_add_discover_filesystem( + args: AddArgs, + asset_kind: AssetKind, + original_path: &str, +) -> Result<()> { println!("Searching for skills in {}...\n", original_path); let skills = discover_skills_in_local_dir(original_path)?; let source_builder = |skill: &DiscoveredSkill| Source::Filesystem { @@ -523,7 +619,7 @@ fn cmd_add_discover_filesystem(args: AddArgs, original_path: &str) -> Result<()> symlink: true, path: Some(skill.repo_path.clone()), }; - cmd_add_discovered(args, skills, source_builder, original_path) + cmd_add_discovered(args, skills, source_builder, original_path, asset_kind) } // ============================================================================ @@ -540,6 +636,7 @@ fn cmd_add_discovered( skills: Vec, source_builder: impl Fn(&DiscoveredSkill) -> Source, location: &str, + asset_kind: AssetKind, ) -> Result<()> { if skills.is_empty() { return Err(ApsError::NoSkillsFound { @@ -685,8 +782,6 @@ fn cmd_add_discovered( } }; - let asset_kind = resolve_asset_kind(&args.kind); - let entries: Vec = to_add .iter() .map(|skill| { @@ -828,6 +923,86 @@ fn select_skills(skills: &[DiscoveredSkill], defaults: &[bool], all: bool) -> Re } } +fn sync_status_for_result(result: &InstallResult) -> SyncStatus { + if !result.warnings.is_empty() { + SyncStatus::Warning + } else if result.skipped_no_change && result.upgrade_available.is_some() { + SyncStatus::Upgradable + } else if result.skipped_no_change { + SyncStatus::Current + } else if result.was_symlink { + SyncStatus::Synced + } else { + SyncStatus::Copied + } +} + +fn sync_status_label(status: SyncStatus) -> &'static str { + match status { + SyncStatus::Synced => "synced", + SyncStatus::Copied => "copied", + SyncStatus::Current => "current", + SyncStatus::Upgradable => "upgrade available", + SyncStatus::Warning => "warning", + SyncStatus::Error => "error", + } +} + +fn progress_style_for_status(status: SyncStatus) -> 
Style { + match status { + SyncStatus::Synced | SyncStatus::Copied => Style::new().green(), + SyncStatus::Current => Style::new().dim(), + SyncStatus::Upgradable => Style::new().color256(208), + SyncStatus::Warning => Style::new().yellow(), + SyncStatus::Error => Style::new().red(), + } +} + +fn format_entry_source(entry: &Entry) -> String { + if entry.is_composite() { + return format!("{} composite source(s)", entry.sources.len()); + } + + match &entry.source { + Some(Source::Git { repo, .. }) => format!("git repo {}", repo), + Some(Source::Filesystem { root, path, .. }) => match path { + Some(p) => format!("filesystem {} ({})", root, p), + None => format!("filesystem {}", root), + }, + None => "no source configured".to_string(), + } +} + +fn parse_asset_kind(input: &str) -> Result { + let normalized = input.trim().replace('-', "_"); + let kind = AssetKind::from_str(&normalized)?; + if !matches!( + kind, + AssetKind::AgentSkill + | AssetKind::CursorRules + | AssetKind::CursorSkillsRoot + | AssetKind::AgentsMd + ) { + return Err(ApsError::InvalidAddArguments { + message: format!( + "Asset type '{}' is not supported by `aps add` yet. 
Supported types: agent_skill, cursor_rules, cursor_skills_root, agents_md", + normalized + ), + }); + } + Ok(kind) +} + +fn default_path_for_kind(kind: &AssetKind) -> Option { + match kind { + AssetKind::CursorRules => Some(".cursor/rules".to_string()), + AssetKind::CursorHooks => Some(".cursor".to_string()), + AssetKind::CursorSkillsRoot => Some(".cursor/skills".to_string()), + AssetKind::AgentsMd => Some("AGENTS.md".to_string()), + AssetKind::AgentSkill | AssetKind::CompositeAgentsMd => None, + } +} + /// Execute the `aps sync` command pub fn cmd_sync(args: SyncArgs) -> Result<()> { // Discover and load manifest @@ -874,20 +1049,69 @@ pub fn cmd_sync(args: SyncArgs) -> Result<()> { strict: args.strict, upgrade: args.upgrade, }; + let mut git_cache = GitCloneCache::new(); // Detect orphaned paths (destinations that changed) let orphans = detect_orphaned_paths(&entries_to_install, &lockfile, &base_dir); - // Install selected entries + // Install selected entries with progressive feedback + let total = entries_to_install.len(); let mut results: Vec = Vec::new(); - for entry in &entries_to_install { + for (index, entry) in entries_to_install.iter().enumerate() { + let ordinal = index + 1; + let source_desc = format_entry_source(entry); + println!( + "({}/{}) {} {} {}", + ordinal, + total, + style("Syncing").dim(), + style(&entry.id).cyan().bold(), + style(format!("from {}...", source_desc)).dim(), + ); + std::io::stdout().flush().ok(); + + let start = std::time::Instant::now(); + // Use composite install for composite entries, regular install otherwise let result = if entry.is_composite() { - install_composite_entry(entry, &base_dir, &lockfile, &options)? + install_composite_entry(entry, &base_dir, &lockfile, &options, &mut git_cache) } else { - install_entry(entry, &base_dir, &lockfile, &options)? 
+ install_entry(entry, &base_dir, &lockfile, &options, &mut git_cache) }; - results.push(result); + + match result { + Ok(result) => { + let elapsed = start.elapsed(); + let status = sync_status_for_result(&result); + let label = sync_status_label(status); + let status_style = progress_style_for_status(status); + println!( + "({}/{}) {} {} {} {}", + ordinal, + total, + style("Finished").dim(), + style(&entry.id).cyan().bold(), + style(format!("in {:.2?}", elapsed)).dim(), + status_style.apply_to(format!("[{}]", label)), + ); + std::io::stdout().flush().ok(); + results.push(result); + } + Err(err) => { + let elapsed = start.elapsed(); + let error_style = Style::new().red(); + eprintln!( + "({}/{}) {} {} {}: {}", + ordinal, + total, + error_style.apply_to("Failed"), + style(&entry.id).cyan().bold(), + style(format!("after {:.2?}", elapsed)).dim(), + error_style.apply_to(err.to_string()), + ); + return Err(err); + } + } } // Cleanup orphaned paths after successful install @@ -925,17 +1149,7 @@ pub fn cmd_sync(args: SyncArgs) -> Result<()> { let display_items: Vec = results .iter() .map(|r| { - let status = if !r.warnings.is_empty() { - SyncStatus::Warning - } else if r.skipped_no_change && r.upgrade_available.is_some() { - SyncStatus::Upgradable - } else if r.skipped_no_change { - SyncStatus::Current - } else if r.was_symlink { - SyncStatus::Synced - } else { - SyncStatus::Copied - }; + let status = sync_status_for_result(r); let mut item = SyncDisplayItem::new( r.id.clone(), diff --git a/src/error.rs b/src/error.rs index a141016..c6834d9 100644 --- a/src/error.rs +++ b/src/error.rs @@ -165,6 +165,19 @@ pub enum ApsError { #[diagnostic(code(aps::add::invalid_github_url), help("{reason}"))] InvalidGitHubUrl { url: String, reason: String }, + #[error("Invalid repository identifier: {value}")] + #[diagnostic(code(aps::add::invalid_repo), help("{reason}"))] + InvalidRepoSpecifier { value: String, reason: String }, + + #[error("Invalid add arguments")] + 
#[diagnostic(code(aps::add::invalid_args), help("{message}"))] + InvalidAddArguments { message: String }, + + /// Not used at the moment + // #[error("Asset type '{asset_type}' requires a path within the repository")] + // #[diagnostic(code(aps::add::missing_path), help("{hint}"))] + // MissingAddPath { asset_type: String, hint: String }, + #[error("No skills found in {location}")] #[diagnostic( code(aps::discover::no_skills), diff --git a/src/github_url.rs b/src/github_url.rs index fb56004..77764ff 100644 --- a/src/github_url.rs +++ b/src/github_url.rs @@ -1,5 +1,7 @@ //! GitHub URL parsing for the `aps add` command. //! +//! Also includes helpers for parsing git repository identifiers. +//! //! Parses GitHub URLs to extract repository, branch/ref, and path information. //! //! Supported URL formats: @@ -8,6 +10,8 @@ //! - `https://github.com/{owner}/{repo}/blob/{ref}/{path}/SKILL.md` - direct skill file use crate::error::{ApsError, Result}; +use std::path::Path; +use url::Url; /// Parsed GitHub URL components #[derive(Debug, Clone, PartialEq)] @@ -24,8 +28,21 @@ pub struct ParsedGitHubUrl { pub is_repo_level: bool, } +/// Parsed repository identifier (git URL or GitHub content URL) +#[derive(Debug, Clone, PartialEq)] +pub struct ParsedRepoIdentifier { + /// Repository URL or path (SSH/HTTPS/local) + pub repo_url: String, + /// Optional git ref (branch, tag, or commit) + pub git_ref: Option, + /// Optional path within the repository + pub path: Option, +} + +#[allow(dead_code)] impl ParsedGitHubUrl { /// Get the skill folder path (strips SKILL.md if present) + #[allow(dead_code)] pub fn skill_path(&self) -> &str { if self.is_skill_file { // Handle root-level SKILL.md files (no leading slash) @@ -43,6 +60,7 @@ impl ParsedGitHubUrl { } /// Get the skill name (last component of the path) + #[allow(dead_code)] pub fn skill_name(&self) -> Option<&str> { let skill_path = self.skill_path(); skill_path.rsplit('/').next().filter(|s| !s.is_empty()) @@ -178,6 +196,140 @@ pub 
fn parse_github_url(url: &str) -> Result<ParsedGitHubUrl> { }) } +/// Parse a repository identifier from the `aps add` command. +/// +/// Accepts: +/// - GitHub blob/tree URLs (extracts ref + path) +/// - HTTPS/SSH Git URLs +/// - SCP-style SSH URLs (git@host:owner/repo.git) +/// - Local paths (if they exist on disk) +pub fn parse_repo_identifier(input: &str) -> Result<ParsedRepoIdentifier> { + let input = input.trim(); + if input.is_empty() { + return Err(ApsError::InvalidRepoSpecifier { + value: input.to_string(), + reason: "Repository identifier cannot be empty".to_string(), + }); + } + + if let Ok(parsed) = Url::parse(input) { + let scheme = parsed.scheme(); + let host = parsed.host_str().unwrap_or_default(); + let path_segments: Vec<&str> = parsed + .path() + .trim_start_matches('/') + .split('/') + .filter(|s| !s.is_empty()) + .collect(); + + if is_github_host(host) && matches!(scheme, "http" | "https") { + if path_segments.len() >= 3 + && (path_segments[2] == "blob" || path_segments[2] == "tree") + { + let parsed = parse_github_url(input)?; + return Ok(ParsedRepoIdentifier { + repo_url: parsed.repo_url, + git_ref: Some(parsed.git_ref), + path: Some(parsed.path), + }); + } + + if path_segments.len() == 2 { + let owner = path_segments[0]; + let repo = path_segments[1].trim_end_matches(".git"); + return Ok(ParsedRepoIdentifier { + repo_url: format!("https://github.com/{}/{}.git", owner, repo), + git_ref: None, + path: None, + }); + } + + return Err(ApsError::InvalidRepoSpecifier { + value: input.to_string(), + reason: "GitHub URL must be a repository URL or a blob/tree URL".to_string(), + }); + } + + if is_github_host(host) && matches!(scheme, "ssh" | "git") { + return Ok(ParsedRepoIdentifier { + repo_url: input.to_string(), + git_ref: None, + path: None, + }); + } + + if matches!(scheme, "http" | "https" | "ssh" | "git" | "file") { + if path_segments.is_empty() { + return Err(ApsError::InvalidRepoSpecifier { + value: input.to_string(), + reason: "Repository URL is missing a path".to_string(), + }); 
+ } + return Ok(ParsedRepoIdentifier { + repo_url: input.to_string(), + git_ref: None, + path: None, + }); + } + + return Err(ApsError::InvalidRepoSpecifier { + value: input.to_string(), + reason: format!("Unsupported URL scheme: {}", scheme), + }); + } + + if is_scp_like_git_url(input) { + return Ok(ParsedRepoIdentifier { + repo_url: input.to_string(), + git_ref: None, + path: None, + }); + } + + if Path::new(input).exists() { + return Ok(ParsedRepoIdentifier { + repo_url: input.to_string(), + git_ref: None, + path: None, + }); + } + + Err(ApsError::InvalidRepoSpecifier { + value: input.to_string(), + reason: "Expected an HTTPS/SSH Git URL, GitHub blob/tree URL, or existing local path" + .to_string(), + }) +} + +fn is_github_host(host: &str) -> bool { + host == "github.com" || host == "www.github.com" +} + +fn is_scp_like_git_url(input: &str) -> bool { + if input.contains("://") { + return false; + } + + let (user_host, path) = match input.split_once(':') { + Some(parts) => parts, + None => return false, + }; + let (user, host) = match user_host.split_once('@') { + Some(parts) => parts, + None => return false, + }; + + if user.is_empty() || host.is_empty() || path.is_empty() { + return false; + } + + if path.contains(' ') { + return false; + } + + path.contains('/') || path.ends_with(".git") +} + #[cfg(test)] mod tests { use super::*; @@ -354,4 +506,28 @@ mod tests { assert!(!parsed.is_repo_level); assert!(!parsed.is_skill_file); } + + #[test] + fn test_parse_repo_identifier_with_ssh_scp() { + let parsed = parse_repo_identifier("git@github.com:org/repo.git").unwrap(); + assert_eq!(parsed.repo_url, "git@github.com:org/repo.git"); + assert_eq!(parsed.git_ref, None); + assert_eq!(parsed.path, None); + } + + #[test] + fn test_parse_repo_identifier_with_ssh_url() { + let parsed = parse_repo_identifier("ssh://git@github.com/org/repo.git").unwrap(); + assert_eq!(parsed.repo_url, "ssh://git@github.com/org/repo.git"); + assert_eq!(parsed.git_ref, None); + 
assert_eq!(parsed.path, None); + } + + #[test] + fn test_parse_repo_identifier_with_github_repo_root() { + let parsed = parse_repo_identifier("https://github.com/owner/repo").unwrap(); + assert_eq!(parsed.repo_url, "https://github.com/owner/repo.git"); + assert_eq!(parsed.git_ref, None); + assert_eq!(parsed.path, None); + } } diff --git a/src/install.rs b/src/install.rs index 63fd085..578c6b4 100644 --- a/src/install.rs +++ b/src/install.rs @@ -7,7 +7,10 @@ use crate::error::{ApsError, Result}; use crate::hooks::validate_cursor_hooks; use crate::lockfile::{LockedEntry, Lockfile}; use crate::manifest::{AssetKind, Entry}; -use crate::sources::{clone_at_commit, get_remote_commit_sha, GitInfo, ResolvedSource}; +use crate::sources::{ + get_remote_commit_sha, resolve_git_source_at_commit_with_cache, resolve_git_source_with_cache, + GitCloneCache, +}; use dialoguer::Confirm; use std::io::IsTerminal; use std::path::{Path, PathBuf}; @@ -155,6 +158,7 @@ pub fn install_entry( manifest_dir: &Path, lockfile: &Lockfile, options: &InstallOptions, + git_cache: &mut GitCloneCache, ) -> Result { info!("Processing entry: {}", entry.id); @@ -170,6 +174,7 @@ pub fn install_entry( let resolved = if let Some((repo, git_ref)) = source.git_info() { let dest_path = manifest_dir.join(entry.destination()); let locked_entry = lockfile.entries.get(&entry.id); + let shallow = source.git_shallow().unwrap_or(true); // Check if we should use the locked commit let use_locked_commit = @@ -197,24 +202,40 @@ pub fn install_entry( _ => None, }; - // If destination exists and commit matches, we're up to date + // If destination exists, verify it matches the locked checksum before skipping if dest_path.exists() { - info!( - "Entry {} is up to date (using locked commit {})", - entry.id, - &locked_commit[..8.min(locked_commit.len())] - ); - let was_symlink = locked.is_symlink; - return Ok(InstallResult { - id: entry.id.clone(), - installed: false, - skipped_no_change: true, - locked_entry: None, - warnings: 
Vec::new(), - dest_path: dest_path.clone(), - was_symlink, - upgrade_available, - }); + match compute_source_checksum(&dest_path) { + Ok(dest_checksum) if dest_checksum == locked.checksum => { + info!( + "Entry {} is up to date (using locked commit {})", + entry.id, + &locked_commit[..8.min(locked_commit.len())] + ); + let was_symlink = locked.is_symlink; + return Ok(InstallResult { + id: entry.id.clone(), + installed: false, + skipped_no_change: true, + locked_entry: None, + warnings: Vec::new(), + dest_path: dest_path.clone(), + was_symlink, + upgrade_available, + }); + } + Ok(_) => { + debug!( + "Destination for {} differs from locked checksum, reinstalling", + entry.id + ); + } + Err(err) => { + debug!( + "Failed to checksum destination for {}: {}, reinstalling", + entry.id, err + ); + } + } } // Clone at the locked commit @@ -223,25 +244,13 @@ pub fn install_entry( entry.id, &locked_commit[..8.min(locked_commit.len())] ); - let resolved_git = clone_at_commit(repo, locked_commit, locked_ref)?; - - // Build the path within the cloned repo - let path = source - .git_path() - .map(|p| p.to_string()) - .unwrap_or_else(|| ".".to_string()); - let source_path = if path == "." { - resolved_git.repo_path.clone() - } else { - resolved_git.repo_path.join(&path) - }; - - let git_info = GitInfo { - resolved_ref: resolved_git.resolved_ref.clone(), - commit_sha: resolved_git.commit_sha.clone(), - }; - - ResolvedSource::git(source_path, repo.to_string(), git_info, resolved_git) + resolve_git_source_at_commit_with_cache( + repo, + locked_commit, + locked_ref, + source.git_path(), + git_cache, + )? } else { // Upgrade mode or no locked commit: check remote and clone latest // Fast-path: skip if remote commit matches lockfile and dest exists @@ -278,8 +287,7 @@ pub fn install_entry( } // Clone latest from branch - let adapter = source.to_adapter(); - adapter.resolve(manifest_dir)? + resolve_git_source_with_cache(repo, git_ref, shallow, source.git_path(), git_cache)? 
} } else { // Non-git source (filesystem): use adapter directly @@ -491,6 +499,7 @@ pub fn install_composite_entry( manifest_dir: &Path, lockfile: &Lockfile, options: &InstallOptions, + git_cache: &mut GitCloneCache, ) -> Result { info!("Processing composite entry: {}", entry.id); @@ -505,8 +514,13 @@ pub fn install_composite_entry( let mut all_checksums: Vec = Vec::new(); for source in &entry.sources { - let adapter = source.to_adapter(); - let resolved = adapter.resolve(manifest_dir)?; + let resolved = if let Some((repo, git_ref)) = source.git_info() { + let shallow = source.git_shallow().unwrap_or(true); + resolve_git_source_with_cache(repo, git_ref, shallow, source.git_path(), git_cache)? + } else { + let adapter = source.to_adapter(); + adapter.resolve(manifest_dir)? + }; if !resolved.source_path.exists() { return Err(ApsError::SourcePathNotFound { diff --git a/src/lockfile.rs b/src/lockfile.rs index e5ca30e..d7a37ae 100644 --- a/src/lockfile.rs +++ b/src/lockfile.rs @@ -5,11 +5,18 @@ use std::fmt; use std::path::{Path, PathBuf}; use tracing::{debug, info}; -/// Default lockfile filename -pub const LOCKFILE_NAME: &str = "aps.lock.yaml"; - -/// Legacy lockfile filename (for backward compatibility) -const LEGACY_LOCKFILE_NAME: &str = "aps.manifest.lock"; +/// Legacy lockfile filenames (for backward compatibility). +/// +/// Historically, APS used fixed lockfile names that were not derived from the +/// manifest filename: +/// - "aps.lock.yaml" (current default for aps.yaml) +/// - "aps.manifest.lock" (older legacy name) +/// +/// Newer versions derive the lockfile name from the manifest filename +/// (e.g. aps-foo.yaml -> aps-foo.lock.yaml). These legacy names are still +/// recognized when loading and are automatically migrated to the new +/// manifest-based name on save. 
+const LEGACY_LOCKFILE_NAMES: &[&str] = &["aps.lock.yaml", "aps.manifest.lock"]; /// Source types for locked entries - supports both simple strings and composite structures #[derive(Debug, Clone, PartialEq)] @@ -272,19 +279,43 @@ impl Lockfile { } } - /// Get the lockfile path relative to the manifest + /// Get the lockfile path relative to the manifest. + /// + /// The lockfile name is derived from the manifest filename by removing the + /// extension (if any) and appending ".lock.yaml". + /// + /// Examples: + /// - aps.yaml -> aps.lock.yaml + /// - aps-xxx.yaml -> aps-xxx.lock.yaml + /// - custom -> custom.lock.yaml pub fn path_for_manifest(manifest_path: &Path) -> PathBuf { + // Derive the lockfile name from the manifest filename. + let lockfile_name = manifest_path + .file_name() + .and_then(|os_str| os_str.to_str()) + .map(|filename| { + // Split on the last '.' to drop the extension, if present. + if let Some((stem, _ext)) = filename.rsplit_once('.') { + format!("{stem}.lock.yaml") + } else { + format!("{filename}.lock.yaml") + } + }) + // Fallback to the historical default if we can't determine a filename. + .unwrap_or_else(|| "aps.lock.yaml".to_string()); + manifest_path .parent() - .map(|p| p.join(LOCKFILE_NAME)) - .unwrap_or_else(|| PathBuf::from(LOCKFILE_NAME)) + .map(|p| p.join(&lockfile_name)) + .unwrap_or_else(|| PathBuf::from(lockfile_name)) } - /// Load a lockfile from disk + /// Load a lockfile from disk. /// - /// Supports backward compatibility with legacy filename (aps.manifest.lock) + /// Supports backward compatibility with legacy filenames + /// (e.g. aps.lock.yaml, aps.manifest.lock). pub fn load(path: &Path) -> Result<Self> { - // Try loading from the provided path first (new filename) + // Try loading from the provided path first (canonical, manifest-based name). 
if path.exists() { let content = std::fs::read_to_string(path) .map_err(|e| ApsError::io(e, format!("Failed to read lockfile at {:?}", path)))?; @@ -298,16 +329,19 @@ impl Lockfile { return Ok(lockfile); } - // Fall back to legacy filename for backward compatibility - let legacy_path = path - .parent() - .map(|p| p.join(LEGACY_LOCKFILE_NAME)) - .unwrap_or_else(|| PathBuf::from(LEGACY_LOCKFILE_NAME)); + // Fall back to legacy filenames for backward compatibility. + let dir = path.parent().unwrap_or_else(|| Path::new(".")); + + for legacy_name in LEGACY_LOCKFILE_NAMES { + let legacy_path = dir.join(legacy_name); + if !legacy_path.exists() { + continue; + } - if legacy_path.exists() { info!( - "Loading legacy lockfile '{}' (will be migrated to '{}' on next save)", - LEGACY_LOCKFILE_NAME, LOCKFILE_NAME + "Loading legacy lockfile '{:?}' (will be migrated to '{:?}' on next save)", + legacy_path.file_name().unwrap_or_default(), + path.file_name().unwrap_or_default() ); let content = std::fs::read_to_string(&legacy_path).map_err(|e| { @@ -329,7 +363,7 @@ impl Lockfile { Err(ApsError::LockfileNotFound) } - /// Save the lockfile to disk + /// Save the lockfile to disk. /// /// Automatically migrates from legacy filename if it exists. /// Always stamps the current aps version before writing. @@ -344,24 +378,28 @@ impl Lockfile { info!("Saved lockfile to {:?}", path); - // Automatic migration: Remove legacy lockfile if it exists - let legacy_path = path - .parent() - .map(|p| p.join(LEGACY_LOCKFILE_NAME)) - .unwrap_or_else(|| PathBuf::from(LEGACY_LOCKFILE_NAME)); + // Automatic migration: Remove legacy lockfiles in the same directory if + // they exist and are not the canonical path we just wrote. 
+ let dir = path.parent().unwrap_or_else(|| Path::new(".")); + + for legacy_name in LEGACY_LOCKFILE_NAMES { + let legacy_path = dir.join(legacy_name); + if !legacy_path.exists() || legacy_path == path { + continue; + } - if legacy_path.exists() && legacy_path != path { match std::fs::remove_file(&legacy_path) { Ok(_) => { info!( - "Migrated lockfile: removed legacy file '{}'", - LEGACY_LOCKFILE_NAME + "Migrated lockfile: removed legacy file '{:?}'", + legacy_path.file_name().unwrap_or_default() ); } Err(e) => { debug!( - "Could not remove legacy lockfile '{}': {}", - LEGACY_LOCKFILE_NAME, e + "Could not remove legacy lockfile '{:?}': {}", + legacy_path.file_name().unwrap_or_default(), + e ); } } diff --git a/src/manifest.rs b/src/manifest.rs index 5746ab9..a9db9bc 100644 --- a/src/manifest.rs +++ b/src/manifest.rs @@ -208,6 +208,14 @@ impl Source { } } + /// Get shallow clone setting for git sources + pub fn git_shallow(&self) -> Option<bool> { + match self { + Source::Git { shallow, .. } => Some(*shallow), + Source::Filesystem { .. 
} => None, + } + } + + /// Get the path within a git source (for cloning at specific commits) pub fn git_path(&self) -> Option<&str> { match self { diff --git a/src/sources/git.rs b/src/sources/git.rs index 2e5a78b..33ad8dc 100644 --- a/src/sources/git.rs +++ b/src/sources/git.rs @@ -2,8 +2,10 @@ use super::{expand_path, GitInfo, ResolvedSource, SourceAdapter}; use crate::error::{ApsError, Result}; +use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::process::Command; +use std::sync::Arc; use tempfile::TempDir; use tracing::{debug, info}; @@ -89,6 +91,139 @@ pub struct ResolvedGitSource { pub commit_sha: String, } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct GitCloneKey { + repo: String, + git_ref: String, + shallow: bool, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct GitCommitKey { + repo: String, + commit_sha: String, + resolved_ref: String, +} + +/// Cache for git clones within a single sync run +pub struct GitCloneCache { + latest: HashMap<GitCloneKey, Arc<ResolvedGitSource>>, + commits: HashMap<GitCommitKey, Arc<ResolvedGitSource>>, +} + +impl GitCloneCache { + pub fn new() -> Self { + Self { + latest: HashMap::new(), + commits: HashMap::new(), + } + } + + pub fn resolve_latest( + &mut self, + repo: &str, + git_ref: &str, + shallow: bool, + ) -> Result<Arc<ResolvedGitSource>> { + let key = GitCloneKey { + repo: repo.to_string(), + git_ref: git_ref.to_string(), + shallow, + }; + if let Some(cached) = self.latest.get(&key) { + info!("Reusing cached clone for {} @ {}", repo, git_ref); + return Ok(Arc::clone(cached)); + } + + let resolved = Arc::new(clone_and_resolve(repo, git_ref, shallow)?); + self.latest.insert(key, Arc::clone(&resolved)); + Ok(resolved) + } + + pub fn resolve_commit( + &mut self, + repo: &str, + commit_sha: &str, + resolved_ref: &str, + ) -> Result<Arc<ResolvedGitSource>> { + let key = GitCommitKey { + repo: repo.to_string(), + commit_sha: commit_sha.to_string(), + resolved_ref: resolved_ref.to_string(), + }; + if let Some(cached) = self.commits.get(&key) { + info!( + "Reusing cached clone for {} @ {}", + repo, 
&commit_sha[..8.min(commit_sha.len())] + ); + return Ok(Arc::clone(cached)); + } + + let resolved = Arc::new(clone_at_commit(repo, commit_sha, resolved_ref)?); + self.commits.insert(key, Arc::clone(&resolved)); + Ok(resolved) + } +} + +/// Resolve a git source using the per-run clone cache. +pub fn resolve_git_source_with_cache( + repo: &str, + git_ref: &str, + shallow: bool, + path: Option<&str>, + cache: &mut GitCloneCache, +) -> Result<ResolvedSource> { + let resolved_git = cache.resolve_latest(repo, git_ref, shallow)?; + let path = expand_path(path.unwrap_or(".")); + let source_path = if path == "." { + resolved_git.repo_path.clone() + } else { + resolved_git.repo_path.join(&path) + }; + + let git_info = GitInfo { + resolved_ref: resolved_git.resolved_ref.clone(), + commit_sha: resolved_git.commit_sha.clone(), + }; + + Ok(ResolvedSource::git( + source_path, + repo.to_string(), + git_info, + Arc::clone(&resolved_git), + )) +} + +/// Resolve a git source at a specific commit using the per-run clone cache. +pub fn resolve_git_source_at_commit_with_cache( + repo: &str, + commit_sha: &str, + resolved_ref: &str, + path: Option<&str>, + cache: &mut GitCloneCache, +) -> Result<ResolvedSource> { + let resolved_git = cache.resolve_commit(repo, commit_sha, resolved_ref)?; + let path = expand_path(path.unwrap_or(".")); + let source_path = if path == "." { + resolved_git.repo_path.clone() + } else { + resolved_git.repo_path.join(&path) + }; + + let git_info = GitInfo { + resolved_ref: resolved_git.resolved_ref.clone(), + commit_sha: resolved_git.commit_sha.clone(), + }; + + Ok(ResolvedSource::git( + source_path, + repo.to_string(), + git_info, + Arc::clone(&resolved_git), + )) +} + /// Clone a git repository and resolve the ref using the git CLI. /// This inherits the user's existing git configuration (SSH, credentials, etc.) 
pub fn clone_and_resolve(url: &str, git_ref: &str, shallow: bool) -> Result { diff --git a/src/sources/mod.rs b/src/sources/mod.rs index 33170af..857a64e 100644 --- a/src/sources/mod.rs +++ b/src/sources/mod.rs @@ -7,7 +7,10 @@ mod filesystem; mod git; pub use filesystem::FilesystemSource; -pub use git::{clone_and_resolve, clone_at_commit, get_remote_commit_sha, GitSource}; +pub use git::{ + clone_and_resolve, get_remote_commit_sha, resolve_git_source_at_commit_with_cache, + resolve_git_source_with_cache, GitCloneCache, GitSource, +}; use crate::error::Result; use crate::lockfile::LockedEntry; diff --git a/tests/cli.rs b/tests/cli.rs index c615037..1e30821 100644 --- a/tests/cli.rs +++ b/tests/cli.rs @@ -119,6 +119,32 @@ fn sync_with_empty_manifest_succeeds() { aps().arg("sync").current_dir(&temp).assert().success(); } +#[test] +fn sync_with_custom_manifest_uses_manifest_based_lockfile_name() { + let temp = assert_fs::TempDir::new().unwrap(); + + // Use a non-default manifest filename. + let manifest_name = "aps-custom.yaml"; + temp.child(manifest_name) + .write_str("entries: []\n") + .unwrap(); + + aps() + .args(["sync", "--manifest", manifest_name]) + .current_dir(&temp) + .assert() + .success(); + + // Lockfile name should be derived from the manifest filename. + temp.child("aps-custom.lock.yaml") + .assert(predicate::path::exists()); + + // The historical default lockfile name should not be created when a custom + // manifest is explicitly used. 
+ temp.child("aps.lock.yaml") + .assert(predicate::path::missing()); +} + #[test] fn sync_dry_run_does_not_create_lockfile() { let temp = assert_fs::TempDir::new().unwrap(); @@ -246,6 +272,34 @@ fn status_works_after_sync() { aps().arg("status").current_dir(&temp).assert().success(); } +#[test] +fn status_with_custom_manifest_uses_matching_lockfile() { + let temp = assert_fs::TempDir::new().unwrap(); + + let manifest_name = "aps-status.yaml"; + temp.child(manifest_name) + .write_str("entries: []\n") + .unwrap(); + + // First sync to create the manifest-based lockfile. + aps() + .args(["sync", "--manifest", manifest_name]) + .current_dir(&temp) + .assert() + .success(); + + // Ensure the derived lockfile exists. + temp.child("aps-status.lock.yaml") + .assert(predicate::path::exists()); + + // Then status should succeed when pointing at the same manifest. + aps() + .args(["status", "--manifest", manifest_name]) + .current_dir(&temp) + .assert() + .success(); +} + // ============================================================================ // Catalog Command Tests // ============================================================================ @@ -360,6 +414,75 @@ fn sync_with_symlink_creates_symlink() { } } +#[test] +fn sync_shows_progress_per_entry() { + let temp = assert_fs::TempDir::new().unwrap(); + + // Create source files + let source_dir = temp.child("source"); + source_dir.create_dir_all().unwrap(); + source_dir + .child("AGENTS-one.md") + .write_str("# One\n") + .unwrap(); + source_dir + .child("AGENTS-two.md") + .write_str("# Two\n") + .unwrap(); + + // Create manifest with two entries so we can see (1/2), (2/2) + let manifest = format!( + r#"entries: + - id: first + kind: agents_md + source: + type: filesystem + root: {root} + path: AGENTS-one.md + dest: ./AGENTS-one.md + - id: second + kind: agents_md + source: + type: filesystem + root: {root} + path: AGENTS-two.md + dest: ./AGENTS-two.md +"#, + root = source_dir.path().display() + ); + + 
temp.child("aps.yaml").write_str(&manifest).unwrap(); + + let output = aps() + .arg("sync") + .current_dir(&temp) + .output() + .expect("failed to run aps sync"); + + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + assert!(output.status.success(), "aps sync failed: {}", stdout); + + // Ensure progress markers are present + assert!( + stdout.contains("(1/2)"), + "expected progress for first entry, got: {stdout}" + ); + assert!( + stdout.contains("(2/2)"), + "expected progress for second entry, got: {stdout}" + ); + + // Progress should appear before the final sync summary header + let first_progress = stdout.find("(1/2)").unwrap(); + let header_pos = stdout + .find("Syncing from") + .expect("expected 'Syncing from' header in output"); + assert!( + first_progress < header_pos, + "expected progress lines before summary header, got: {stdout}" + ); +} + // ============================================================================ // Hooks Tests // ============================================================================ @@ -841,6 +964,145 @@ fn sync_shows_upgrade_available_status() { ); } +#[test] +fn sync_reuses_git_clone_for_same_repo() { + let temp = assert_fs::TempDir::new().unwrap(); + + let source_repo = temp.child("source-repo"); + source_repo.create_dir_all().unwrap(); + create_git_repo_with_agents_md(source_repo.path(), "# Version 1\nOriginal content\n"); + + let project = temp.child("project"); + project.create_dir_all().unwrap(); + + let manifest = format!( + r#"entries: + - id: agents-one + kind: agents_md + source: + type: git + repo: {} + ref: main + shallow: false + path: AGENTS.md + dest: ./AGENTS-1.md + - id: agents-two + kind: agents_md + source: + type: git + repo: {} + ref: main + shallow: false + path: AGENTS.md + dest: ./AGENTS-2.md +"#, + source_repo.path().display(), + source_repo.path().display() + ); + + project.child("aps.yaml").write_str(&manifest).unwrap(); + + let output = aps() + .args(["--verbose", "sync"]) + 
.current_dir(&project) + .output() + .expect("Failed to run aps sync"); + + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let combined_output = format!("{stdout}\n{stderr}"); + assert!( + output.status.success(), + "aps sync failed: {combined_output}" + ); + + let clone_count = combined_output.matches("Cloning git repository:").count(); + let reuse_count = combined_output.matches("Reusing cached clone").count(); + assert_eq!( + clone_count, 1, + "expected one clone for shared repo, output: {combined_output}" + ); + assert!( + reuse_count >= 1, + "expected cached clone reuse, output: {combined_output}" + ); + + project + .child("AGENTS-1.md") + .assert(predicate::path::exists()); + project + .child("AGENTS-2.md") + .assert(predicate::path::exists()); +} + +#[test] +fn sync_clones_each_distinct_repo_once() { + let temp = assert_fs::TempDir::new().unwrap(); + + let source_repo_a = temp.child("source-repo-a"); + source_repo_a.create_dir_all().unwrap(); + create_git_repo_with_agents_md(source_repo_a.path(), "# Repo A\n"); + + let source_repo_b = temp.child("source-repo-b"); + source_repo_b.create_dir_all().unwrap(); + create_git_repo_with_agents_md(source_repo_b.path(), "# Repo B\n"); + + let project = temp.child("project"); + project.create_dir_all().unwrap(); + + let manifest = format!( + r#"entries: + - id: agents-a + kind: agents_md + source: + type: git + repo: {} + ref: main + shallow: false + path: AGENTS.md + dest: ./AGENTS-A.md + - id: agents-b + kind: agents_md + source: + type: git + repo: {} + ref: main + shallow: false + path: AGENTS.md + dest: ./AGENTS-B.md +"#, + source_repo_a.path().display(), + source_repo_b.path().display() + ); + + project.child("aps.yaml").write_str(&manifest).unwrap(); + + let output = aps() + .args(["--verbose", "sync"]) + .current_dir(&project) + .output() + .expect("Failed to run aps sync"); + + let stderr = 
String::from_utf8_lossy(&output.stderr).to_string(); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let combined_output = format!("{stdout}\n{stderr}"); + assert!( + output.status.success(), + "aps sync failed: {combined_output}" + ); + + let clone_count = combined_output.matches("Cloning git repository:").count(); + let reuse_count = combined_output.matches("Reusing cached clone").count(); + assert_eq!( + clone_count, 2, + "expected one clone per distinct repo, output: {combined_output}" + ); + assert_eq!( + reuse_count, 0, + "did not expect cache reuse across distinct repos, output: {combined_output}" + ); +} + // ============================================================================ // Composite Agents MD Tests (Live Git Sources) // ============================================================================ @@ -1038,6 +1300,7 @@ fn add_creates_manifest_entry_with_no_sync() { aps() .args([ "add", + "agent_skill", "https://github.com/hashicorp/agent-skills/blob/main/terraform/module-generation/skills/refactor-module", "--no-sync", ]) @@ -1071,6 +1334,7 @@ fn add_parses_skill_md_url_correctly() { aps() .args([ "add", + "agent_skill", "https://github.com/hashicorp/agent-skills/blob/main/terraform/module-generation/skills/refactor-module/SKILL.md", "--no-sync", ]) @@ -1099,6 +1363,7 @@ fn add_with_custom_id() { aps() .args([ "add", + "agent_skill", "https://github.com/owner/repo/blob/main/path/to/skill", "--id", "my-custom-skill", @@ -1138,6 +1403,7 @@ fn add_to_existing_manifest() { aps() .args([ "add", + "agent_skill", "https://github.com/owner/repo/blob/main/path/to/new-skill", "--no-sync", ]) @@ -1173,6 +1439,7 @@ fn add_duplicate_id_fails() { aps() .args([ "add", + "agent_skill", "https://github.com/owner/repo/blob/main/path/to/duplicate-skill", "--no-sync", ]) @@ -1183,37 +1450,131 @@ fn add_duplicate_id_fails() { } #[test] -fn add_invalid_github_url_fails() { +fn add_invalid_url_format_fails() { let temp = 
assert_fs::TempDir::new().unwrap(); - // Non-GitHub URL + // URL without blob/tree aps() .args([ "add", - "https://gitlab.com/owner/repo/blob/main/path", + "agent_skill", + "https://github.com/owner/repo/commits/main/path", "--no-sync", ]) .current_dir(&temp) .assert() .failure() - .stderr(predicate::str::contains("github.com")); + .stderr(predicate::str::contains("blob").or(predicate::str::contains("tree"))); } #[test] -fn add_invalid_url_format_fails() { +fn add_invalid_repo_identifier_fails() { + let temp = assert_fs::TempDir::new().unwrap(); + + aps() + .args(["add", "agent_skill", "not-a-repo", "--no-sync"]) + .current_dir(&temp) + .assert() + .failure() + .stderr(predicate::str::contains("Expected an HTTPS/SSH Git URL")); +} + +#[test] +fn add_cursor_rules_from_https_repo() { let temp = assert_fs::TempDir::new().unwrap(); - // URL without blob/tree aps() .args([ "add", - "https://github.com/owner/repo/commits/main/path", + "cursor_rules", + "https://github.com/owner/repo", "--no-sync", ]) .current_dir(&temp) .assert() - .failure() - .stderr(predicate::str::contains("blob").or(predicate::str::contains("tree"))); + .success(); + + let manifest = temp.child("aps.yaml"); + manifest.assert(predicate::str::contains("kind: cursor_rules")); + manifest.assert(predicate::str::contains( + "repo: https://github.com/owner/repo.git", + )); + manifest.assert(predicate::str::contains("ref: auto")); + manifest.assert(predicate::str::contains("path: .cursor/rules")); +} + +#[test] +fn add_cursor_rules_from_ssh_repo() { + let temp = assert_fs::TempDir::new().unwrap(); + + aps() + .args([ + "add", + "cursor_rules", + "git@github.com:org/repo.git", + "--no-sync", + ]) + .current_dir(&temp) + .assert() + .success(); + + let manifest = temp.child("aps.yaml"); + manifest.assert(predicate::str::contains("kind: cursor_rules")); + manifest.assert(predicate::str::contains( + "repo: git@github.com:org/repo.git", + )); + manifest.assert(predicate::str::contains("ref: auto")); + 
manifest.assert(predicate::str::contains("path: .cursor/rules")); +} + +#[test] +fn add_cursor_skills_root_from_ssh_repo() { + let temp = assert_fs::TempDir::new().unwrap(); + + aps() + .args([ + "add", + "cursor_skills_root", + "git@github.com:org/skills.git", + "--no-sync", + ]) + .current_dir(&temp) + .assert() + .success(); + + let manifest = temp.child("aps.yaml"); + manifest.assert(predicate::str::contains("kind: cursor_skills_root")); + manifest.assert(predicate::str::contains( + "repo: git@github.com:org/skills.git", + )); + manifest.assert(predicate::str::contains("ref: auto")); + manifest.assert(predicate::str::contains("path: .cursor/skills")); +} + +#[test] +fn add_agent_skill_from_ssh_with_path() { + let temp = assert_fs::TempDir::new().unwrap(); + + aps() + .args([ + "add", + "agent_skill", + "git@github.com:org/private-skills.git", + "--path", + "skills/my-skill", + "--no-sync", + ]) + .current_dir(&temp) + .assert() + .success(); + + let manifest = temp.child("aps.yaml"); + manifest.assert(predicate::str::contains("kind: agent_skill")); + manifest.assert(predicate::str::contains( + "repo: git@github.com:org/private-skills.git", + )); + manifest.assert(predicate::str::contains("path: skills/my-skill")); + manifest.assert(predicate::str::contains("dest: .claude/skills/my-skill/")); } #[test] @@ -1224,6 +1585,7 @@ fn add_with_tree_url() { aps() .args([ "add", + "agent_skill", "https://github.com/owner/repo/tree/main/path/to/skill", "--no-sync", ]) @@ -1243,6 +1605,7 @@ fn add_with_different_ref() { aps() .args([ "add", + "agent_skill", "https://github.com/owner/repo/blob/v1.2.3/path/to/skill", "--no-sync", ]) @@ -1260,9 +1623,9 @@ fn add_help_shows_usage() { .args(["add", "--help"]) .assert() .success() - .stdout(predicate::str::contains("GitHub URL")) .stdout(predicate::str::contains("--id")) - .stdout(predicate::str::contains("--kind")) + .stdout(predicate::str::contains("--path")) + .stdout(predicate::str::contains("--ref")) 
.stdout(predicate::str::contains("--no-sync")) .stdout(predicate::str::contains("--all")); }