Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added

- Documented `${input:...}` variable support in `headers` and `env` MCP server fields, with runtime support matrix and examples (#343)
- Lockfile records actual download host for Artifactory-proxied packages, enabling reproducible installs from lockfile alone without `ARTIFACTORY_BASE_URL` (#401)
- `build_download_ref` prefers lockfile host over manifest host for reproducible re-installs from non-default sources (#401)
- `ARTIFACTORY_ONLY` conflict detection — hard error when lockfile contains `github.com` dependencies but `ARTIFACTORY_ONLY=1` is set (#401)
- `ARTIFACTORY_ONLY` enforcement for virtual packages (files, collections, subdirectories) — blocks direct git access consistently (#401)

### Fixed

- Virtual subdirectory packages bypassed `ARTIFACTORY_ONLY` enforcement and fell through to direct git clone (#401)

## [0.8.3] - 2026-03-20

Expand Down
16 changes: 15 additions & 1 deletion docs/src/content/docs/getting-started/authentication.md
Original file line number Diff line number Diff line change
Expand Up @@ -215,4 +215,18 @@ export ARTIFACTORY_ONLY=1
export ARTIFACTORY_APM_TOKEN=your-api-key-or-token
```

> **Note:** Artifactory downloads use zip archives, so `apm.lock` will not contain commit SHAs for Artifactory-sourced packages.
> **Note:** Artifactory downloads use zip archives, so `apm.lock.yaml` will not contain commit SHAs for Artifactory-sourced packages.

#### Lockfile and Reproducibility

When packages are installed through Artifactory, the lockfile records the actual Artifactory host — not the original `github.com`. This means subsequent `apm install` runs fetch from Artifactory automatically, even without `ARTIFACTORY_BASE_URL` set:

```bash
# First install (sets up lockfile):
ARTIFACTORY_BASE_URL=https://art.example.com/artifactory/apm apm install owner/repo

# Subsequent installs use the lockfile — no env var needed:
apm install # → fetches from art.example.com/artifactory/apm
```

If `ARTIFACTORY_ONLY=1` is set but the lockfile contains dependencies locked to `github.com` (from a previous non-Artifactory install), APM will exit with an error and suggest `apm install --update` to re-resolve through Artifactory.
86 changes: 69 additions & 17 deletions src/apm_cli/commands/install.py
Original file line number Diff line number Diff line change
Expand Up @@ -928,20 +928,22 @@ def download_callback(dep_ref, modules_dir):
return result_path
return None

# T5: Use locked commit if available (reproducible installs)
locked_ref = None
if existing_lockfile:
locked_dep = existing_lockfile.get_dependency(dep_ref.get_unique_key())
if locked_dep and locked_dep.resolved_commit and locked_dep.resolved_commit != "cached":
locked_ref = locked_dep.resolved_commit

# Build a DependencyReference with the right ref to avoid lossy
# T5: Use locked commit and host if available (reproducible installs).
# Prefer the lockfile host so re-installs fetch from the same source
# (e.g. Artifactory proxy). Uses _dc_replace to avoid lossy
# str() → parse() round-trips (#382).
from dataclasses import replace as _dc_replace
if locked_ref:
download_dep = _dc_replace(dep_ref, reference=locked_ref)
else:
download_dep = dep_ref
locked_dep = existing_lockfile.get_dependency(dep_ref.get_unique_key()) if existing_lockfile else None
overrides = {}
if locked_dep:
locked_host = getattr(locked_dep, "host", None)
if isinstance(locked_host, str) and locked_host != dep_ref.host:
overrides["host"] = locked_host
if locked_dep.resolved_commit and locked_dep.resolved_commit != "cached":
overrides["reference"] = locked_dep.resolved_commit
elif dep_ref.reference:
pass # use dep_ref as-is
download_dep = _dc_replace(dep_ref, **overrides) if overrides else dep_ref

# Silent download - no progress display for transitive deps
result = downloader.download_package(download_dep, install_path)
Expand Down Expand Up @@ -1096,7 +1098,7 @@ def _collect_descendants(node, visited=None):
# Collect installed packages for lockfile generation
from apm_cli.deps.lockfile import LockFile, LockedDependency, get_lockfile_path
from ..utils.content_hash import compute_package_hash as _compute_hash
installed_packages: List[tuple] = [] # List of (dep_ref, resolved_commit, depth, resolved_by, is_dev)
installed_packages: List[tuple] = [] # List of (dep_ref, resolved_commit, depth, resolved_by, is_dev, host_override)
package_deployed_files: builtins.dict = {} # dep_key → list of relative deployed paths
package_types: builtins.dict = {} # dep_key → package type string
_package_hashes: builtins.dict = {} # dep_key → sha256 hash (captured at download/verify time)
Expand All @@ -1107,6 +1109,31 @@ def _collect_descendants(node, visited=None):
if existing_lockfile:
for dep in existing_lockfile.dependencies.values():
managed_files.update(dep.deployed_files)

# Conflict: ARTIFACTORY_ONLY requires all deps locked to a proxy host.
import os
if os.environ.get('ARTIFACTORY_ONLY', '').strip().lower() in ('1', 'true', 'yes'):
direct_locked = [
dep for dep in existing_lockfile.dependencies.values()
if dep.source != "local" and dep.host in (None, "github.com")
]
if direct_locked:
_rich_error(
"ARTIFACTORY_ONLY is set but lockfile contains "
"dependencies locked to direct sources:"
)
for dep in direct_locked[:10]:
host = dep.host or "github.com"
name = dep.repo_url
if dep.virtual_path:
name = f"{name}/{dep.virtual_path}"
_rich_error(f" - {name} (host: {host})")
_rich_error(
"Re-run with 'apm install --update' to re-resolve "
"through Artifactory, or unset ARTIFACTORY_ONLY."
)
sys.exit(1)

# Normalize path separators once for O(1) lookups in check_collision
from apm_cli.integration.base_integrator import BaseIntegrator
managed_files = BaseIntegrator.normalize_managed_files(managed_files)
Expand Down Expand Up @@ -1296,7 +1323,7 @@ def _collect_descendants(node, visited=None):
depth = node.depth if node else 1
resolved_by = node.parent.dependency_ref.repo_url if node and node.parent else None
_is_dev = node.is_dev if node else False
installed_packages.append((dep_ref, None, depth, resolved_by, _is_dev))
installed_packages.append((dep_ref, None, depth, resolved_by, _is_dev, None))
dep_key = dep_ref.get_unique_key()
if install_path.is_dir() and not dep_ref.is_local:
_package_hashes[dep_key] = _compute_hash(install_path)
Expand Down Expand Up @@ -1408,6 +1435,14 @@ def _collect_descendants(node, visited=None):
safe_rmtree(install_path, apm_modules_dir)
skip_download = False

# When ARTIFACTORY_ONLY is set, don't use cached files
# unless the lockfile confirms they came from Artifactory.
if skip_download and not dep_ref.is_local:
import os
if os.environ.get('ARTIFACTORY_ONLY', '').strip().lower() in ('1', 'true', 'yes'):
if not _dep_locked_chk or _dep_locked_chk.host in (None, "github.com"):
skip_download = False

if skip_download:
display_name = (
str(dep_ref) if dep_ref.is_virtual else dep_ref.repo_url
Expand All @@ -1422,7 +1457,14 @@ def _collect_descendants(node, visited=None):
ref_str = f"#{short_sha}"
elif dep_ref.reference:
ref_str = f"#{dep_ref.reference}"
_rich_info(f"✓ {display_name}{ref_str} (cached)")
cached_art_suffix = ""
if _dep_locked_chk and _dep_locked_chk.host and _dep_locked_chk.host != "github.com":
cached_art_suffix = f" via {_dep_locked_chk.host}"
else:
resolved_host = downloader.get_resolved_host(dep_ref)
if resolved_host:
cached_art_suffix = f" via {resolved_host}"
_rich_info(f"✓ {display_name}{ref_str} (cached{cached_art_suffix})")
installed_count += 1
if not dep_ref.reference:
unpinned_count += 1
Expand Down Expand Up @@ -1495,7 +1537,13 @@ def _collect_descendants(node, visited=None):
cached_commit = locked_dep.resolved_commit
if not cached_commit:
cached_commit = dep_ref.reference
installed_packages.append((dep_ref, cached_commit, depth, resolved_by, _is_dev))
# Determine actual download host for lockfile.
cached_host_override = None
if _dep_locked_chk and _dep_locked_chk.host:
cached_host_override = _dep_locked_chk.host
if not cached_host_override:
cached_host_override = downloader.get_resolved_host(dep_ref)
installed_packages.append((dep_ref, cached_commit, depth, resolved_by, _is_dev, cached_host_override))
if install_path.is_dir():
_package_hashes[dep_key] = _compute_hash(install_path)
# Track package type for lockfile
Expand Down Expand Up @@ -1604,7 +1652,8 @@ def _collect_descendants(node, visited=None):
depth = node.depth if node else 1
resolved_by = node.parent.dependency_ref.repo_url if node and node.parent else None
_is_dev = node.is_dev if node else False
installed_packages.append((dep_ref, resolved_commit, depth, resolved_by, _is_dev))
host_override = downloader.get_resolved_host(dep_ref)
installed_packages.append((dep_ref, resolved_commit, depth, resolved_by, _is_dev, host_override))
if install_path.is_dir():
_package_hashes[dep_ref.get_unique_key()] = _compute_hash(install_path)

Expand Down Expand Up @@ -1632,6 +1681,9 @@ def _collect_descendants(node, visited=None):
elif package_type == PackageType.APM_PACKAGE:
_rich_info(f" └─ Package type: APM Package (apm.yml)")

if verbose and host_override:
_rich_info(f" └─ Source: {host_override}")

# Auto-integrate prompts and agents if enabled
# Pre-deploy security gate
if not _pre_deploy_security_scan(
Expand Down
34 changes: 34 additions & 0 deletions src/apm_cli/deps/github_downloader.py
Original file line number Diff line number Diff line change
Expand Up @@ -375,6 +375,21 @@ def _parse_artifactory_base_url(self) -> Optional[tuple]:
return None
return (host, path, parsed.scheme)

def get_resolved_host(self, dep_ref: 'DependencyReference') -> Optional[str]:
    """Return the actual download host for a dependency.

    When the dependency is routed through a configured proxy (e.g. an
    Artifactory remote), the result is the proxy's ``host/path`` prefix so
    callers can record where the bytes really came from.

    Args:
        dep_ref: The dependency to resolve a download host for.

    Returns:
        The ``host/path`` string of the proxy in use, or ``None`` when the
        dependency is local or the default host from *dep_ref* applies.
    """
    # Local packages never go over the network — no host to report.
    if not dep_ref.is_local:
        proxy = self._parse_artifactory_base_url()
        if proxy and self._should_use_artifactory_proxy(dep_ref):
            proxy_host, proxy_prefix, _unused_scheme = proxy
            return f"{proxy_host}/{proxy_prefix}"
    return None

def _resilient_get(self, url: str, headers: Dict[str, str], timeout: int = 30, max_retries: int = 3) -> requests.Response:
"""HTTP GET with retry on 429/503 and rate-limit header awareness (#171).

Expand Down Expand Up @@ -1813,8 +1828,22 @@ def download_package(
# Handle virtual packages differently
if dep_ref.is_virtual:
if dep_ref.is_virtual_file():
if self._is_artifactory_only() and not dep_ref.is_artifactory():
art_proxy = self._parse_artifactory_base_url()
if not art_proxy:
raise RuntimeError(
f"ARTIFACTORY_ONLY is set but no Artifactory proxy is configured for '{repo_ref}'. "
"Set ARTIFACTORY_BASE_URL or use explicit Artifactory FQDN syntax."
)
return self.download_virtual_file_package(dep_ref, target_path, progress_task_id, progress_obj)
elif dep_ref.is_virtual_collection():
if self._is_artifactory_only() and not dep_ref.is_artifactory():
art_proxy = self._parse_artifactory_base_url()
if not art_proxy:
raise RuntimeError(
f"ARTIFACTORY_ONLY is set but no Artifactory proxy is configured for '{repo_ref}'. "
"Set ARTIFACTORY_BASE_URL or use explicit Artifactory FQDN syntax."
)
return self.download_collection_package(dep_ref, target_path, progress_task_id, progress_obj)
elif dep_ref.is_virtual_subdirectory():
# When ARTIFACTORY_ONLY is set, download full archive and extract subdir
Expand All @@ -1823,6 +1852,11 @@ def download_package(
return self._download_subdirectory_from_artifactory(
dep_ref, target_path, art_proxy, progress_task_id, progress_obj
)
if self._is_artifactory_only():
raise RuntimeError(
f"ARTIFACTORY_ONLY is set but no Artifactory proxy is configured for '{repo_ref}'. "
"Set ARTIFACTORY_BASE_URL or use explicit Artifactory FQDN syntax."
)
return self.download_subdirectory_package(dep_ref, target_path, progress_task_id, progress_obj)
else:
raise ValueError(f"Unknown virtual package type for {dep_ref.virtual_path}")
Expand Down
25 changes: 12 additions & 13 deletions src/apm_cli/deps/lockfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,11 +120,12 @@ def from_dependency_ref(
depth: int,
resolved_by: Optional[str],
is_dev: bool = False,
host_override: Optional[str] = None,
) -> "LockedDependency":
"""Create from a DependencyReference with resolution info."""
return cls(
repo_url=dep_ref.repo_url,
host=dep_ref.host,
host=host_override or dep_ref.host,
resolved_commit=resolved_commit,
resolved_ref=dep_ref.reference,
virtual_path=dep_ref.virtual_path,
Expand Down Expand Up @@ -230,11 +231,10 @@ def from_installed_packages(
dependency_graph,
) -> "LockFile":
"""Create a lock file from installed packages.

Args:
installed_packages: List of (dep_ref, resolved_commit, depth, resolved_by)
or (dep_ref, resolved_commit, depth, resolved_by, is_dev) tuples.
The 5th element is optional for backward compatibility.
installed_packages: List of (dep_ref, resolved_commit, depth, resolved_by
[, is_dev [, host_override]]) tuples. The 5th/6th elements are optional.
dependency_graph: The resolved DependencyGraph for additional metadata
"""
# Get APM version
Expand All @@ -243,24 +243,23 @@ def from_installed_packages(
apm_version = version("apm-cli")
except Exception:
apm_version = "unknown"

lock = cls(apm_version=apm_version)

for entry in installed_packages:
if len(entry) >= 5:
dep_ref, resolved_commit, depth, resolved_by, is_dev = entry[:5]
else:
dep_ref, resolved_commit, depth, resolved_by = entry[:4]
is_dev = False
dep_ref, resolved_commit, depth, resolved_by = entry[:4]
is_dev = entry[4] if len(entry) > 4 else False
host_override = entry[5] if len(entry) > 5 else None
locked_dep = LockedDependency.from_dependency_ref(
dep_ref=dep_ref,
resolved_commit=resolved_commit,
depth=depth,
resolved_by=resolved_by,
is_dev=is_dev,
host_override=host_override,
)
lock.add_dependency(locked_dep)

return lock

def get_installed_paths(self, apm_modules_dir: Path) -> List[str]:
Expand Down
23 changes: 21 additions & 2 deletions src/apm_cli/drift.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,6 +197,25 @@ def build_download_ref(
"""
if existing_lockfile and not update_refs and not ref_changed:
locked_dep = existing_lockfile.get_dependency(dep_ref.get_unique_key())
if locked_dep and locked_dep.resolved_commit and locked_dep.resolved_commit != "cached":
return _dataclass_replace(dep_ref, reference=locked_dep.resolved_commit)
if locked_dep:
# Prefer the lockfile host over the manifest host so that
# re-installs fetch from the exact same source (e.g. an
# Artifactory proxy or GHE custom domain). Without this,
# the downloader would fall back to github.com.
locked_host = getattr(locked_dep, "host", None)
overrides = {}
if isinstance(locked_host, str) and locked_host != dep_ref.host:
overrides["host"] = locked_host
if locked_dep.resolved_commit and locked_dep.resolved_commit != "cached":
overrides["reference"] = locked_dep.resolved_commit
elif not overrides.get("reference"):
# When no commit SHA is available (e.g., Artifactory downloads),
# preserve a pinned ref from the lockfile or manifest instead of
# dropping the #ref portion and floating to the default branch.
locked_ref = getattr(locked_dep, "resolved_ref", None)
ref = locked_ref if isinstance(locked_ref, str) else dep_ref.reference
if ref and ref != dep_ref.reference:
overrides["reference"] = ref
if overrides:
return _dataclass_replace(dep_ref, **overrides)
return dep_ref
Loading