Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 18 additions & 9 deletions contree_cli/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from contree_cli import CLIENT, FORMATTER, PROFILE, SESSION_STORE, ArgumentsProtocol
from contree_cli.arguments import parser
from contree_cli.client import ApiError, client_from_profile
from contree_cli.config import Config
from contree_cli.config import SETTINGS, Config
from contree_cli.log import setup_logging
from contree_cli.output import FORMATTERS
from contree_cli.session import SessionStore, get_session_key
Expand All @@ -21,6 +21,9 @@ def main() -> None:
parser.print_help()
exit(0)

if SETTINGS.has_section("cli"):
parser.set_defaults(**SETTINGS["cli"])

args = parser.parse_args()
setup_logging(level=getattr(logging, args.log_level.upper(), logging.INFO))

Expand All @@ -38,14 +41,19 @@ def main() -> None:
if args.project:
profile = replace(profile, project=args.project)

# auth creates its own client and can work with nonexistent profiles
if args.command not in ("auth",):
if profile.name not in cfg:
log.error(
"Profile %r does not exist. Run `contree auth` first.",
profile.name,
)
exit(1)
# Local-only commands don't need a client or a configured profile:
# auth bootstraps its own; agent/man/skill operate purely on local files.
LOCAL_COMMANDS = ("auth", "agent", "man", "skill")
needs_client = args.command not in LOCAL_COMMANDS

if needs_client and profile.name not in cfg:
log.error(
"Profile %r does not exist. Run `contree auth` first.",
profile.name,
)
exit(1)

if needs_client:
try:
client = client_from_profile(profile)
except ValueError as exc:
Expand All @@ -57,6 +65,7 @@ def main() -> None:

session_key = get_session_key(profile.name, override=args.session_key)
db_path = profile.session_db_path
log.debug("Running in session: %s", session_key)

with SessionStore(db_path, session_key) as store:
PROFILE.set(profile)
Expand Down
2 changes: 1 addition & 1 deletion contree_cli/arguments.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@
Authentication:
Bearer token + project ID. Default API URL:
https://api.studio.nebius.com/sandboxes/
https://api.tokenfactory.nebius.com/sandboxes/
Use `contree auth --help` to configure persistent credentials.
Expand Down
28 changes: 14 additions & 14 deletions contree_cli/cli/file.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
import hashlib
import json
import logging
import os
import shlex
import subprocess
import tempfile
Expand All @@ -28,6 +27,7 @@

from contree_cli import CLIENT, SESSION_STORE, ArgumentsProtocol, SetupResult
from contree_cli.client import ApiError, ContreeClient, resolve_image, stream_response
from contree_cli.config import EDITOR
from contree_cli.session import SessionStore
from contree_cli.types import FLAGS

Expand Down Expand Up @@ -79,7 +79,8 @@ def setup_parser(p: argparse.ArgumentParser) -> SetupResult:
)
edit_p.add_argument(
*FLAGS["editor"],
help="Editor command (default: $EDITOR or vi)",
default=EDITOR,
help=f"Editor command (default: {EDITOR})",
)
edit_p.add_argument("path", help="Path inside image")
edit_p.set_defaults(handler=cmd_file_edit, load_args=FileEditArgs)
Expand Down Expand Up @@ -128,14 +129,14 @@ def _upload_and_record(
except ApiError as exc:
if exc.status != 404:
raise
data = local_path.read_bytes()
resp = client.request(
"POST",
"/v1/files",
body=data,
headers={"Content-Type": "application/octet-stream"},
)
file_uuid = json.loads(resp.read())["uuid"]
with open(local_path, "rb") as fh:
resp = client.request(
"POST",
"/v1/files",
body=fh,
headers={"Content-Type": "application/octet-stream"},
)
file_uuid = json.loads(resp.read())["uuid"]
logger.info("Uploaded %s (%s)", instance_path, file_uuid)

history_id = store.set_image(
Expand Down Expand Up @@ -178,14 +179,13 @@ def cmd_file_edit(args: FileEditArgs) -> int | None:

# 2. Record original hash, open editor
original_hash = _file_sha256(tmp_file)
editor = args.editor or os.environ.get("EDITOR", "vi")
logger.info("Opening %s in %s", tmp_file, editor)
logger.info("Opening %s in %s", tmp_file, args.editor)
# $EDITOR may contain shell expressions (env vars, tilde, pipes),
# e.g. "TERM=xterm vim" or "~/bin/editor". shlex.split would not
# expand those shell=True is required. The file path is quoted
# expand those, shell=True is required. The file path is quoted
# via shlex.quote to prevent injection from the filename.
# nosemgrep: subprocess-shell-true
rc = subprocess.call(f"{editor} {shlex.quote(str(tmp_file))}", shell=True)
rc = subprocess.call(f"{args.editor} {shlex.quote(str(tmp_file))}", shell=True)
if rc != 0:
logger.error("Editor exited with code %d", rc)
return 1
Expand Down
100 changes: 74 additions & 26 deletions contree_cli/cli/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,17 +49,20 @@
import argparse
import base64
import fnmatch
import functools
import io
import json
import logging
import os
import re
import select
import shlex
import sys
import time
import uuid
from dataclasses import dataclass, field
from datetime import timedelta
from multiprocessing.pool import ThreadPool

from contree_cli import CLIENT, FORMATTER, SESSION_STORE, ArgumentsProtocol, SetupResult
from contree_cli.client import ApiError, ContreeClient, decode_stream, resolve_image
Expand All @@ -68,7 +71,7 @@
DefaultFormatter,
OutputFormatter,
)
from contree_cli.session import SessionStore
from contree_cli.session import CONTREE_CONCURRENCY, SessionStore
from contree_cli.types import FLAGS

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -351,47 +354,82 @@ def _local_file_cache_kind(host_path: str) -> str:
return f"local_file:{digest}"


# Reuse previously uploaded files for up to 90 days - server retention is
# 6 months, so entries younger than this cutoff should still be present.
MAX_CACHE_AGE = 90 * 24 * 3600  # 90 days - server retention is 6 months


def cached_local_uuid(mf: MappedFile, store: SessionStore) -> str | None:
    """Return a cached UUID for *mf* if one was uploaded within MAX_CACHE_AGE.

    Looks up the session-store cache under a key derived from the file's
    host path (see ``_local_file_cache_kind``). Entries missing a ``uuid``
    or older than MAX_CACHE_AGE are treated as absent and ``None`` is
    returned, forcing a fresh upload.
    """
    cache_kind = _local_file_cache_kind(mf.host_path)
    cached = store.cache.get(("", cache_kind))
    # The cache may hold arbitrary values; only trust a dict with a uuid.
    if isinstance(cached, dict) and cached.get("uuid"):
        # Missing uploaded_at defaults to 0, i.e. "too old to reuse".
        age = time.time() - cached.get("uploaded_at", 0)
        if age < MAX_CACHE_AGE:
            logger.debug(
                "File %s reused from local cache (%s)",
                mf.host_path,
                cached["uuid"],
            )
            return str(cached["uuid"])
    return None


def record_local_uuid(mf: MappedFile, file_uuid: str, store: SessionStore) -> None:
    """Remember in the local cache that *mf* was uploaded as *file_uuid*.

    Stores the mapping under the host-path-derived cache key together with
    an upload timestamp, which later age-gates reuse of the entry.
    """
    entry = {"uuid": file_uuid, "uploaded_at": time.time()}
    store.cache[("", _local_file_cache_kind(mf.host_path))] = entry


def upload_one_remote(client: ContreeClient, mf: MappedFile) -> tuple[MappedFile, str]:
    """HTTP-only upload (sha256 dedup + POST /v1/files). Thread-safe.

    First asks the server whether a file with this content hash already
    exists; on a 404 the file is streamed up instead. Returns the mapped
    file together with its server-side UUID so callers running this in a
    thread pool can match results back to inputs.
    """
    try:
        # Dedup probe: a hit means the content is already on the server.
        resp = client.get("/v1/files", params={"sha256": mf.sha256()})
        file_uuid = str(json.loads(resp.read())["uuid"])
        logger.info("File %s already uploaded (%s)", mf.host_path, file_uuid)
        return mf, file_uuid
    except ApiError as exc:
        # Only 404 (not found) means "proceed to upload"; anything else
        # is a real error and must propagate.
        if exc.status != 404:
            raise

    # Stream the file handle directly as the request body to avoid
    # reading the whole file into memory.
    with open(mf.host_path, "rb") as fh:
        resp = client.request(
            "POST",
            "/v1/files",
            body=fh,
            headers={"Content-Type": "application/octet-stream"},
        )
        file_uuid = str(json.loads(resp.read())["uuid"])
    logger.info("Uploaded file: %s -> %s", mf.host_path, file_uuid)
    return mf, file_uuid


def upload_files(
    client: ContreeClient,
    files: list[MappedFile],
    store: SessionStore,
) -> dict[str, str]:
    """Upload host files in parallel, returning host_path → file_uuid.

    Files with a fresh local-cache entry are reused without any network
    traffic; the remainder are uploaded concurrently via a thread pool and
    recorded back into the cache as they complete.
    """
    result: dict[str, str] = {}
    to_upload: list[MappedFile] = []

    # Split input into cache hits (resolved immediately) and misses.
    for candidate in files:
        known = cached_local_uuid(candidate, store)
        if known:
            result[candidate.host_path] = known
        else:
            to_upload.append(candidate)

    if to_upload:
        # Never spin up more workers than there are files to push.
        pool_size = min(CONTREE_CONCURRENCY, len(to_upload))
        worker = functools.partial(upload_one_remote, client)
        with ThreadPool(pool_size) as pool:
            # imap_unordered lets fast uploads land without waiting on
            # slow ones; each completion is cached right away.
            for done_mf, new_uuid in pool.imap_unordered(worker, to_upload):
                result[done_mf.host_path] = new_uuid
                record_local_uuid(done_mf, new_uuid, store)

    return result


def _build_payload(
Expand All @@ -403,8 +441,12 @@ def _build_payload(
) -> dict[str, object]:
"""Build the JSON payload for POST /v1/instances."""
if args.shell:
command = " ".join(args.command_args)
# In shell mode the API runs `sh -c <command>`, so we must
# rebuild the original argv into a shell-safe expression.
command = shlex.join(args.command_args)
else:
# In non-shell mode the API exec's command + args directly,
# JSON list elements preserve boundaries, no quoting needed.
parts = args.command_args
command = parts[0] if parts else ""

Expand Down Expand Up @@ -534,9 +576,7 @@ def cmd_run(args: RunArgs) -> int | None:
print(str(exc), file=sys.stderr)
return 1

uploaded: dict[str, str] = {}
for mf in expanded_files:
uploaded[mf.host_path] = _upload_file(client, mf, store)
uploaded = upload_files(client, expanded_files, store)

# 2b. Include pending files from session store
pending = store.pending_files()
Expand Down Expand Up @@ -670,6 +710,14 @@ def _norm(item: object) -> dict[str, object]:
# 6. Cache terminal operation result
store.cache[(op_uuid, "operation")] = op

if op["status"] != "SUCCESS":
logger.fatal(
"Operation %s ended with status %s%s",
op_uuid,
op["status"],
f": {op['error']}" if op.get("error") else "",
)

# 7. Display result
_display_operation(op, formatter)

Expand Down
Loading
Loading