Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 2 additions & 15 deletions src/cocoindex_code/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,19 +9,6 @@
_DEFAULT_MODEL = "sbert/sentence-transformers/all-MiniLM-L6-v2"


def _detect_device() -> str:
    """Return the best available compute device, honoring an env override.

    Returns:
        The value of the ``COCOINDEX_CODE_DEVICE`` environment variable
        when it is set to a non-empty string; otherwise ``"cuda"`` if
        torch reports an available CUDA device, and ``"cpu"`` when torch
        is not installed or CUDA is unavailable.
    """
    override = os.environ.get("COCOINDEX_CODE_DEVICE")
    if override:
        return override
    try:
        # Import lazily so environments without torch still work.
        import torch

        return "cuda" if torch.cuda.is_available() else "cpu"
    except ImportError:
        # ModuleNotFoundError is a subclass of ImportError, so catching
        # ImportError alone covers both; fall back to CPU.
        return "cpu"


def _find_root_with_marker(start: Path, markers: list[str]) -> Path | None:
"""Walk up from start, return first directory containing any marker."""
current = start
Expand Down Expand Up @@ -62,7 +49,7 @@ class Config:
codebase_root_path: Path
embedding_model: str
index_dir: Path
device: str
device: str | None
trust_remote_code: bool
extra_extensions: dict[str, str | None]

Expand All @@ -87,7 +74,7 @@ def from_env(cls) -> Config:
index_dir = root / ".cocoindex_code"

# Device: auto-detect CUDA or use env override
device = _detect_device()
device = os.environ.get("COCOINDEX_CODE_DEVICE")

# trust_remote_code: opt-in via env var only.
# sentence-transformers 5.x+ supports Jina models natively, so
Expand Down
Loading