diff --git a/.serena/.gitignore b/.serena/.gitignore new file mode 100644 index 00000000..2e510aff --- /dev/null +++ b/.serena/.gitignore @@ -0,0 +1,2 @@ +/cache +/project.local.yml diff --git a/.serena/project.yml b/.serena/project.yml new file mode 100644 index 00000000..832c6384 --- /dev/null +++ b/.serena/project.yml @@ -0,0 +1,154 @@ +# the name by which the project can be referenced within Serena +project_name: "code-review-graph" + + +# list of languages for which language servers are started; choose from: +# al bash clojure cpp csharp +# csharp_omnisharp dart elixir elm erlang +# fortran fsharp go groovy haskell +# haxe java julia kotlin lua +# markdown +# matlab nix pascal perl php +# php_phpactor powershell python python_jedi r +# rego ruby ruby_solargraph rust scala +# swift terraform toml typescript typescript_vts +# vue yaml zig +# (This list may be outdated. For the current list, see values of Language enum here: +# https://github.com/oraios/serena/blob/main/src/solidlsp/ls_config.py +# For some languages, there are alternative language servers, e.g. csharp_omnisharp, ruby_solargraph.) +# Note: +# - For C, use cpp +# - For JavaScript, use typescript +# - For Free Pascal/Lazarus, use pascal +# Special requirements: +# Some languages require additional setup/installations. +# See here for details: https://oraios.github.io/serena/01-about/020_programming-languages.html#language-servers +# When using multiple languages, the first language server that supports a given file will be used for that file. +# The first language is the default language and the respective language server will be used as a fallback. +# Note that when using the JetBrains backend, language servers are not used and this list is correspondingly ignored. 
+languages: +- python + +# the encoding used by text files in the project +# For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings +encoding: "utf-8" + +# line ending convention to use when writing source files. +# Possible values: unset (use global setting), "lf", "crlf", or "native" (platform default) +# This does not affect Serena's own files (e.g. memories and configuration files), which always use native line endings. +line_ending: + +# The language backend to use for this project. +# If not set, the global setting from serena_config.yml is used. +# Valid values: LSP, JetBrains +# Note: the backend is fixed at startup. If a project with a different backend +# is activated post-init, an error will be returned. +language_backend: + +# whether to use project's .gitignore files to ignore files +ignore_all_files_in_gitignore: true + +# advanced configuration option allowing to configure language server-specific options. +# Maps the language key to the options. +# Have a look at the docstring of the constructors of the LS implementations within solidlsp (e.g., for C# or PHP) to see which options are available. +# No documentation on options means no options are available. +ls_specific_settings: {} + +# list of additional paths to ignore in this project. +# Same syntax as gitignore, so you can use * and **. +# Note: global ignored_paths from serena_config.yml are also applied additively. +ignored_paths: [] + +# whether the project is in read-only mode +# If set to true, all editing tools will be disabled and attempts to use them will result in an error +# Added on 2025-04-18 +read_only: false + +# list of tool names to exclude. +# This extends the existing exclusions (e.g. from the global configuration) +# +# Below is the complete list of tools for convenience. +# To make sure you have the latest list of tools, and to view their descriptions, +# execute `uv run scripts/print_tool_overview.py`. 
+# +# * `activate_project`: Activates a project based on the project name or path. +# * `check_onboarding_performed`: Checks whether project onboarding was already performed. +# * `create_text_file`: Creates/overwrites a file in the project directory. +# * `delete_memory`: Delete a memory file. Should only happen if a user asks for it explicitly, +# for example by saying that the information retrieved from a memory file is no longer correct +# or no longer relevant for the project. +# * `edit_memory`: Replaces content matching a regular expression in a memory. +# * `execute_shell_command`: Executes a shell command. +# * `find_file`: Finds files in the given relative paths +# * `find_referencing_symbols`: Finds symbols that reference the given symbol using the language server backend +# * `find_symbol`: Performs a global (or local) search using the language server backend. +# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes. +# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file. +# * `initial_instructions`: Provides instructions for Serena usage (i.e. the 'Serena Instructions Manual') +# for clients that do not read the initial instructions when the MCP server is connected. +# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol. +# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol. +# * `list_dir`: Lists files and directories in the given directory (optionally with recursion). +# * `list_memories`: List available memories. Any memory can be read using the `read_memory` tool. +# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building). +# * `read_file`: Reads a file within the project directory. +# * `read_memory`: Read the content of a memory file. 
This tool should only be used if the information +# is relevant to the current task. You can infer whether the information +# is relevant from the memory file name. +# You should not read the same memory file multiple times in the same conversation. +# * `rename_memory`: Renames or moves a memory. Moving between project and global scope is supported +# (e.g., renaming "global/foo" to "bar" moves it from global to project scope). +# * `rename_symbol`: Renames a symbol throughout the codebase using language server refactoring capabilities. +# For JB, we use a separate tool. +# * `replace_content`: Replaces content in a file (optionally using regular expressions). +# * `replace_symbol_body`: Replaces the full definition of a symbol using the language server backend. +# * `safe_delete_symbol`: +# * `search_for_pattern`: Performs a search for a pattern in the project. +# * `write_memory`: Write some information (utf-8-encoded) about this project that can be useful for future tasks to a memory in md format. +# The memory name should be meaningful. +excluded_tools: [] + +# list of tools to include that would otherwise be disabled (particularly optional tools that are disabled by default). +# This extends the existing inclusions (e.g. from the global configuration). +included_optional_tools: [] + +# fixed set of tools to use as the base tool set (if non-empty), replacing Serena's default set of tools. +# This cannot be combined with non-empty excluded_tools or included_optional_tools. +fixed_tools: [] + +# list of mode names that are always to be included in the set of active modes +# The full set of modes to be activated is base_modes + default_modes. +# If the setting is undefined, the base_modes from the global configuration (serena_config.yml) apply. +# Otherwise, this setting overrides the global configuration. +# Set this to [] to disable base modes for this project. +# Set this to a list of mode names to always include the respective modes for this project. 
+base_modes: + +# list of mode names that are to be activated by default. +# The full set of modes to be activated is base_modes + default_modes. +# If the setting is undefined, the default_modes from the global configuration (serena_config.yml) apply. +# Otherwise, this overrides the setting from the global configuration (serena_config.yml). +# This setting can, in turn, be overridden by CLI parameters (--mode). +default_modes: + +# initial prompt for the project. It will always be given to the LLM upon activating the project +# (contrary to the memories, which are loaded on demand). +initial_prompt: "" + +# time budget (seconds) per tool call for the retrieval of additional symbol information +# such as docstrings or parameter information. +# This overrides the corresponding setting in the global configuration; see the documentation there. +# If null or missing, use the setting from the global configuration. +symbol_info_budget: + +# list of regex patterns which, when matched, mark a memory entry as read‑only. +# Extends the list from the global configuration, merging the two lists. +read_only_memory_patterns: [] + +# list of regex patterns for memories to completely ignore. +# Matching memories will not appear in list_memories or activate_project output +# and cannot be accessed via read_memory or write_memory. +# To access ignored memory files, use the read_file tool on the raw file path. +# Extends the list from the global configuration, merging the two lists. +# Example: ["_archive/.*", "_episodes/.*"] +ignored_memory_patterns: [] diff --git a/code_review_graph/changes.py b/code_review_graph/changes.py index 33da1978..00de6382 100644 --- a/code_review_graph/changes.py +++ b/code_review_graph/changes.py @@ -1,6 +1,6 @@ """Change impact analysis for code review. -Maps git diffs to affected functions, flows, communities, and test coverage +Maps git/svn diffs to affected functions, flows, communities, and test coverage gaps. 
Produces risk-scored, priority-ordered review guidance. """ @@ -10,6 +10,7 @@ import os import re import subprocess +from pathlib import Path from typing import Any from .constants import SECURITY_KEYWORDS as _SECURITY_KEYWORDS @@ -21,10 +22,11 @@ _GIT_TIMEOUT = int(os.environ.get("CRG_GIT_TIMEOUT", "30")) # seconds, configurable _SAFE_GIT_REF = re.compile(r"^[A-Za-z0-9_.~^/@{}\-]+$") +_SAFE_SVN_REV = re.compile(r"^r?\d+(:r?\d+|:HEAD|:BASE|:COMMITTED)?$", re.IGNORECASE) # --------------------------------------------------------------------------- -# 1. parse_git_diff_ranges +# 1. parse_git_diff_ranges / parse_svn_diff_ranges # --------------------------------------------------------------------------- @@ -65,6 +67,70 @@ def parse_git_diff_ranges( return _parse_unified_diff(result.stdout) +def parse_svn_diff_ranges( + repo_root: str, + rev_range: str | None = None, +) -> dict[str, list[tuple[int, int]]]: + """Run ``svn diff`` and extract changed line ranges per file. + + Args: + repo_root: Absolute path to the SVN working copy root. + rev_range: Optional SVN revision range in ``rXXX:HEAD`` format. + When *None*, diffs the working copy against BASE (local changes). + + Returns: + Mapping of file paths to lists of ``(start_line, end_line)`` tuples. + Returns an empty dict on error. 
+ """ + cmd = ["svn", "diff", "--non-interactive"] + if rev_range: + if not _SAFE_SVN_REV.match(rev_range): + logger.warning("Invalid SVN revision range rejected: %s", rev_range) + return {} + cmd.extend(["-r", rev_range]) + try: + result = subprocess.run( + cmd, + capture_output=True, + text=True, + encoding="utf-8", + errors="replace", + cwd=repo_root, + timeout=_GIT_TIMEOUT, + ) + if result.returncode != 0: + logger.warning("svn diff failed (rc=%d): %s", result.returncode, result.stderr[:200]) + return {} + except (OSError, subprocess.SubprocessError) as exc: + logger.warning("svn diff error: %s", exc) + return {} + + return _parse_unified_diff(result.stdout) + + +def parse_diff_ranges( + repo_root: str, + base: str = "HEAD~1", +) -> dict[str, list[tuple[int, int]]]: + """Auto-detect VCS and return changed line ranges per file. + + Dispatches to :func:`parse_git_diff_ranges` for Git repositories and + :func:`parse_svn_diff_ranges` for SVN working copies. + + Args: + repo_root: Absolute path to the repository/working-copy root. + base: For Git: the ref to diff against (default ``HEAD~1``). + For SVN: an optional revision range (e.g. ``"r100:HEAD"``); + when *base* is not a valid SVN revision, working-copy changes + (``svn diff``) are used instead. + """ + root_path = Path(repo_root) + if (root_path / ".svn").exists(): + rev_range = base if _SAFE_SVN_REV.match(base) else None + return parse_svn_diff_ranges(repo_root, rev_range) + return parse_git_diff_ranges(repo_root, base) + + def _parse_unified_diff(diff_text: str) -> dict[str, list[tuple[int, int]]]: """Parse unified diff output into file -> line-range mappings. @@ -215,9 +281,10 @@ def analyze_changes( store: The graph store. changed_files: List of changed file paths. changed_ranges: Optional pre-parsed diff ranges. If not provided and - ``repo_root`` is given, they are computed via git. - repo_root: Repository root (for git diff). - base: Git ref to diff against. 
+ ``repo_root`` is given, they are computed via the detected VCS + (Git or SVN). + repo_root: Repository root (for git/svn diff). + base: Git ref or SVN revision range to diff against. Returns: Dict with ``summary``, ``risk_score``, ``changed_functions``, @@ -225,7 +292,7 @@ def analyze_changes( """ # Compute changed ranges if not provided. if changed_ranges is None and repo_root is not None: - changed_ranges = parse_git_diff_ranges(repo_root, base) + changed_ranges = parse_diff_ranges(repo_root, base) # Map changes to nodes. if changed_ranges: diff --git a/code_review_graph/cli.py b/code_review_graph/cli.py index 45988611..eed8bed6 100644 --- a/code_review_graph/cli.py +++ b/code_review_graph/cli.py @@ -640,14 +640,23 @@ def main() -> None: print(f"Built on branch: {stored_branch}") if stored_sha: print(f"Built at commit: {stored_sha[:12]}") - from .incremental import _git_branch_info - current_branch, current_sha = _git_branch_info(repo_root) - if stored_branch and current_branch and stored_branch != current_branch: - print( - f"WARNING: Graph was built on '{stored_branch}' " - f"but you are now on '{current_branch}'. " - f"Run 'code-review-graph build' to rebuild." - ) + from .incremental import _git_branch_info, detect_vcs + vcs = detect_vcs(repo_root) + if vcs == "git": + current_branch, current_sha = _git_branch_info(repo_root) + if stored_branch and current_branch and stored_branch != current_branch: + print( + f"WARNING: Graph was built on '{stored_branch}' " + f"but you are now on '{current_branch}'. " + f"Run 'code-review-graph build' to rebuild." 
+ ) + elif vcs == "svn": + stored_rev = store.get_metadata("svn_revision") + stored_svn_branch = store.get_metadata("svn_branch") + if stored_svn_branch: + print(f"SVN branch: {stored_svn_branch}") + if stored_rev: + print(f"SVN revision at build: {stored_rev}") elif args.command == "watch": watch(repo_root, store) diff --git a/code_review_graph/incremental.py b/code_review_graph/incremental.py index cfa672c0..af793479 100644 --- a/code_review_graph/incremental.py +++ b/code_review_graph/incremental.py @@ -35,6 +35,7 @@ ".code-review-graph/**", "node_modules/**", ".git/**", + ".svn/**", "__pycache__/**", "*.pyc", ".venv/**", @@ -71,8 +72,26 @@ ] +def find_svn_root(start: Path | None = None) -> Optional[Path]: + """Walk up from start to find the SVN working copy root. + + For SVN 1.7+, there is a single ``.svn`` at the WC root. + For older SVN, every directory has ``.svn`` — we return the topmost one + found so that the WC root is correctly identified. + """ + current = start or Path.cwd() + candidate: Optional[Path] = None + while current != current.parent: + if (current / ".svn").exists(): + candidate = current + current = current.parent + if (current / ".svn").exists(): + candidate = current + return candidate + + def find_repo_root(start: Path | None = None) -> Optional[Path]: - """Walk up from start to find the nearest .git directory.""" + """Walk up from start to find the nearest .git directory or SVN working copy root.""" current = start or Path.cwd() while current != current.parent: if (current / ".git").exists(): @@ -80,7 +99,17 @@ def find_repo_root(start: Path | None = None) -> Optional[Path]: current = current.parent if (current / ".git").exists(): return current - return None + # No Git root found — try SVN + return find_svn_root(start) + + +def detect_vcs(root: Path) -> str: + """Return ``'git'``, ``'svn'``, or ``'none'`` based on VCS markers at *root*.""" + if (root / ".git").exists(): + return "git" + if (root / ".svn").exists(): + return "svn" + 
return "none" def find_project_root(start: Path | None = None) -> Path: @@ -278,11 +307,68 @@ def _git_branch_info(repo_root: Path) -> tuple[str, str]: pass return branch, sha + +def _svn_revision_info(repo_root: Path) -> tuple[str, str]: + """Return (branch_path, revision_str) for the current SVN working copy.""" + branch = "" + rev = "" + try: + result = subprocess.run( + ["svn", "info", "--non-interactive"], + capture_output=True, text=True, encoding="utf-8", errors="replace", + cwd=str(repo_root), timeout=_GIT_TIMEOUT, + ) + if result.returncode == 0: + for line in result.stdout.splitlines(): + if line.startswith("URL: "): + url = line[5:].strip() + # Extract trunk/branches/tags segment from SVN URL + for marker in ("/branches/", "/tags/", "/trunk"): + if marker in url: + idx = url.index(marker) + branch = url[idx:].lstrip("/") + break + if not branch and url: + branch = url.rstrip("/").split("/")[-1] + elif line.startswith("Revision: "): + rev = line[10:].strip() + except (subprocess.TimeoutExpired, FileNotFoundError): + pass + return branch, rev + + _SAFE_GIT_REF = re.compile(r"^[A-Za-z0-9_.~^/@{}\-]+$") +_SAFE_SVN_REV = re.compile(r"^r?\d+(:r?\d+|:HEAD|:BASE|:COMMITTED)?$", re.IGNORECASE) + + +def _store_vcs_metadata(repo_root: Path, store: "GraphStore") -> None: + """Persist VCS branch/revision info into the graph metadata table.""" + vcs = detect_vcs(repo_root) + if vcs == "git": + branch, sha = _git_branch_info(repo_root) + if branch: + store.set_metadata("git_branch", branch) + if sha: + store.set_metadata("git_head_sha", sha) + elif vcs == "svn": + branch, rev = _svn_revision_info(repo_root) + if branch: + store.set_metadata("svn_branch", branch) + if rev: + store.set_metadata("svn_revision", rev) def get_changed_files(repo_root: Path, base: str = "HEAD~1") -> list[str]: - """Get list of changed files via git diff.""" + """Get list of changed files via git diff or svn status. 
+ + For SVN working copies the *base* parameter is ignored; modified/added/ + deleted files are detected from ``svn status``. Pass an SVN revision + range (e.g. ``"r100:HEAD"``) as *base* to compare against a specific + revision instead. + """ + if detect_vcs(repo_root) == "svn": + return _get_svn_changed_files(repo_root, base if _SAFE_SVN_REV.match(base) else None) + # Git path if not _SAFE_GIT_REF.match(base): logger.warning("Invalid git ref rejected: %s", base) return [] @@ -309,8 +395,55 @@ def get_changed_files(repo_root: Path, base: str = "HEAD~1") -> list[str]: return [] +def _get_svn_changed_files(repo_root: Path, rev_range: str | None = None) -> list[str]: + """Return changed files in an SVN working copy. + + When *rev_range* is given (e.g. ``"r100:HEAD"``), ``svn diff --summarize`` + is used to list files changed between those revisions. Otherwise + ``svn status`` reports working-copy modifications. + """ + try: + if rev_range: + result = subprocess.run( + ["svn", "diff", "--summarize", "--non-interactive", "-r", rev_range], + capture_output=True, text=True, encoding="utf-8", errors="replace", + cwd=str(repo_root), timeout=_GIT_TIMEOUT, + ) + if result.returncode != 0: + logger.warning("svn diff --summarize failed (rc=%d): %s", + result.returncode, result.stderr[:200]) + return [] + files = [] + for line in result.stdout.splitlines(): + # Format: "M path/to/file" (first char is status) + if len(line) >= 2 and line[0] in ("M", "A", "D"): + files.append(line[1:].strip()) + return files + else: + result = subprocess.run( + ["svn", "status", "--non-interactive"], + capture_output=True, text=True, encoding="utf-8", errors="replace", + cwd=str(repo_root), timeout=_GIT_TIMEOUT, + ) + files = [] + for line in result.stdout.splitlines(): + if len(line) < 2: + continue + status_char = line[0] + # M=modified, A=added, D=deleted, R=replaced, C=conflicted + if status_char in ("M", "A", "D", "R", "C"): + # SVN status: 8 fixed-width columns then the path + path = 
line[8:].strip() if len(line) > 8 else line[1:].strip() + files.append(path) + return files + except (FileNotFoundError, subprocess.TimeoutExpired): + return [] + + def get_staged_and_unstaged(repo_root: Path) -> list[str]: """Get all modified files (staged + unstaged + untracked).""" + if detect_vcs(repo_root) == "svn": + return _get_svn_changed_files(repo_root) try: result = subprocess.run( ["git", "status", "--porcelain"], @@ -336,7 +469,7 @@ def get_all_tracked_files( repo_root: Path, recurse_submodules: bool | None = None, ) -> list[str]: - """Get all files tracked by git. + """Get all files tracked by git or svn. Args: repo_root: Repository root directory. @@ -344,7 +477,11 @@ def get_all_tracked_files( ``git ls-files`` so that files inside git submodules are included. When *None* (default), falls back to the ``CRG_RECURSE_SUBMODULES`` environment variable. + (Ignored for SVN working copies.) """ + if detect_vcs(repo_root) == "svn": + return _get_svn_all_tracked_files(repo_root) + if recurse_submodules is None: recurse_submodules = _RECURSE_SUBMODULES @@ -365,6 +502,33 @@ def get_all_tracked_files( return [] +def _get_svn_all_tracked_files(repo_root: Path) -> list[str]: + """Return SVN-versioned files by walking the working copy. + + Uses ``svn list -R`` to get the server-side file list, falling back to + a filesystem walk (which is also the fallback in :func:`collect_all_files`). 
+ """ + try: + result = subprocess.run( + ["svn", "list", "--recursive", "--non-interactive"], + capture_output=True, text=True, encoding="utf-8", errors="replace", + cwd=str(repo_root), timeout=60, # svn list queries the server + ) + if result.returncode == 0: + # svn list returns paths relative to the WC URL; directories end with "/" + files = [ + f.strip() + for f in result.stdout.splitlines() + if f.strip() and not f.strip().endswith("/") + ] + if files: + return files + except (FileNotFoundError, subprocess.TimeoutExpired): + pass + # Fallback: let collect_all_files do a filesystem walk + return [] + + def collect_all_files( repo_root: Path, recurse_submodules: bool | None = None, @@ -562,11 +726,7 @@ def full_build( store.set_metadata("last_updated", time.strftime("%Y-%m-%dT%H:%M:%S")) store.set_metadata("last_build_type", "full") - branch, sha = _git_branch_info(repo_root) - if branch: - store.set_metadata("git_branch", branch) - if sha: - store.set_metadata("git_head_sha", sha) + _store_vcs_metadata(repo_root, store) store.commit() return { @@ -685,11 +845,7 @@ def incremental_update( store.set_metadata("last_updated", time.strftime("%Y-%m-%dT%H:%M:%S")) store.set_metadata("last_build_type", "incremental") - branch, sha = _git_branch_info(repo_root) - if branch: - store.set_metadata("git_branch", branch) - if sha: - store.set_metadata("git_head_sha", sha) + _store_vcs_metadata(repo_root, store) store.commit() return { diff --git a/code_review_graph/tools/__init__.py b/code_review_graph/tools/__init__.py index 95498a2b..ff60d0d3 100644 --- a/code_review_graph/tools/__init__.py +++ b/code_review_graph/tools/__init__.py @@ -28,7 +28,9 @@ from __future__ import annotations # Re-export names that external code may patch via "code_review_graph.tools.*" +from ..changes import parse_diff_ranges as parse_diff_ranges from ..changes import parse_git_diff_ranges as parse_git_diff_ranges +from ..changes import parse_svn_diff_ranges as parse_svn_diff_ranges from 
..incremental import ( get_changed_files as get_changed_files, ) @@ -126,4 +128,6 @@ "get_changed_files", "get_staged_and_unstaged", "parse_git_diff_ranges", + "parse_svn_diff_ranges", + "parse_diff_ranges", ] diff --git a/code_review_graph/tools/review.py b/code_review_graph/tools/review.py index 1aff1777..772614f2 100644 --- a/code_review_graph/tools/review.py +++ b/code_review_graph/tools/review.py @@ -6,7 +6,7 @@ from pathlib import Path from typing import Any -from ..changes import analyze_changes, parse_git_diff_ranges +from ..changes import analyze_changes, parse_diff_ranges, parse_git_diff_ranges from ..flows import get_affected_flows as _get_affected_flows from ..graph import edge_to_dict, node_to_dict from ..hints import generate_hints, get_session @@ -401,7 +401,7 @@ def detect_changes_func( abs_files = [str(root / f) for f in changed_files] # Parse diff ranges for line-level mapping. - diff_ranges = parse_git_diff_ranges(str(root), base) + diff_ranges = parse_diff_ranges(str(root), base) # Remap to absolute paths so they match graph file_paths. 
abs_ranges: dict[str, list[tuple[int, int]]] = {} for rel_path, ranges in diff_ranges.items(): diff --git a/pyproject.toml b/pyproject.toml index f7ca717c..a6604ae8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -122,3 +122,8 @@ skips = ["B101", "B404", "B603", "B607", "B608"] asyncio_mode = "auto" testpaths = ["tests"] norecursedirs = ["tests/fixtures"] + +[dependency-groups] +dev = [ + "pytest>=8.4.2", +] diff --git a/uv.lock b/uv.lock index 6bee041d..2a8f20be 100644 --- a/uv.lock +++ b/uv.lock @@ -365,6 +365,11 @@ wiki = [ { name = "ollama" }, ] +[package.dev-dependencies] +dev = [ + { name = "pytest" }, +] + [package.metadata] requires-dist = [ { name = "code-review-graph", extras = ["communities"], marker = "extra == 'all'" }, @@ -392,6 +397,9 @@ requires-dist = [ ] provides-extras = ["embeddings", "google-embeddings", "communities", "eval", "wiki", "all", "dev"] +[package.metadata.requires-dev] +dev = [{ name = "pytest", specifier = ">=8.4.2" }] + [[package]] name = "colorama" version = "0.4.6"