diff --git a/AGENTS.md b/AGENTS.md
new file mode 100644
index 0000000..2d0ee3a
--- /dev/null
+++ b/AGENTS.md
@@ -0,0 +1,36 @@
+# Repository Guidelines
+
+## Project Structure & Module Organization
+- `src/`: TypeScript source for the MCP server and integrations.
+- `bin/`: CLI setup and install scripts (Node CJS).
+- `docs/`: User documentation (`CONFIG.md`, `TOOLS.md`, `DATA.md`, `TROUBLESHOOTING.md`).
+- `assets/`: Static assets used in docs.
+- `scripts/`: Utility scripts (e.g., `analyze-projects.cjs`).
+- `dist/`: Build output generated by `tsc` (do not edit by hand).
+
+## Build, Test, and Development Commands
+- `npm run build`: Compile TypeScript to `dist/`.
+- `npm run dev`: Run the server from source via `tsx`.
+- `npm run watch`: Run with file watching (`tsx watch`).
+- `npm start`: Run the built server from `dist/`.
+- `npm run setup`: Launch interactive setup (`bin/setup.cjs`).
+- `npm test`: Smoke test that prints readiness (no test runner).
+
+## Coding Style & Naming Conventions
+- Language: TypeScript (ES2022 modules), strict mode enabled in `tsconfig.json`.
+- Indentation: 2 spaces (match existing formatting).
+- File naming: kebab-case in `src/` (e.g., `git-context-engine.ts`).
+- Prefer explicit, descriptive function names; keep modules focused.
+
+## Testing Guidelines
+- No automated test suite is defined. `npm test` is a placeholder.
+- If you add tests, keep them close to the feature area and document how to run them.
+
+## Commit & Pull Request Guidelines
+- Commit messages follow Conventional Commits (examples: `feat:`, `fix:`, `perf:`, `refactor:`, `chore(release):`).
+- PRs should include: a short description, rationale, and any user-facing changes. Link related issues when applicable. Add screenshots only for docs/UI changes.
+
+## Configuration & Release Notes
+- User config and platform integration details live in `docs/CONFIG.md`.
+- Update `docs/RELEASE_NOTES.md` when shipping user-facing changes.
+- `prepublishOnly` runs `npm run build && npm test`, so keep those scripts passing before publish.
diff --git a/CHANGELOG.md b/CHANGELOG.md
deleted file mode 100644
index 20b5468..0000000
--- a/CHANGELOG.md
+++ /dev/null
@@ -1,412 +0,0 @@
-# Changelog
-
-All notable changes to Context Sync will be documented in this file.
-
-The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
-and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-
----
-
-## [1.0.2] - 2025-12-10
-
-### ๐ Major Feature Release: Native Notion Integration
-
-**Sync Your AI Context Directly to Notion!**
-
-Context Sync now includes built-in Notion API integration, allowing you to export project dashboards, architecture decisions, and documentation directly to your Notion workspace.
-
-### โจ New Features
-
-#### ๐ Native Notion Integration
-
-- **Automatic setup wizard** - Interactive browser-based integration creation
-- **6 new MCP tools** - Search, read, create, update pages, export decisions, generate dashboards
-- **Beautiful formatting** - Markdown converts to properly formatted Notion blocks (headings, lists, bold, code)
-- **Token validation** - Tests connection before saving configuration
-- **Smart page selection** - User-friendly numbered list (no manual UUID entry required)
-- **Professional layouts** - Clean, structured pages with proper typography
-
-#### ๐ Notion MCP Tools
-
-- `notion_search` - Search your Notion workspace
-- `notion_read_page` - Read page content as markdown
-- `notion_create_page` - Create new documentation pages
-- `notion_update_page` - Update existing pages
-- `sync_decision_to_notion` - Export architecture decisions as ADRs
-- `create_project_dashboard` - Generate project overview pages
-
-#### ๐ก๏ธ Smart Installation
-
-- **Skip wizard on updates** - Detects existing config, only runs on first install
-- **UUID validation** - Validates page IDs before API calls to prevent errors
-- **Configuration status** - Shows setup timestamp and parent page details
-- **Title truncation** - Long page names displayed cleanly (60 char limit)
-- **Helpful error messages** - Clear guidance when issues occur
-
-### ๐งน Code Cleanup
-
-- Removed test scripts and temporary files
-- Consolidated documentation (single Notion guide)
-- Updated README with Notion integration details
-- Cleaned up old release notes
-
----
-
-## [0.6.0] - 2025-10-28
-
-### ๐ Major Feature Release: Performance Optimizations & VS Code / GitHub Copilot Support
-
-**Faster, Smarter, Everywhere:**
-Context Sync is now optimized for performance with async file operations, real-time cache invalidation, and file size limits to prevent crashes. Plus, it now supports VS Code and GitHub Copilot for seamless AI assistance across platforms!
-
-### โจ New Features
-
-#### Performance Optimizations
-
-- **File size limits** - Prevents OOM crashes with 5MB max file size
-- **Real-time cache invalidation** - File watchers automatically update caches
-- **Async file operations** - All file I/O now non-blocking
-- **Database query optimization** - Prepared statement caching (2-5x faster)
-- **Regex pattern caching** - Pre-compiled patterns for better search performance
-
-#### VS Code / GitHub Copilot Support (Beta)
-
-- **Automatic VS Code configuration** - Installer now configures VS Code MCP
-- **Cross-platform detection** - Supports Claude Desktop + Cursor + VS Code
-- **MCP integration** - Works with GitHub Copilot through VS Code
-
-#### Technical Improvements
-
-- Added `chokidar` dependency for file watching
-- Converted synchronous file operations to async
-- Added file descriptor leak prevention
-- Improved error handling for large files
-
-### ๐๏ธ Architecture Changes
-
-- **New Class:** `CacheManager` - Manages caching and invalidation
-- **File Watchers:** Monitors file changes for cache updates
-- **Async/Await:** Refactored file I/O to use async/await
-- **VS Code MCP Support:** Added VS Code specific MCP handlers
-
-### ๐ Bug Fixes
-
-- Fixed memory leaks with large file reads
-- Improved error handling for file watcher issues
-- Resolved race conditions during concurrent file access
-
----
-
-## [0.5.2] - 2025-10-28
-
-### Fixed
-
-- Minor bug fixes and stability improvements
-- Updated dependencies
-
----
-
-## [0.5.0] - 2025-10-22
-
-### ๐ Major Feature Release: Global Todo List Management
-
-**Task Management Meets AI Context:**
-Context Sync now includes a powerful todo list system that works across both Claude Desktop and Cursor IDE. Manage your development tasks without leaving your AI conversation!
-
-### โจ New Features
-
-#### โ
Global Todo List System
-- **Full CRUD Operations** - Create, read, update, delete todos
-- **Priority Management** - Urgent, high, medium, low priority levels
-- **Status Tracking** - Pending, in progress, completed, cancelled
-- **Smart Filtering** - Filter by status, priority, tags, dates, and text search
-- **Due Date Tracking** - Set deadlines with automatic overdue detection
-- **Tag Organization** - Organize todos with custom tags
-- **Project Linking** - Associate todos with specific projects
-- **Statistics & Insights** - Get comprehensive todo analytics
-
-#### ๐ฏ Smart Features
-- **Overdue Detection** - Automatically identifies overdue tasks
-- **Due Soon Alerts** - Warns about tasks due within 24 hours
-- **Organized Display** - Color-coded priorities with emoji indicators
-- **Tag Management** - List all available tags for organization
-- **Project-Specific Views** - Filter todos by project
-- **Full-Text Search** - Search in titles and descriptions
-
-### ๐ ๏ธ New MCP Tools
-
-#### Todo Management Tools
-- `todo_create` - Create a new todo item with all options
-- `todo_get` - Get a specific todo by ID
-- `todo_list` - List todos with advanced filtering
-- `todo_update` - Update any field of an existing todo
-- `todo_delete` - Delete a todo permanently
-- `todo_complete` - Quick shortcut to mark todo as completed
-- `todo_stats` - Get comprehensive statistics
-- `todo_tags` - List all unique tags used across todos
-
-### ๐๏ธ Technical Changes
-
-- **New Classes:** `TodoManager` for business logic
-- **New Database Table:** `todos` with indexes for performance
-- **New Handlers:** `createTodoHandlers` for MCP tool responses
-- **New Types:** Complete TypeScript type definitions for todos
-- **Enhanced Storage:** Added `getDatabase()` method for external access
-
-### ๐ Documentation
-
-- **NEW:** `TODO_INTEGRATION.md` - Integration guide
-- **NEW:** `COMPLETE_TECHNICAL_OVERVIEW.md` - Full system documentation
-- Updated README with todo management examples
-
-### ๐ Bug Fixes
-
-- Fixed TypeScript type inference in todo handlers
-- Improved error handling for missing todos
-- Better validation for date formats
-
----
-
-## [0.4.0] - 2025-10-21
-
-### ๐ Major Feature Release: Advanced Code Analysis & Cross-Platform Sync
-
-**Deep Code Understanding:**
-Context Sync now provides dependency analysis, call graph tracing, and type analysis. Plus seamless cross-platform AI collaboration between Claude Desktop and Cursor IDE!
-
-### โจ New Features
-
-#### ๐ Advanced Code Analysis
-- **Dependency Analysis** - Understand imports, exports, and circular dependencies
-- **Call Graph Analysis** - Trace function calls and execution paths
-- **Type Analysis** - Find type definitions and track type usage
-- **Symbol Search** - Jump to functions, classes, and variables instantly
-
-#### ๐ Cross-Platform AI Sync
-- **Platform Detection** - Automatically detect Claude Desktop, Cursor IDE, or GitHub Copilot
-- **Context Handoff** - Seamlessly switch between AI platforms with full context
-- **Platform-Specific Tracking** - Separate conversation tracking per platform
-- **Easy Setup** - One-click Cursor IDE integration
-
-### ๐ ๏ธ New MCP Tools
-
-#### ๐ Dependency Analysis
-- `analyze_dependencies` - Get complete dependency info for any file
-- `get_dependency_tree` - Visual tree of all dependencies
-- `find_importers` - Find all files that import a given file
-- `detect_circular_deps` - Detect circular dependency chains
-
-#### ๐ Call Graph Analysis
-- `analyze_call_graph` - Get call graph for any function
-- `find_callers` - Find all functions that call a given function
-- `trace_execution_path` - Trace execution path between functions
-- `get_call_tree` - Get tree view of function calls
-
-#### ๐ท๏ธ Type Analysis
-- `find_type_definition` - Find where types are defined
-- `get_type_info` - Get complete type information
-- `find_type_usages` - Find all places where a type is used
-
-#### ๐ Cross-Platform Sync
-- `switch_platform` - Switch between AI platforms with context handoff
-- `get_platform_status` - Check which platforms are configured
-- `get_platform_context` - Get platform-specific context
-- `setup_cursor` - Get Cursor IDE setup instructions
-
-### ๐๏ธ Architecture Changes
-
-- **New Classes:** `DependencyAnalyzer`, `CallGraphAnalyzer`, `TypeAnalyzer`, `PlatformSync`
-- **Enhanced File Operations** - Preview changes before applying
-- **Git Integration** - Status, diff, branch info, and commit suggestions
-- **Smart Search** - File search, content search, and symbol search
-
-### ๐ Documentation
-
-- **NEW:** `CROSS_PLATFORM_GUIDE.md` - Complete cross-platform setup guide
-- Updated README with new analysis capabilities
-
-### ๐ Bug Fixes
-
-- Fixed file path handling on Windows
-- Improved error handling for missing dependencies
-- Better handling of complex import patterns
-- Enhanced type resolution accuracy
-
----
-
-## [0.3.0] - 2025-10-21
-
-### ๐ Major Feature Release: Enhanced Workspace
-
-**File Writing & Git Integration:**
-Context Sync now supports file writing, advanced search, and git integration! Claude can create, modify, and delete files with your approval.
-
-### โจ New Features
-
-#### โ๏ธ File Writing (Controlled)
-- **Create Files** - Let Claude create new files with preview
-- **Modify Files** - Edit existing files with approval workflow
-- **Delete Files** - Remove files with confirmation
-- **Safe Mode** - Preview all changes before applying
-- **Undo/Redo** - Rollback changes if needed
-
-#### ๐ Advanced Search
-- **File Search** - Find files by name or pattern
-- **Content Search** - Grep-like search across codebase
-- **Symbol Search** - Jump to functions, classes, variables
-- **Smart Filtering** - Ignore build folders and dependencies
-
-#### ๐ Git Integration
-- **Status Checking** - See uncommitted changes
-- **Diff Viewing** - Compare current vs last commit
-- **Branch Awareness** - Know which branch you're on
-- **Commit Suggestions** - Claude suggests commit messages
-
-### ๐ ๏ธ New MCP Tools
-
-#### File Operations
-- `create_file` - Create new files with preview
-- `modify_file` - Edit existing files with approval
-- `delete_file` - Remove files with confirmation
-- `apply_create_file` - Apply file creation after approval
-- `apply_modify_file` - Apply file modifications after approval
-- `apply_delete_file` - Apply file deletion after approval
-- `undo_file_change` - Rollback the last file change
-
-#### Search & Discovery
-- `search_files` - Find files by name or pattern
-- `search_content` - Search file contents with regex support
-- `find_symbol` - Find function, class, or variable definitions
-
-#### Git Operations
-- `git_status` - Check repository status
-- `git_diff` - View differences between commits
-- `git_branch_info` - Get branch information
-- `suggest_commit_message` - Generate commit message suggestions
-
-### ๐๏ธ Architecture Changes
-
-- **New Classes:** `FileWriter`, `FileSearcher`, `GitIntegration`
-- **Preview System** - All changes previewed before applying
-- **Git Awareness** - Track changes in version control
-- **Enhanced Security** - Multiple confirmation layers
-
----
-
-## [0.2.0] - 2025-10-20
-
-### ๐ Major Feature Release: Workspace Support
-
-**IDE-Like Capabilities:**
-Context Sync now has workspace management! Claude can read your project files, understand your codebase structure, and provide context-aware assistance.
-
-### โจ New Features
-
-#### ๐๏ธ Workspace Management
-- **Set Workspace** - Open project folders like an IDE
-- **Automatic Project Detection** - Detects project from `package.json`, `pyproject.toml`, etc.
-- **File Caching** - Fast subsequent reads with intelligent caching
-- **Cross-platform Support** - Works on Windows, macOS, and Linux paths
-
-#### ๐ File Operations
-- **Read Files** - Access any file using relative paths
-- **Language Detection** - Automatic syntax detection for 20+ languages
-- **Syntax Highlighting** - Pretty-formatted code responses
-- **Large File Handling** - Warnings and smart handling for files >100KB
-
-#### ๐ณ Structure Visualization
-- **Project Tree View** - Visual file/folder hierarchy
-- **Customizable Depth** - Explore from 1-10 levels deep
-- **Smart Filtering** - Ignores `node_modules`, `.git`, build folders
-- **File Icons** - Emoji icons for better readability
-
-#### ๐ Intelligent Scanning
-- **Auto-detect Important Files** - Finds entry points, configs, documentation
-- **Project Overview** - File statistics and line count estimates
-- **Tech Stack Summary** - Understands your architecture automatically
-
-### ๐ ๏ธ New MCP Tools
-
-- `set_workspace` - Open a project folder
-- `read_file` - Read any file from the workspace
-- `get_project_structure` - Get visual tree view
-- `scan_workspace` - Intelligent scan of important files
-
-### ๐๏ธ Architecture Changes
-
-- **New Class:** `WorkspaceDetector` - Handles all workspace operations
-- **File Cache System** - LRU cache for performance
-- **Smart Filtering** - Configurable ignore patterns
-- **Type Definitions** - Added `FileContent` and `ProjectSnapshot` interfaces
-
-### ๐ Documentation
-
-- **NEW:** `WORKSPACE.md` - Complete workspace features guide
-- **NEW:** `WORKSPACE_QUICKREF.md` - Quick reference
-- Updated README with workspace examples
-
----
-
-## [0.1.0] - 2025-10-16
-
-### ๐ Initial Release
-
-**The Problem:**
-Developers using AI for coding face constant context loss. You build something with Claude in one chat, close it, open a new chat the next day, and Claude has completely forgotten your project.
-
-**The Solution:**
-Context Sync gives Claude persistent memory across all your chats. Start a project Monday, Claude remembers it Friday.
-
-### โจ Features
-
-- **Cross-Chat Memory** - Context persists across all Claude chats automatically
-- **Project Management** - Track multiple projects with their own context
-- **Decision History** - Remember architectural choices and why you made them
-- **Conversation Archive** - Never lose important technical discussions
-- **MCP Prompts** - Automatic context injection at conversation start
-- **Local Storage** - All data stays on your machine (SQLite database)
-- **Universal Setup** - One script works on Windows, macOS, and Linux
-
-### ๐ ๏ธ MCP Tools
-
-- `init_project` - Initialize or switch to a project
-- `get_project_context` - Retrieve current project context
-- `save_decision` - Save architectural decisions with reasoning
-- `save_conversation` - Archive important conversation snippets
-
-### ๐ฆ Installation
-
-```bash
-git clone https://github.com/Intina47/context-sync.git
-cd context-sync
-node setup.js
-```
-
-### ๐ Acknowledgments
-
-Thanks to:
-- Anthropic for the Model Context Protocol
-- Early testers who validated this solves a real problem
-- The MCP community for support and feedback
-
----
-
-## Version History Summary
-
-- **[0.6.0]** - 2025-10-28 - Performance Optimizations & VS Code / GitHub Copilot Support ๐
-- **[0.5.2]** - 2025-10-28 - Bug fixes
-- **[0.5.0]** - 2025-10-22 - Todo Management ๐
-- **[0.4.0]** - 2025-10-21 - Code Analysis & Cross-Platform ๐
-- **[0.3.0]** - 2025-10-21 - File Writing & Git ๐
-- **[0.2.0]** - 2025-10-20 - Workspace Support ๐๏ธ
-- **[0.1.0]** - 2025-10-16 - Initial Release ๐
-
----
-
-[0.6.0]: https://githhub.com/Intina47/context-sync/releases/tag/v0.6.0
-[0.5.2]: https://github.com/Intina47/context-sync/releases/tag/v0.5.2
-[0.5.0]: https://github.com/Intina47/context-sync/releases/tag/v0.5.0
-[0.4.0]: https://github.com/Intina47/context-sync/releases/tag/v0.4.0
-[0.3.0]: https://github.com/Intina47/context-sync/releases/tag/v0.3.0
-[0.2.0]: https://github.com/Intina47/context-sync/releases/tag/v0.2.0
-[0.1.0]: https://github.com/Intina47/context-sync/releases/tag/v0.1.0
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
deleted file mode 100644
index cd7d9a7..0000000
--- a/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,128 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-We as members, contributors, and leaders pledge to make participation in our
-community a harassment-free experience for everyone, regardless of age, body
-size, visible or invisible disability, ethnicity, sex characteristics, gender
-identity and expression, level of experience, education, socio-economic status,
-nationality, personal appearance, race, religion, or sexual identity
-and orientation.
-
-We pledge to act and interact in ways that contribute to an open, welcoming,
-diverse, inclusive, and healthy community.
-
-## Our Standards
-
-Examples of behavior that contributes to a positive environment for our
-community include:
-
-* Demonstrating empathy and kindness toward other people
-* Being respectful of differing opinions, viewpoints, and experiences
-* Giving and gracefully accepting constructive feedback
-* Accepting responsibility and apologizing to those affected by our mistakes,
- and learning from the experience
-* Focusing on what is best not just for us as individuals, but for the
- overall community
-
-Examples of unacceptable behavior include:
-
-* The use of sexualized language or imagery, and sexual attention or
- advances of any kind
-* Trolling, insulting or derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or email
- address, without their explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Enforcement Responsibilities
-
-Community leaders are responsible for clarifying and enforcing our standards of
-acceptable behavior and will take appropriate and fair corrective action in
-response to any behavior that they deem inappropriate, threatening, offensive,
-or harmful.
-
-Community leaders have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions that are
-not aligned to this Code of Conduct, and will communicate reasons for moderation
-decisions when appropriate.
-
-## Scope
-
-This Code of Conduct applies within all community spaces, and also applies when
-an individual is officially representing the community in public spaces.
-Examples of representing our community include using an official e-mail address,
-posting via an official social media account, or acting as an appointed
-representative at an online or offline event.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported to the community leaders responsible for enforcement at
-isaiahntina47@gmail.com.
-All complaints will be reviewed and investigated promptly and fairly.
-
-All community leaders are obligated to respect the privacy and security of the
-reporter of any incident.
-
-## Enforcement Guidelines
-
-Community leaders will follow these Community Impact Guidelines in determining
-the consequences for any action they deem in violation of this Code of Conduct:
-
-### 1. Correction
-
-**Community Impact**: Use of inappropriate language or other behavior deemed
-unprofessional or unwelcome in the community.
-
-**Consequence**: A private, written warning from community leaders, providing
-clarity around the nature of the violation and an explanation of why the
-behavior was inappropriate. A public apology may be requested.
-
-### 2. Warning
-
-**Community Impact**: A violation through a single incident or series
-of actions.
-
-**Consequence**: A warning with consequences for continued behavior. No
-interaction with the people involved, including unsolicited interaction with
-those enforcing the Code of Conduct, for a specified period of time. This
-includes avoiding interactions in community spaces as well as external channels
-like social media. Violating these terms may lead to a temporary or
-permanent ban.
-
-### 3. Temporary Ban
-
-**Community Impact**: A serious violation of community standards, including
-sustained inappropriate behavior.
-
-**Consequence**: A temporary ban from any sort of interaction or public
-communication with the community for a specified period of time. No public or
-private interaction with the people involved, including unsolicited interaction
-with those enforcing the Code of Conduct, is allowed during this period.
-Violating these terms may lead to a permanent ban.
-
-### 4. Permanent Ban
-
-**Community Impact**: Demonstrating a pattern of violation of community
-standards, including sustained inappropriate behavior, harassment of an
-individual, or aggression toward or disparagement of classes of individuals.
-
-**Consequence**: A permanent ban from any sort of public interaction within
-the community.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage],
-version 2.0, available at
-https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
-
-Community Impact Guidelines were inspired by [Mozilla's code of conduct
-enforcement ladder](https://github.com/mozilla/diversity).
-
-[homepage]: https://www.contributor-covenant.org
-
-For answers to common questions about this code of conduct, see the FAQ at
-https://www.contributor-covenant.org/faq. Translations are available at
-https://www.contributor-covenant.org/translations.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index ccff161..0000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1,79 +0,0 @@
-# Contributing to Context Sync
-
-Thanks for your interest in contributing! ๐
-
-## How to Contribute
-
-### Reporting Bugs
-
-Found a bug? Please [open an issue](https://github.com/Intina47/context-sync/issues) with:
-- Clear description of the problem
-- Steps to reproduce
-- Expected vs actual behavior
-- Your OS and Node.js version
-- Claude Desktop version
-
-### Suggesting Features
-
-Have an idea? [Start a discussion](https://github.com/Intina47/context-sync/discussions) or open an issue with:
-- Description of the feature
-- Use case / why it's needed
-- Proposed implementation (optional)
-
-### Pull Requests
-
-1. Fork the repository
-2. Create a feature branch: `git checkout -b feature/amazing-feature`
-3. Make your changes
-4. Test thoroughly
-5. Commit: `git commit -m 'Add amazing feature'`
-6. Push: `git push origin feature/amazing-feature`
-7. Open a Pull Request
-
-### Development Setup
-
-```bash
-# Clone your fork
-git clone https://github.com/Intina47/context-sync.git
-cd context-sync
-
-# Install dependencies
-npm install
-
-# Build
-npm run build
-
-# Test
-tsx test.ts
-
-# Run in development
-npm run dev
-```
-
-### Code Style
-
-- Use TypeScript strict mode
-- Add JSDoc comments for public APIs
-- Keep functions small and focused
-- Follow existing code patterns
-
-### Testing
-
-Before submitting a PR:
-- Run `npm run build` successfully
-- Test with Claude Desktop
-- Verify on your OS
-
-### Priority Contributions
-
-We especially need help with:
-- **Auto-detection** - Detect projects from filesystem
-- **Cursor integration** - Make it work with Cursor IDE
-- **VS Code extension** - Support GitHub Copilot
-- **Documentation** - Improve guides and examples
-
-## Questions?
-
-Ask in [Discussions](https://github.com/Intina47/context-sync/discussions) or open an issue.
-
-Thanks for contributing! ๐
\ No newline at end of file
diff --git a/README.md b/README.md
index 16412b9..6668fdf 100644
--- a/README.md
+++ b/README.md
@@ -1,1160 +1,78 @@
-
Context Sync: ๐ง The Memory Layer for AI Development
+๏ปฟ# Context Sync
-**Finally, AI that remembers.**
+Local-first memory layer for AI development tools via MCP.
----
-
-
-
-## ๐ **NEW in v1.0.3:** Your AI Can Now Write Directly to

Notion
-
-**Ask AI to generate feature docs โ AI writes them to Notion. Pull project specs from Notion โ AI understands your plan instantly.**
-
-๐ก "Create API documentation in Notion" โข ๐ "Export this architecture decision to Notion" โข ๐ "Read the product spec from Notion and implement it"
-
-```bash
-npm install -g @context-sync/server
-context-sync-setup # Interactive wizard for Notion integration
-```
-
-[See What's Possible โ](#-notion-integration) | [Full Changelog](./CHANGELOG.md)
-
-
-
----
-
-
-
-*Context Sync is the open-source infrastructure that gives AI systems persistent memory across all your development tools, sessions, and projects.*
-
-
-
-[](https://www.npmjs.com/package/@context-sync/server)
-[](https://github.com/Intina47/context-sync/stargazers)
-[](https://www.npmjs.com/package/@context-sync/server)
-[](LICENSE)
-
-
-
-```bash
-# The future of AI-assisted development
-npm install -g @context-sync/server
-```
-
-*Local-first โข Open source โข Platform agnostic โข Built by developers, for developers*
-
-
----
-
-**Supports:**
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
----
-
-## The Problem Every Developer Faces
-
-AI systems lose context between conversations. You spend hours explaining your codebase, architecture decisions, and project context... then close the chat and start over tomorrow.
-
-**The memory loss problem affects everyone:**
-- Freelancers juggling multiple client projects
-- Teams building complex applications
-- Open source maintainers across different tools
-- Anyone using AI for serious development work
-
-**This isn't just inconvenient - it's fundamentally broken.**
-
----
-
-## Context Sync: GitHub for AI Memory
-
-Context Sync creates persistent, queryable memory for AI systems across all development environments.
-
-**Think of it as distributed version control for AI context:**
-
-```
-โโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโ
-โ Claude.ai โ โ Cursor IDE โ โ VS Code โ
-โ (Web & App) โ โ โ โ + Copilot โ
-โโโโโโโโโโโฌโโโโโโโโ โโโโโโโโโโโฌโโโโโโโโ โโโโโโโโโโโฌโโโโโโโโ
- โ โ โ
- โ MCP Protocol (standardized) โ
- โ โ โ
- โโโโโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโโโโ
- โ
- โโโโโโโโโโโโโผโโโโโโโโโโโโโ
- โ Context Sync โ
- โ Memory Layer โ
- โ โ
- โ โข Project Context โ
- โ โข Code Understanding โ
- โ โข Decision History โ
- โ โข Architecture Maps โ
- โ โข File Operations โ
- โ โข Git Integration โ
- โโโโโโโโโโโโโโโโโโโโโโโโโโ
-```
-
-**Every AI tool gets the same shared memory.**
-
----
-
-## Why Open Source?
-
-**AI memory shouldn't be controlled by corporations.**
-
-Context Sync is built by developers who were tired of:
-- Losing context between AI conversations
-- Being locked into single AI platforms
-- Having no control over their development data
-- Paying subscription fees for basic memory functionality
-
-**Our philosophy:**
-- **Local-first**: Your data stays on your machine
-- **Platform agnostic**: Works with any AI that supports MCP
-- **Community-driven**: Built for developers, by developers
-- **Extensible**: Open architecture for unlimited customization
-
----
-
-## Quick Start
-
-```bash
-# Install globally
-npm install -g @context-sync/server
-
-# Restart your AI tool (Claude Desktop, Cursor, VS Code)
-# Then ask: "help me get started with context-sync"
-```
-
-**That's it.** Context Sync auto-configures and guides you through first-time setup with natural language instructions.
-
-
-### ๐ฏ See the Problem in Action
-
-**Before Context Sync:**
-
-```
-Monday - Claude Desktop:
-You: "I'm building a React app with TypeScript, using Supabase for auth..."
-Claude: *helps for 2 hours*
-
-Tuesday - New chat:
-You: "Continue working on my React app"
-Claude: "What React app? Can you describe your project?"
-You: *spends 20 minutes re-explaining everything*
-```
-
-**After Context Sync:**
-
-```
-Monday - Claude Desktop:
-You: "I'm building a React app with TypeScript, using Supabase for auth..."
-Claude: *helps for 2 hours*
-
-Tuesday - New chat (or different AI tool):
-You: "Continue working on my React app"
-Claude: "Continuing your TypeScript React app with Supabase auth. What should we work on next?"
-```
-
-**That's the power of persistent AI memory.**
-
-### ๐ Get Started in 2 Minutes
+
+## Install and auto-config (60 seconds)
+1) Install globally:
```bash
-# 1. Install Context Sync
npm install -g @context-sync/server
-
-# 2. Restart your AI tool (Claude Desktop, Cursor, VS Code)
-
-# 3. Ask for help setting up
-"help me get started with context-sync"
-```
-
-Context Sync automatically detects your platform and guides you through setup with personalized instructions.
-
-> ๐ **Need detailed documentation?** See the [Context Sync v1.0.0 Comprehensive Documentation](documentation/RELEASE_v1.0.0_COMPREHENSIVE_DOCUMENTATION.md) for complete setup guides, troubleshooting, and advanced configuration.
-
-### ๐ง Platform-Specific Setup
-
-
-VS Code + GitHub Copilot
-
-
-Quick Setup for VS Code (30 seconds)
-
-
-After installing Context Sync globally:
-
-1. **Restart VS Code** completely
-2. **Open Copilot Chat** (Ctrl+Shift+I / Cmd+Shift+I)
-3. **Switch to Agent mode** (if available)
-4. **Look for context-sync** in the Tools list
-5. **Test:** Ask Copilot `"help me get started with context-sync"`
-
-Context Sync should appear in available tools! โจ
-
-
-
-### ๐ง Setup for Cursor
-
-
-Quick Setup for Cursor (30 seconds)
-
-
-After installing Context Sync, in Claude Desktop type:
-
-```
-setup cursor
-```
-
-Claude will give you OS-specific instructions! โจ
-
-
-
-
-Manual Cursor Setup
-
-
-1. Open Cursor: `Settings โ MCP`
-
-2. Add this configuration:
-```json
-{
- "mcpServers": {
- "context-sync": {
- "command": "npx",
- "args": ["-y", "@context-sync/server"]
- }
- }
-}
-```
-
-3. Refresh MCP servers
-
-4. Test: `"Help me get started with context-sync"`
-
-Done! โ
-
-
-
-
-
-
-Claude Desktop
-
-
-Quick Setup for Claude Desktop (30 seconds)
-
-
-After installing Context Sync globally:
-
-1. **Restart Claude Desktop** completely
-2. **Start a new conversation**
-3. **Test:** Ask Claude `"help me get started with context-sync"`
-
-Claude will automatically configure itself! โจ
-
-
-
-
-Manual Claude Desktop Setup
-
-
-**Windows:**
-```bash
-# Edit config file
-notepad "%APPDATA%\Claude\claude_desktop_config.json"
-```
-
-**macOS:**
-```bash
-# Edit config file
-open ~/Library/Application\ Support/Claude/claude_desktop_config.json
-```
-
-**Linux:**
-```bash
-# Edit config file
-nano ~/.config/Claude/claude_desktop_config.json
-```
-
-Add this configuration:
-```json
-{
- "mcpServers": {
- "context-sync": {
- "command": "npx",
- "args": ["-y", "@context-sync/server"]
- }
- }
-}
-```
-
-Restart Claude Desktop and test: `"Help me get started with context-sync"`
-
-
-
-
-
-
-Continue.dev (VS Code Extension)
-
-
-Quick Setup for Continue.dev (30 seconds)
-
-
-After installing Context Sync globally:
-
-1. **Install Continue.dev** extension in VS Code
-2. **Restart VS Code** completely
-3. **Open Continue chat** (Ctrl+Shift+M / Cmd+Shift+M)
-4. **Test:** Ask Continue `"help me get started with context-sync"`
-
-Context Sync should be available as an MCP tool! โจ
-
-
-
-
-Manual Continue.dev Setup
-
-
-1. Open Continue config: `~/.continue/config.yaml`
-
-2. Add MCP server configuration:
-```yaml
-mcpServers:
- context-sync:
- command: npx
- args: ["-y", "@context-sync/server"]
-```
-
-3. Restart VS Code
-
-4. Test: `"Help me get started with context-sync"`
-
-
-
-
-
-
-Windsurf IDE
-
-
-Quick Setup for Windsurf (30 seconds)
-
-
-After installing Context Sync globally:
-
-1. **Open Windsurf Settings** โ MCP
-2. **Add new server** with name `context-sync`
-3. **Command:** `npx`
-4. **Args:** `-y @context-sync/server`
-5. **Save and restart** Windsurf
-6. **Test:** Ask Windsurf `"help me get started with context-sync"`
-
-
-
-
-
-
-Zed Editor
-
-
-Quick Setup for Zed (30 seconds)
-
-
-After installing Context Sync globally:
-
-1. **Open Zed Settings** (Cmd+, / Ctrl+,)
-2. **Navigate to MCP section**
-3. **Add server:**
- - **Name:** `context-sync`
- - **Command:** `npx`
- - **Args:** `["-y", "@context-sync/server"]`
-4. **Restart Zed**
-5. **Test:** Ask Zed assistant `"help me get started with context-sync"`
-
-
-
-
-
-
-TabNine
-
-
-Quick Setup for TabNine (30 seconds)
-
-
-After installing Context Sync globally:
-
-1. **Open TabNine settings**
-2. **Navigate to MCP Servers**
-3. **Add new server:**
-```json
-{
- "context-sync": {
- "command": "npx",
- "args": ["-y", "@context-sync/server"]
- }
-}
```
-4. **Restart your editor**
-5. **Test:** Ask TabNine `"help me get started with context-sync"`
-
-
-
+2) Auto-config runs on install. Restart your AI tool.
----
+3) Verify tools:
+- Claude Desktop: Tools list
+- Cursor: Tools list
+- VS Code/Copilot: Copilot Chat -> Agent mode -> Tools
+- Continue.dev: open Continue panel
+- Codex CLI: codex mcp list
+- Claude Code: claude mcp list
-## ๐ Notion Integration
-
-**New in v1.0.3:** Sync your AI context directly to Notion!
-
-Context Sync now includes native Notion API integration with **interactive setup wizard**:
+If auto-config fails, see `docs/TROUBLESHOOTING.md` and `docs/CONFIG.md`.
+If you installed locally (non-global), auto-config does not run.
+## Optional: Notion setup
```bash
-# Install Context Sync
-npm install -g @context-sync/server
-
-# Run the setup wizard
context-sync-setup
-
-# The wizard will:
-# 1. Open your browser to Notion's integration page
-# 2. Guide you step-by-step (just copy/paste 2 values)
-# 3. Test the connection automatically
-# 4. Save everything for you
-```
-
-**Features:**
-- ๐ **Search** your Notion workspace
-- ๐ **Read** and **update** pages with proper formatting
-- ๐ **Create** new documentation automatically
-- ๐ฏ **Export decisions** as Architecture Decision Records (ADRs)
-- ๐ **Generate project dashboards** with tech stack and architecture
-- โจ **Markdown support** - Beautiful, formatted pages in Notion
-
-**Benefits:**
-- โ
**Smart updates** - Skip wizard if already configured
-- โ
**User-friendly setup** - Select pages by number, not UUID
-- โ
**Input validation** - Catches errors before API calls
-- โ
**Native integration** - Works seamlessly with Context Sync tools
-
-๐ **[Full Notion Integration Guide โ](./documentation/NOTION_INTEGRATION.md)**
-
----
-
-
-## How It Works: Distributed AI Memory
-
-Context Sync creates a **persistent knowledge layer** between you and AI systems using the Model Context Protocol (MCP).
-
-**Architecture:**
-
-```
-Your Development Environment
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ IDE/Editor AI Tool Browser/Web โ
-โ โโโโโโโโโโโโโ โโโโโโโโโโโโโ โโโโโโโโโโโโโโโ โ
-โ โ Cursor โ โ Claude โ โ Claude.ai โ โ
-โ โ VS Code โ โ Desktop โ โ Web app โ โ
-โ โ Vim/etc โ โ Copilot โ โ Other AIs โ โ
-โ โโโโโโโฌโโโโโโ โโโโโโโฌโโโโโโ โโโโโโโโฌโโโโโโโ โ
-โโโโโโโโโโผโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโ
- โ โ โ
- โโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโ
- โ MCP Protocol
- โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ Context Sync (Open Source) โ
-โ โ
-โ โโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโ โ
-โ โ Memory โ โ Files โ โ Git & Code โ โ
-โ โ โข Projects โ โ โข Read โ โ โข Status โ โ
-โ โ โข Context โ โ โข Write โ โ โข Diffs โ โ
-โ โ โข History โ โ โข Search โ โ โข Analysis โ โ
-โ โโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโ โ
-โ โ
-โ Local SQLite Database (~/.context-sync/) โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-```
-
-**Key advantages:**
-- **Universal compatibility**: Works with any MCP-enabled AI
-- **Local ownership**: All data stays on your machine
-- **Zero vendor lock-in**: Open source and extensible
-- **Intelligent caching**: Only loads what AI needs when it needs it
-
-
-
----
-
-
-
-## What Makes Context Sync Different
-
-### ๐ Universal AI Memory Layer
-
-**Context Sync isn't just another tool - it's infrastructure.**
-
-**Like Git for code, Context Sync is version control for AI knowledge:**
-
-- **Distributed**: Every AI tool gets the same shared memory
-- **Local-first**: Your data, your control, your privacy
-- **Platform agnostic**: Works with any MCP-compatible AI
-- **Extensible**: Open source architecture for unlimited customization
-
-**Current ecosystem support:**
-- โ
Claude Desktop (Mac/Windows/Linux)
-- โ
Cursor IDE
-- โ
VS Code + GitHub Copilot
-- โ
Continue.dev
-- โ
Windsurf
-- โ
Zed Editor
-- โ
TabNine
-- โ
Any MCP-compatible AI tool
-- ๐ More platforms added regularly via community contributions
-
-### ๐ง Intelligent Context Management
-
-**Smart memory that scales with your projects:**
-
-**Project-Aware Context:**
-- Automatic project detection and initialization
-- Tech stack recognition (TypeScript, React, Python, etc.)
-- Architecture decision tracking with reasoning
-- Code structure understanding and analysis
-
-**Efficient Memory Usage:**
-- **1-3K tokens per project** (not full conversation dumps)
-- On-demand querying (AI requests details as needed)
-- Structured summaries instead of raw chat logs
-- Never saturates context windows
-
-**Developer-Focused Features:**
-- File operations (read, write, search) with approval workflows
-- Git integration (status, diffs, branches)
-- Dependency analysis and call graph tracing
-- TODO management with cross-project tracking
-- Code symbol search and type analysis
-
-### ๐ Privacy-First Architecture
-
-**Your code stays yours:**
-
-- **100% local storage** (SQLite database on your machine)
-- **No cloud dependencies** (optional git-based sync available)
-- **No tracking or analytics** (we don't even have servers!)
-- **Open source transparency** (audit every line of code)
-- **Zero vendor lock-in** (export your data anytime)
-
-
-
----
-
-
-
-## Context Sync vs. Existing Solutions
-
-**The fundamental difference: Context Sync is infrastructure, not a product.**
-
-**Other approaches:**
-- **Chat-based solutions**: Store conversations โ become bloated and slow
-- **Platform-specific tools**: Lock you into one AI tool
-- **Cloud-based services**: Your data on someone else's servers
-- **Proprietary solutions**: No customization, no transparency
-
-**Context Sync approach:**
-- **Memory infrastructure**: Structured, queryable knowledge base
-- **Universal compatibility**: Works with any MCP-enabled AI
-- **Local-first**: Your data stays on your machine
-- **Open source**: Community-driven development and customization
-
-**Complementary, not competitive:**
-- Use with Claude Pro for more daily messages
-- Works alongside `/compact` for conversation compression
-- Enhances existing AI tools rather than replacing them
-
-
-
----
-
-
-
-## ๐ฌ Common Questions
-
-
-Why isn't this built into Claude already?
-
-
-**Honest answer:** Business incentives.
-
-If Claude remembered everything perfectly:
-- You'd have shorter conversations
-- Use fewer messages
-- Hit rate limits slower
-
-Persistent memory makes AI more useful but potentially less profitable for AI companies.
-
-**That's why Context Sync is:**
-- Open source (no profit motive)
-- Local-first (you own your data)
-- Community-driven (built by developers, for developers)
-
-
-
-
-Won't this fill up my context window?
-
-
-**No!** Context Sync uses only **1-3K tokens per project**.
-
-**How it works:**
-1. Stores structured summaries (not full conversations)
-2. AI queries for details on-demand via MCP
-3. Never dumps everything into a new chat
-
-**Analogy:**
-- โ Bad: Loading 10GB codebase into RAM
-- โ
Good: IDE that loads files as needed
-
-**Example:**
-- Your 10K line project with 50 decisions
-- Context Sync summary: ~1.5K tokens
-- AI queries for specific details when needed
-
-You never saturate because you're not copying conversations - you're giving AI access to a queryable database.
-
-
-
-
-Is my data safe and private?
-
-
-**100% local. 100% yours.**
-
-- โ
SQLite database on YOUR machine
-- โ
No cloud sync (unless you configure it)
-- โ
No data sent to our servers (we don't have servers!)
-- โ
Open source - audit the code yourself
-- โ
Delete anytime - just remove `~/.context-sync/`
-
-**Database location:**
-- Mac/Linux: `~/.context-sync/data.db`
-- Windows: `%USERPROFILE%\.context-sync\data.db`
-
-**You control everything.**
-
-
-
-
-Does this work with VS Code?
-
-
-**Yes! Available since v0.6.0!** ๐
-
-VS Code with GitHub Copilot is now fully supported through MCP integration.
-
-**Setup instructions:**
-1. Install Context Sync: `npm install -g @context-sync/server`
-2. Restart VS Code
-3. Open Copilot Chat
-4. Switch to Agent mode
-5. Look for context-sync in Tools list
-
-**Currently works with:**
-- โ
VS Code + GitHub Copilot (new!)
-- โ
Cursor IDE (full support)
-- โ
Claude Desktop (full support)
-
-
-
-
-Does this work with Claude Code CLI?
-
-
-**Also supported since v0.6.0!**
-
-Claude Code just launched and supports MCP, so integration should be straightforward.
-
-We're prioritizing:
-1. VS Code extension
-2. Claude Code CLI
-3. Better onboarding
-
-**Want it sooner?** Let us know in [GitHub Discussions](https://github.com/Intina47/context-sync/discussions)!
-
-
-
-
-Can I use this on mobile?
-
-
-**Not yet.** Mobile requires:
-- Claude mobile app to support MCP (not available yet)
-- OR a custom mobile app (planned for future)
-
-**Current workaround:**
-- Use Claude.ai web on mobile (read-only)
-- Full features on desktop only
-
-Mobile support depends on Anthropic adding MCP to their mobile app.
-
-
-
-
-How much does this cost?
-
-
-**Context Sync is 100% free and open source** (MIT License).
-
-**Why free?**
-- Built by developers, for developers
-- Solves a problem we personally had
-- Community-driven development
-- No profit motive = no business incentives to limit features
-
-**You might pay for:**
-- Claude Pro subscription (recommended but not required)
-- Your time (2 minutes to set up)
-
-That's it!
-
-
-
-
-What if I have multiple projects?
-
-
-**Context Sync handles multiple projects beautifully!**
-
-```bash
-You: "Switch to my blog project"
-AI: [loads blog context instantly]
-
-You: "List my projects"
-AI:
- 1. TaskFlow (Next.js + Supabase)
- 2. Personal Blog (Astro)
- 3. Client Website (WordPress)
-
-You: "Switch to TaskFlow"
-AI: [back to TaskFlow context]
-```
-
-Each project maintains its own:
-- Architecture decisions
-- Tech stack
-- TODOs
-- Code context
-- Conversation history
-
-
-
-
-
----
-
-
-
-## ๐ฌ Real-World Example
-
-### Freelance Developer Workflow
-
-**Monday Morning (Cursor):**
-```
-You: "Initialize project 'EcommerceClient' - Next.js 14, Stripe, PostgreSQL"
-AI: "Project created! โ"
-*Build product catalog for 3 hours*
-```
-
-**Tuesday Afternoon (Claude Desktop):**
-```
-You: "Continue EcommerceClient - add shopping cart"
-AI: "Adding cart to your Next.js app with Stripe integration.
- Using the product schema we defined yesterday..."
-```
-
-**Wednesday (Cursor):**
-```
-You: "Switch back to Cursor. Review cart implementation"
-AI: "Analyzing cart code... found 2 potential improvements..."
-```
-
-**No re-explaining. No context loss. Just continuous progress across tools.**
-
-
-
----
-
-
-
-## ๐บ๏ธ Roadmap
-
-
-
-
- โ
v0.6.1 - prev
- Released October 2025
-
- โ VS Code & GitHub Copilot support
- โ Performance optimizations
- โ Async file operations
- โ File size limits & safety
- โ Real-time cache invalidation
- |
-
- ๐ v1.0.0 - Current
- release November 2025
-
- โข Windsurf, Tabnine, Zed, Continue intergration
- โข Enhanced VS Code integration
- โข Enhanced cursor integration
- โข Better onboarding flow
- โข Improved documentation
- โข Additional performance optimizations
- |
-
- ๐ฎ Future
- Coming later
-
- โข Mobile support
- โข Team collaboration
- โข Analytics dashboard
- โข More AI platforms
- โข Advanced features
- |
-
-
-
-**See detailed roadmap:** [ROADMAP.md](ROADMAP.md)
-
-
-
----
-
-
-
-## ๐ Stats
-
-
-
-

-

-

-

-
-
-
-**Recent milestones:**
-- ๐ 26 stars in first 24 hours
-- ๐ฆ 400+ npm downloads in launch week
-- ๐ #17 on Product Hunt
-- ๐ฌ 20K+ Reddit impressions
-
-
-
-
-
----
-
-
-
-## ๐ ๏ธ Advanced Features
-
-
-Full Feature List
-
-
-### Project Management
-- Initialize and switch between projects
-- Track architecture and tech stack
-- Store design decisions with reasoning
-- Manage TODOs and priorities
-
-### Code Analysis
-- Dependency graph analysis
-- Function call traces
-- Type definition lookup
-- Find all references
-- Impact analysis
-
-### File Operations
-- Read project structure
-- Search files and content
-- Modify files (with approval)
-- Preview changes before applying
-
-### Git Integration
-- Status and diff viewing
-- Branch information
-- Commit message suggestions
-- Change tracking
-
-### Cross-Platform
-- Seamless Cursor โ Claude sync
-- Platform-specific optimizations
-- Context handoff automation
-
-### Developer Tools
-- 50+ MCP tools
-- Extensible architecture
-- TypeScript + SQLite
-- Open source
-
-
-
-
-
----
-
-
-
-## Join the AI Infrastructure Revolution
-
-**Context Sync is built by the developer community, for the developer community.**
-
-### ๐ Why Contribute?
-
-**You're not just contributing to a tool - you're building the future of AI-assisted development.**
-
-- **Shape AI tooling standards**: Help define how AI systems should handle persistent memory
-- **Solve your own problems**: Build features you need for your workflow
-- **Learn cutting-edge tech**: Work with MCP, TypeScript, SQLite, and AI integrations
-- **Join a movement**: Be part of making AI development tools truly open and extensible
-
-### ๐ ๏ธ Ways to Contribute
-
-**Code Contributions:**
-- Add support for new AI platforms (Gemini, Ollama, etc.)
-- Implement new analysis tools (Python dependency tracking, etc.)
-- Build integrations for more editors (Vim, Emacs, etc.)
-- Optimize performance and memory usage
-
-**Non-Code Contributions:**
-- Write documentation and tutorials
-- Test beta features and report bugs
-- Share use cases and workflows
-- Help other developers in discussions
-- Create example projects and templates
-
-**Community Building:**
-- Share Context Sync with fellow developers
-- Write blog posts about your experience
-- Speak at conferences or meetups
-- Contribute to roadmap planning
-
-### ๐ Current Priorities
-
-**Help wanted on:**
-- [ ] **Python ecosystem support** - pip, poetry, requirements analysis
-- [ ] **Mobile/web integration** - React Native, Expo, web development workflows
-- [ ] **Docker/containerization** - workspace detection for containerized apps
-- [ ] **More Git integrations** - PR analysis, commit message generation
-- [ ] **Performance optimizations** - faster workspace scanning, better caching
-- [ ] **UI/dashboard development** - web interface for memory management
-
-### ๐ฏ Get Started Contributing
-
-1. **Star the repo** to show support
-2. **Join discussions** to share ideas and get help
-3. **Read [CONTRIBUTING.md](CONTRIBUTING.md)** for technical guidelines
-4. **Pick an issue** labeled `good-first-issue` or `help-wanted`
-5. **Submit a PR** - we review quickly and provide feedback
-
-**New to open source?** We're beginner-friendly! Many contributors started here.
-
-### ๐ฌ Community Links
-
-- **GitHub Discussions**: Feature requests, questions, showcase
-- **Issues**: Bug reports and feature planning
-- **Pull Requests**: Code contributions welcome
-- **Roadmap**: [ROADMAP.md](ROADMAP.md) - see what's coming next
-
-
-
----
-
-
-
-## ๐ง Troubleshooting
-
-
-Claude Desktop doesn't see Context Sync
-
-
-1. Verify installation:
-```bash
-context-sync --version
-```
-
-2. Check config file:
-```bash
-# Mac/Linux
-cat ~/.config/Claude/claude_desktop_config.json
-
-# Windows
-type %APPDATA%\Claude\claude_desktop_config.json
-```
-
-3. Restart Claude completely:
-- Mac: `โ + Q` (force quit)
-- Windows: Right-click tray icon โ Exit
-
-4. Check MCP servers in Claude: Look for Context Sync in settings
-
-Still stuck? [Create an issue](https://github.com/Intina47/context-sync/issues)
-
-
-
-
-Cursor doesn't see Context Sync
-
-
-1. Open Cursor settings: `Settings โ MCP`
-
-2. Verify configuration exists:
-```json
-{
- "mcpServers": {
- "context-sync": {
- "command": "npx",
- "args": ["-y", "@context-sync/server"]
- }
- }
-}
-```
-
-3. Refresh MCP servers in Cursor
-
-4. Test: Ask AI "What's my current project?"
-
-
-
-
-"No active project" error
-
-
-Set up a workspace first:
-
-```bash
-You: "Set workspace to /path/to/your/project"
-```
-
-Or check existing projects:
-
-```bash
-You: "What projects do I have?"
-You: "Switch to [project-name]"
-```
-
-
-
-
-Context not syncing between platforms
-
-
-1. Check platform status:
-```bash
-You: "Check platform status"
-```
-
-2. Verify both platforms are configured
-
-3. Try manual sync:
-```bash
-You: "Sync context to Cursor"
-```
-
-
-
-**More help:** [TROUBLESHOOTING.md](TROUBLESHOOTING.md)
-
-
-
----
-
-
-
----
-
-## License & Philosophy
-
-**MIT License** - Use commercially, modify freely, redistribute openly.
-
-Context Sync is **truly open source**:
-- No dual licensing schemes
-- No "enterprise" vs "community" versions
-- No feature paywalls or subscription tiers
-- No proprietary extensions or locked ecosystem
-
-**Why MIT?** Because AI tooling infrastructure should belong to the developer community, not corporations.
-
----
-
-## The Vision
-
-**We're building the git of AI development.**
-
-Just as git transformed how developers collaborate on code, Context Sync is transforming how developers collaborate with AI systems.
-
-**Current state**: AI tools are isolated, forgetful, and platform-locked
-**Our vision**: Universal, persistent, extensible AI memory layer
-**End goal**: AI that truly understands and remembers your development context across all tools and platforms
-
-**This is bigger than Context Sync** - we're establishing standards and protocols that any tool can implement. Think of us as the Apache Foundation for AI development infrastructure.
-
----
-
-## Star History & Community
-
-**Growing fast thanks to developers like you:**
-
-[](https://star-history.com/#Intina47/context-sync&Date)
-
-**Recent milestones:**
-- ๐ 26 stars in first 24 hours
-- ๐ฆ 400+ npm downloads in launch week
-- ๐ฌ Active community in GitHub Discussions
-- ๐ Growing contributor base from 6 countries
-
----
-
-## Spread the Word
-
-**Help other developers discover Context Sync:**
-
-- โญ **Star the repository** (most important!)
-- ๐ฆ **Share on social media** with your experience
-- ๏ฟฝ **Write about it** in blogs, newsletters, forums
-- ๐ฌ **Tell colleagues** about the productivity gains
-- ๐ค **Present at meetups** or conferences
-- ๐ค **Contribute code or documentation**
-
-**Every developer who discovers Context Sync makes the AI development ecosystem a little more open and powerful.**
-
----
-
-
-
-**Built by developers, for developers. Join the movement.**
-
-[โญ Star on GitHub](https://github.com/Intina47/context-sync) โข [๐ Documentation](documentation/) โข [๐ฌ Discussions](https://github.com/Intina47/context-sync/discussions) โข [๐ Issues](https://github.com/Intina47/context-sync/issues)
-
-
+# or
+npx context-sync-setup
+```
+
+## Supported platforms
+- Claude Desktop
+- Cursor
+- VS Code + GitHub Copilot
+- Continue.dev
+- Zed
+- Windsurf
+- Codeium
+- TabNine
+- Codex CLI
+- Claude Code
+- Antigravity (Google Gemini IDE)
+
+## First-time workflow
+```text
+1) set_project({ path: "/abs/path/to/project" })
+2) recall()
+3) read_file({ path: "src/index.ts" })
+4) remember({ type: "decision", content: "Use SQLite for local storage" })
+```
+
+Tool reference: `docs/TOOLS.md`
+
+## Data and config
+- Database: `~/.context-sync/data.db`
+- Config: `~/.context-sync/config.json`
+- Install status: `~/.context-sync/install-status.json`
+- Custom DB: `context-sync --db-path /absolute/path/to/db` or `CONTEXT_SYNC_DB_PATH`
+
+More details: `docs/DATA.md`
+
+## Git hooks
+When you run `set_project` inside a git repo, Context Sync installs hooks:
+- post-commit
+- pre-push
+- post-merge
+- post-checkout
+
+Hooks are marked with "Context Sync Auto-Hook" and existing hooks are backed up.
+
+## Docs
+- Release notes: `docs/RELEASE_NOTES.md`
+- Manual config: `docs/CONFIG.md`
+- Tools: `docs/TOOLS.md`
+- Troubleshooting: `docs/TROUBLESHOOTING.md`
+- Data and storage: `docs/DATA.md`
diff --git a/ROADMAP.md b/ROADMAP.md
deleted file mode 100644
index 08329e4..0000000
--- a/ROADMAP.md
+++ /dev/null
@@ -1,302 +0,0 @@
-# ๐บ๏ธ Context Sync Roadmap
-
-## Vision
-
-**Transform AI coding from chat-based to workspace-native.**
-
-Context Sync aims to be the bridge between AI assistants and your development environment - providing persistent memory, file access, and intelligent context management across all your projects.
-
----
-
-## ๐ Released
-
-### โ
v0.1.0 - Foundation (Oct 16, 2025)
-- Cross-chat persistent memory
-- Project context tracking
-- Decision history
-- Conversation archive
-- Local SQLite storage
-- MCP prompts for auto-context injection
-- Universal setup script
-
-### โ
v0.2.0 - Workspace Support (Oct 20, 2025)
-- IDE-like workspace management
-- File reading capabilities
-- Project structure visualization
-- Intelligent workspace scanning
-- Multi-language support (20+ languages)
-- Smart file filtering
-- File caching for performance
-- Cross-platform path handling
-
-### โ
v0.3.0 - File Writing & Git (Oct 21, 2025)
-- File create/modify/delete with preview-approve workflow
-- Advanced search (files, content, symbols)
-- Git integration (status, diff, branches)
-- Undo/redo functionality
-- Dependency analysis
-- Call graph analysis
-- Type analysis (TypeScript/JS)
-
-### โ
v0.4.0 - Advanced Analysis (Oct 21, 2025)
-- Cross-platform sync (Claude โ Cursor โ VS Code)
-- Enhanced code analysis tools
-- Platform detection and configuration
-
-### โ
v0.5.0 - Todo Management (Oct 22, 2025)
-- Global todo list system
-- Priority and status tracking
-- Tags and filtering
-- Statistics and insights
-
-### โ
v0.6.0 - Performance & VS Code (Oct 28, 2025)
-- Performance optimizations (async I/O, caching)
-- File size limits (5MB max)
-- Real-time cache invalidation with chokidar
-- VS Code/GitHub Copilot support
-- Prepared statement caching (2-5x faster queries)
-- Database query optimization
-
----
-
-## ๐ง In Progress
-
-### v0.6.1 - Storage Management (Dec 2025)
-
-**Theme:** "Clean up and organize"
-
-#### ๐พ Project Lifecycle Management
-- **Project Namespacing** - Organize projects by status (active/archived/deleted)
-- **Automatic Archival** - Archive projects inactive for 6+ months
-- **Storage Dashboard** - See disk usage per project
-- **Smart Cleanup** - Safe deletion with dry-run preview
-
-#### ๐งน Data Retention Controls
-- **Configurable Retention** - User-defined retention periods
-- **Conversation Pruning** - Keep recent full, older as decisions only
-- **Manual Archive Commands** - Explicitly archive/restore projects
-- **Storage Insights** - Identify space-hungry projects
-
-#### โก Cleanup Daemon
-- **Background Processing** - Lightweight daemon for auto-cleanup
-- **Scheduled Tasks** - Daily storage checks
-- **Safe Defaults** - Conservative cleanup rules
-- **User Notifications** - Alert before major cleanups
-
-**Expected Release:** December 2025
-
----
-
-## ๐ฎ Planned
-
-### v0.6.5 - Cloud Backup (Q1 2026)
-
-**Theme:** "Never lose context"
-
-#### โ๏ธ Cloud Storage (Premium)
-- **Encrypted Backup** - Secure cloud storage for context
-- **Auto-Sync** - Automatically sync old projects to cloud
-- **Free Local Space** - Archive to cloud, free up disk
-- **Multi-Device Access** - Access context from anywhere
-
-#### ๐ฐ Monetization Tiers
-- **Free Tier** - 5 projects or 100MB local storage
-- **Premium Tier** - Unlimited cloud storage + auto-sync
-- **Service Premium** - Pay for convenience, not features
-- **Value Proposition** - "Never lose context, free up disk space"
-
-#### ๐ Smart Sync
-- **Selective Sync** - Choose which projects to sync
-- **Bandwidth Control** - Limit sync speed
-- **Offline Mode** - Full functionality without cloud
-- **Conflict Resolution** - Handle multi-device edits
-
-#### ๐ Security
-- **End-to-End Encryption** - Zero-knowledge architecture
-- **Local-First** - Cloud optional, not required
-- **Data Portability** - Export your data anytime
-- **GDPR Compliant** - Privacy-focused design
-
-**Expected Release:** Q1 2026
-
----
-
-### v0.7.0 - Team Collaboration (Q2 2026)
-
-**Theme:** "Team knowledge sharing"
-
-#### ๐ฅ Team Features
-- **Shared Context** - Optional team-wide project context
-- **Knowledge Base** - Collective decision repository
-- **Onboarding Assistant** - Help new developers understand codebase
-- **Code Reviews** - Collaborative review with AI assistance
-
-#### ๐ Enterprise Features
-- **SSO Integration** - Single sign-on support
-- **Audit Logs** - Track all context changes
-- **Role-Based Access** - Control who sees what
-- **Team Workspaces** - Shared project environments
-
-#### ๐ Team Analytics
-- **Team Insights** - How your team uses Context Sync
-- **Productivity Metrics** - Measure time saved
-- **Decision Tracking** - Understand architectural evolution
-- **Knowledge Gaps** - Identify undocumented areas
-
-**Expected Release:** Q2 2026
-
----
-
-## ๐ Future Ideas (Beyond 2026)
-
-### Multi-IDE Support
-- IntelliJ IDEA / WebStorm
-- Sublime Text
-- Neovim integration
-- Emacs support
-
-### Language-Specific Features
-- **JavaScript/TypeScript:** Package dependency analysis
-- **Python:** Virtual environment awareness
-- **Rust:** Cargo integration
-- **Go:** Module understanding
-
-### Smart Context Features (If User Demand Emerges)
-- **Semantic Search** - Natural language code search
-- **Pattern Recognition** - Identify common patterns
-- **Auto-Tagging** - Automatically categorize code
-- **Smart Summaries** - AI-powered context summaries
-
-> Note: Context Engine (AI-powered features) archived for potential future use. See `archive/v7-ai-layer` branch. Focus remains on simple, effective tools that leverage MCP's lazy loading architecture.
-
-### Platform Expansion
-- **Mobile Apps** - iOS/Android Context Sync
-- **Web Dashboard** - Browser-based project management
-- **CLI Tool** - Terminal-first interface
-- **API Server** - REST API for integrations
-
-### Integration Ecosystem
-- **Jira/Linear** - Link context to issues
-- **GitHub/GitLab** - PR context integration
-- **Slack/Discord** - Chat-based context queries
-- **Notion/Confluence** - Documentation sync
-
----
-
-## ๐ฏ Success Metrics
-
-### v0.6.0 Achievements โ
-- โก 2-5x faster database queries with prepared statements
-- ๐ 5MB file size limit prevents OOM crashes
-- ๐ Real-time cache invalidation with file watchers
-- ๐ป VS Code/GitHub Copilot support added
-
-### v0.6.1 Goals
-- ๐พ Reduce average local storage by 40% through smart cleanup
-- ๐งน Archive 90% of inactive projects automatically
-- ๐ Storage dashboard with <100ms load time
-- ๐ฏ Zero accidental data loss with safe defaults
-
-### v0.6.5 Goals
-- โ๏ธ 1,000+ premium subscribers
-- ๐ฐ $10-20 MRR per premium user
-- ๐ 99.9% uptime for cloud sync
-- ๐ Zero-knowledge encryption architecture
-
-### v0.7.0 Goals
-- ๐ฅ 100+ teams using Context Sync
-- ๐ Average 2 hours saved per developer per week
-- โญ 10,000+ GitHub stars
-- ๐ค 50+ active community contributors
-
----
-
-## ๐ค How to Influence Roadmap
-
-We're building Context Sync FOR developers. Your input matters!
-
-### Ways to Contribute Ideas
-
-1. **GitHub Discussions** - Share use cases and feature requests
-2. **Issues** - Report bugs and suggest improvements
-3. **Polls** - Vote on prioritization
-4. **Pull Requests** - Implement features yourself
-5. **Discord** - Join the community (coming soon)
-
-### Most Requested Features
-
-Based on community feedback:
-1. โ
File writing capabilities (shipped v0.3.0)
-2. โ
VS Code integration (shipped v0.6.0)
-3. โ
Cursor integration (shipped v0.4.0)
-4. ๐ฅ Storage management (coming v0.6.1)
-5. ๐ฅ Cloud backup (coming v0.6.5)
-6. ๐ฅ Team collaboration (coming v0.7.0)
-
----
-
-## ๐
Release Schedule
-
-| Version | Theme | Target | Status |
-|---------|-------|--------|--------|
-| v0.1.0 | Foundation | Oct 2025 | โ
Released |
-| v0.2.0 | Workspace | Oct 2025 | โ
Released |
-| v0.3.0 | File Writing & Git | Oct 2025 | โ
Released |
-| v0.4.0 | Advanced Analysis | Oct 2025 | โ
Released |
-| v0.5.0 | Todo Management | Oct 2025 | โ
Released |
-| v0.6.0 | Performance & VS Code | Oct 2025 | โ
Released |
-| v0.6.1 | Storage Management | Dec 2025 | ๐ง In Progress |
-| v0.6.5 | Cloud Backup | Q1 2026 | ๐ Planned |
-| v0.7.0 | Team Collaboration | Q2 2026 | ๐ Planned |
-
----
-
-## ๐ก Philosophy
-
-**Our guiding principles:**
-
-1. **Local-First** - Your data, your machine (cloud optional)
-2. **Developer-Centric** - Build for real workflows
-3. **AI-Native** - Designed for AI collaboration from day one
-4. **Open Source** - Community-driven development
-5. **Privacy-Focused** - You control your context
-6. **Performance** - Fast enough to feel instant
-7. **Simple** - Complexity hidden, interface clean
-
----
-
-## ๐ฌ Get Involved
-
-Context Sync is open source and we welcome contributors!
-
-**Areas we need help:**
-- ๐จ UI/UX design for dashboard
-- ๐ IDE plugin development
-- ๐ Documentation improvements
-- ๐งช Testing and QA
-- ๐ Internationalization
-- ๐ Analytics implementation
-
-See [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines.
-
----
-
-## ๐ฌ Stay Updated
-
-- ๐ง **Email:** [Subscribe to newsletter](#) (coming soon)
-- ๐ฆ **Twitter:** [@ContextSync](#) (coming soon)
-- ๐ฌ **Discord:** [Join community](#) (coming soon)
-- ๐ฐ **Blog:** [Read updates](#) (coming soon)
-
----
-
-
- Building the future of AI-assisted development, one version at a time.
-
-
-
- What's Shipped โข
- What's Next โข
- Future Plans โข
- Contribute Ideas
-
\ No newline at end of file
diff --git a/TROUBLESHOOTING.md b/TROUBLESHOOTING.md
deleted file mode 100644
index aa89784..0000000
--- a/TROUBLESHOOTING.md
+++ /dev/null
@@ -1,431 +0,0 @@
-# Troubleshooting Guide
-
-## Common Issues
-
-### Setup Script Fails
-
-**Symptom:** `node setup.js` exits with error
-
-**Solutions:**
-1. Check Node.js version: `node --version` (need 18+)
-2. Delete `node_modules` and reinstall: `rm -rf node_modules && npm install`
-3. Try manual setup (see README)
-
----
-
-### "Server Disconnected" in Claude
-
-**Symptom:** Claude shows "context-sync failed" in settings
-
-**Debug steps:**
-
-1. **Verify build exists:**
- ```bash
- ls dist/index.js
- # Should show the file
- ```
-
-2. **Test server manually:**
- ```bash
- node dist/index.js
- # Should show: "Context Sync MCP server running on stdio"
- # Press Ctrl+C to stop
- ```
-
-3. **Check config path:**
- - Mac: `cat ~/Library/Application\ Support/Claude/claude_desktop_config.json`
- - Windows: `type %APPDATA%\Claude\claude_desktop_config.json`
- - Linux: `cat ~/.config/Claude/claude_desktop_config.json`
-
- Path should match your actual dist/index.js location
-
-4. **Check Claude logs:**
- - Mac: `~/Library/Logs/Claude/`
- - Windows: `%LOCALAPPDATA%\Claude\logs\`
- - Linux: `~/.local/state/Claude/logs/`
-
- Look for `mcp-server-context-sync.log`
-
-**Common fixes:**
-- Wrong path in config โ Update to absolute path
-- Path has spaces โ Wrap in quotes
-- Windows: Use `C:\\` or `C:/` format, not `/c/`
-
----
-
-### "No Active Project" Error
-
-**Symptom:** Claude says "No active project" when you ask about context
-
-**Solution:**
-Initialize a project first:
-```
-You: Initialize a project called "my-app"
-```
-
-Or check if you have projects:
-```bash
-sqlite3 ~/.context-sync/data.db "SELECT * FROM projects;"
-```
-
----
-
-### Context Not Loading in New Chats
-
-**Symptom:** Open new chat, Claude doesn't remember project
-
-**Debug:**
-1. Check project is initialized: Ask "What projects do I have?"
-2. Check MCP prompts are working: Claude should auto-inject context
-3. Try explicitly: "Get my project context"
-
-**If still not working:**
-- Rebuild: `npm run build`
-- Restart Claude Desktop completely
-- Check version is 0.1.0+: Look in package.json
-
----
-
-### Database Errors
-
-**Symptom:** SQLite errors in logs
-
-**Solutions:**
-
-1. **Corrupted database:**
- ```bash
- # Backup first
- cp ~/.context-sync/data.db ~/.context-sync/data.db.backup
-
- # Delete and recreate
- rm ~/.context-sync/data.db
- # Restart Claude, database will be recreated
- ```
-
-2. **Permission issues:**
- ```bash
- # Check permissions
- ls -la ~/.context-sync/
-
- # Fix permissions
- chmod 755 ~/.context-sync
- chmod 644 ~/.context-sync/data.db
- ```
-
----
-
-### Windows Path Issues
-
-**Symptom:** Setup works but Claude can't connect (Windows only)
-
-**Solution:**
-Paths must use Windows format in config:
-
-**โ Wrong:**
-```json
-"/c/Users/name/context-sync/dist/index.js"
-```
-
-**โ
Correct:**
-```json
-"C:\\Users\\name\\context-sync\\dist\\index.js"
-```
-
-Or use forward slashes:
-```json
-"C:/Users/name/context-sync/dist/index.js"
-```
-
-See [WINDOWS.md](WINDOWS.md) for detailed Windows setup.
-
----
-
-### Build Fails
-
-**Symptom:** `npm run build` exits with TypeScript errors
-
-**Solutions:**
-
-1. **Clean build:**
- ```bash
- rm -rf dist
- rm -rf node_modules
- npm install
- npm run build
- ```
-
-2. **Check TypeScript version:**
- ```bash
- npx tsc --version
- # Should be 5.3.3 or higher
- ```
-
-3. **Node version mismatch:**
- ```bash
- node --version
- # Must be 18 or higher
- ```
-
----
-
-### Multiple Projects Confusion
-
-**Symptom:** Claude uses wrong project context
-
-**Solution:**
-
-Check current project:
-```
-You: What is my current active project?
-```
-
-Switch projects:
-```
-You: Initialize project "other-app"
-```
-
-List all projects:
-```bash
-sqlite3 ~/.context-sync/data.db "SELECT id, name, is_current FROM projects;"
-```
-
----
-
-### Performance Issues
-
-**Symptom:** Claude responses are slow
-
-**Causes:**
-- Too much context being loaded
-- Large database file
-- Many old conversations
-
-**Solutions:**
-
-1. **Check database size:**
- ```bash
- ls -lh ~/.context-sync/data.db
- # If > 100MB, might need cleanup
- ```
-
-2. **Limit context in code** (advanced):
- Edit `src/storage.ts` - reduce `limit` in `getRecentConversations`
-
-3. **Start fresh:**
- ```bash
- mv ~/.context-sync/data.db ~/.context-sync/data.db.old
- # Creates new database
- ```
-
----
-
-### Prompts Not Auto-Injecting
-
-**Symptom:** Context doesn't load automatically in new chats
-
-**Check:**
-
-1. **Verify prompts capability:**
- Look in `src/server.ts` - should have:
- ```typescript
- capabilities: {
- tools: {},
- prompts: {}, // This line
- }
- ```
-
-2. **Check MCP version:**
- ```bash
- npm list @modelcontextprotocol/sdk
- # Should be 0.5.0+
- ```
-
-3. **Rebuild:**
- ```bash
- npm run build
- ```
-
----
-
-## Platform-Specific Issues
-
-### macOS
-
-**Symptom:** Permission denied errors
-
-**Solution:**
-```bash
-# Allow Terminal/Claude to access files
-System Preferences โ Security & Privacy โ Files and Folders
-```
-
-**Symptom:** "command not found: node"
-
-**Solution:**
-```bash
-# Add Node to PATH
-export PATH="/usr/local/bin:$PATH"
-# Or reinstall Node from nodejs.org
-```
-
----
-
-### Windows
-
-**Symptom:** PowerShell execution policy error
-
-**Solution:**
-```powershell
-Set-ExecutionPolicy -Scope Process -ExecutionPolicy Bypass
-```
-
-**Symptom:** Paths with spaces don't work
-
-**Solution:**
-Use quotes in config:
-```json
-"C:/Program Files/context-sync/dist/index.js"
-```
-
----
-
-### Linux
-
-**Symptom:** Claude Desktop not found
-
-**Solution:**
-Linux isn't officially supported by Claude Desktop yet. Check:
-```bash
-which claude
-```
-
----
-
-## Advanced Debugging
-
-### Enable Verbose Logging
-
-Add to `src/index.ts`:
-```typescript
-process.env.DEBUG = '*';
-```
-
-Rebuild and check logs.
-
-### Inspect Database
-
-```bash
-sqlite3 ~/.context-sync/data.db
-
-# List all tables
-.tables
-
-# Check projects
-SELECT * FROM projects;
-
-# Check recent decisions
-SELECT * FROM decisions ORDER BY timestamp DESC LIMIT 10;
-
-# Check conversation count
-SELECT COUNT(*) FROM conversations;
-
-# Exit
-.quit
-```
-
-### Test MCP Server Directly
-
-```bash
-# Install MCP inspector
-npm install -g @modelcontextprotocol/inspector
-
-# Run inspector
-npx @modelcontextprotocol/inspector node dist/index.js
-```
-
-Opens web UI to test tools/prompts directly.
-
----
-
-## Still Having Issues?
-
-### Before Opening an Issue
-
-1. Check if issue already exists: [GitHub Issues](https://github.com/Intina47/context-sync/issues)
-2. Try all relevant solutions above
-3. Collect debug info (see below)
-
-### Debug Info to Include
-
-When opening an issue, include:
-
-```bash
-# System info
-node --version
-npm --version
-# Your OS and version
-
-# Context Sync info
-cat package.json | grep version
-
-# Build status
-ls -la dist/
-
-# Config content (remove sensitive paths)
-# Mac: cat ~/Library/Application\ Support/Claude/claude_desktop_config.json
-# Windows: type %APPDATA%\Claude\claude_desktop_config.json
-
-# Recent logs (last 50 lines)
-# Mac: tail -50 ~/Library/Logs/Claude/mcp-server-context-sync.log
-# Windows: Get-Content $env:LOCALAPPDATA\Claude\logs\mcp-server-context-sync.log -Tail 50
-```
-
-### Get Help
-
-- **GitHub Issues:** [Report bugs](https://github.com/Intina47/context-sync/issues)
-- **GitHub Discussions:** [Ask questions](https://github.com/Intina47/context-sync/discussions)
-- **Community:** Check if others had the same issue
-
----
-
-## Emergency Reset
-
-If everything is broken:
-
-```bash
-# 1. Stop Claude Desktop
-
-# 2. Backup your data (optional)
-cp -r ~/.context-sync ~/.context-sync.backup
-
-# 3. Remove Context Sync
-rm -rf ~/.context-sync
-
-# 4. Remove from Claude config
-# Edit the config file and remove the context-sync section
-
-# 5. Fresh install
-cd context-sync
-rm -rf node_modules dist
-npm install
-npm run build
-node setup.js
-
-# 6. Restart Claude Desktop
-```
-
----
-
-## Quick Fixes Checklist
-
-Try these in order:
-
-- [ ] Restart Claude Desktop
-- [ ] Rebuild: `npm run build`
-- [ ] Test manually: `node dist/index.js`
-- [ ] Check config path is correct
-- [ ] Verify Node.js 18+
-- [ ] Delete and recreate database
-- [ ] Fresh install
-
-Usually one of these fixes it! ๐
\ No newline at end of file
diff --git a/bin/auto-configurator.cjs b/bin/auto-configurator.cjs
index 861a787..456b910 100644
--- a/bin/auto-configurator.cjs
+++ b/bin/auto-configurator.cjs
@@ -5,7 +5,6 @@ const path = require('path');
const os = require('os');
const { execSync } = require('child_process');
const PLATFORM_CONFIGS = require('./platform-configs.cjs');
-const TieredDetector = require('./tiered-detector.cjs');
const yaml = require('js-yaml');
/**
@@ -23,7 +22,6 @@ class PlatformAutoConfigurator {
this.packagePath = packagePath;
this.verbose = verbose;
this.platform = os.platform();
- this.tieredDetector = new TieredDetector();
this.results = {
detected: [],
configured: [],
@@ -36,32 +34,52 @@ class PlatformAutoConfigurator {
* Main entry point - detect and configure all platforms
*/
async configureAllPlatforms() {
- console.log('๐ Scanning for installed AI platforms...\n');
-
- // Focus on local development and desktop apps first
- const localPlatforms = ['claude', 'cursor', 'copilot', 'continue', 'zed', 'windsurf', 'codeium', 'tabnine'];
+ console.log(' Scanning for installed AI platforms...\n');
+
+ const platformOrder = [
+ 'claude',
+ 'cursor',
+ 'copilot',
+ 'continue',
+ 'zed',
+ 'windsurf',
+ 'codeium',
+ 'tabnine',
+ 'codex',
+ 'continue-dev',
+ 'claude-code',
+ 'antigravity'
+ ];
- for (const platformId of localPlatforms) {
+ for (const platformId of platformOrder) {
const config = PLATFORM_CONFIGS[platformId];
if (!config) continue;
- console.log(`๐ Checking ${config.name}...`);
+ if (config.enabled === false) {
+ const reason = config.todo ? 'TODO (not configured yet)' : 'Disabled';
+ this.results.skipped.push({ platform: platformId, reason });
+ console.log(` ${config.name}...`);
+ console.log(` ${config.name} skipped: ${reason}\n`);
+ continue;
+ }
+
+ console.log(` Checking ${config.name}...`);
- const isInstalled = await this.detectPlatform(platformId, config);
- if (isInstalled) {
+ const detection = await this.detectPlatform(platformId, config);
+ if (detection) {
this.results.detected.push(platformId);
- console.log(` โ
${config.name} detected`);
+ console.log(` ${config.name} detected`);
const configResult = await this.configurePlatform(platformId, config);
if (configResult.success) {
this.results.configured.push(platformId);
- console.log(` ๐ง ${config.name} configured successfully`);
+ console.log(` ${config.name} configured successfully`);
} else {
this.results.skipped.push({ platform: platformId, reason: configResult.reason });
- console.log(` โ ๏ธ ${config.name} skipped: ${configResult.reason}`);
+ console.log(` ${config.name} skipped: ${configResult.reason}`);
}
} else {
- console.log(` โช ${config.name} not installed`);
+ console.log(` ${config.name} not installed`);
}
console.log('');
}
@@ -71,20 +89,59 @@ class PlatformAutoConfigurator {
/**
* Detect if a platform is installed on the system
- * Uses tiered detection: current working method first, enhanced as fallback
*/
async detectPlatform(platformId, config) {
- const result = await this.tieredDetector.detectPlatform(platformId, config);
-
- if (result) {
- const reliabilityNote = result.reliable ? '' : ' (fallback detection)';
+ const detection = config.detection || {};
+
+ const pathResult = this.detectByPaths(detection.paths);
+ if (pathResult) {
if (this.verbose) {
- console.log(` Found via ${result.method}: ${result.path}${reliabilityNote}`);
+ console.log(` Found via path: ${pathResult}`);
+ }
+ return { method: 'path', path: pathResult };
+ }
+
+ if (detection.extensionId || detection.extensionCheck) {
+ const extFound = await this.checkVSCodeExtension(platformId, detection);
+ if (extFound) {
+ if (this.verbose) {
+ console.log(` Found via extension: ${extFound}`);
+ }
+ return { method: 'extension', path: extFound };
}
- return true;
}
- return false;
+ if (detection.command) {
+ try {
+ execSync(detection.command, { stdio: 'ignore' });
+ if (this.verbose) {
+ console.log(` Found via command: ${detection.command}`);
+ }
+ return { method: 'command', path: detection.command };
+ } catch (error) {
+ // Command not available
+ }
+ }
+
+ return null;
+ }
+
+ detectByPaths(pathsByPlatform) {
+ if (!pathsByPlatform) return null;
+ const candidates = pathsByPlatform[this.platform];
+ if (!Array.isArray(candidates)) return null;
+
+ for (const checkPath of candidates) {
+ try {
+ if (fs.existsSync(checkPath)) {
+ return checkPath;
+ }
+ } catch (error) {
+ // Ignore invalid paths
+ }
+ }
+
+ return null;
}
/**
@@ -100,7 +157,7 @@ class PlatformAutoConfigurator {
}
if (!extensionsPath || !fs.existsSync(extensionsPath)) {
- return false;
+ return null;
}
try {
@@ -112,7 +169,7 @@ class PlatformAutoConfigurator {
if (found && this.verbose) {
console.log(` Extension found: ${detection.extensionId}`);
}
- return found;
+ return found ? extensionsPath : null;
}
// For Copilot, look for github.copilot extension
@@ -123,13 +180,13 @@ class PlatformAutoConfigurator {
if (copilotFound && this.verbose) {
console.log(` GitHub Copilot extension found`);
}
- return copilotFound;
+ return copilotFound ? extensionsPath : null;
}
- return false;
+ return null;
} catch (error) {
if (this.verbose) console.log(` Error checking extensions: ${error.message}`);
- return false;
+ return null;
}
}
@@ -139,14 +196,26 @@ class PlatformAutoConfigurator {
async configurePlatform(platformId, config) {
const configInfo = config.config;
- if (!configInfo.paths || !configInfo.paths[this.platform]) {
- return { success: false, reason: 'No configuration path for this platform' };
+ if (!configInfo) {
+ return { success: false, reason: 'No configuration definition for this platform' };
}
- const configPath = configInfo.paths[this.platform];
- const configDir = path.dirname(configPath);
-
try {
+ if (configInfo.format === 'continue-yaml') {
+ return await this.configureContinueYaml(configInfo, platformId);
+ }
+
+ if (configInfo.format === 'toml') {
+ return await this.configureTomlFile(configInfo);
+ }
+
+ if (!configInfo.paths || !configInfo.paths[this.platform]) {
+ return { success: false, reason: 'No configuration path for this platform' };
+ }
+
+ const configPath = configInfo.paths[this.platform];
+ const configDir = path.dirname(configPath);
+
// Create directory if it doesn't exist
if (!fs.existsSync(configDir)) {
fs.mkdirSync(configDir, { recursive: true });
@@ -164,9 +233,6 @@ class PlatformAutoConfigurator {
case 'json-merge':
return await this.configureJsonFile(configPath, configInfo, platformId);
- case 'continue-yaml':
- return await this.configureContinueYaml(configInfo, platformId);
-
default:
return { success: false, reason: 'Unsupported configuration format' };
}
@@ -200,8 +266,8 @@ class PlatformAutoConfigurator {
return { success: false, reason: 'Already configured' };
}
- // Create structure from template
- const template = configInfo.structure;
+ // Create structure from adapter or legacy template
+ const template = this.buildConfigPayload(configInfo, platformId);
const newConfig = this.processTemplate(template, this.packagePath);
// Merge configurations
@@ -267,13 +333,7 @@ class PlatformAutoConfigurator {
}
// Use workspaceStructure (direct server definition, NOT nested)
- const structure = configInfo.workspaceStructure || configInfo.structure || {
- name: 'Context Sync',
- type: 'stdio',
- command: 'npx',
- args: ['-y', '@context-sync/server'],
- env: {}
- };
+ const structure = configInfo.workspaceStructure || this.buildServerDefinition(configInfo);
// Determine file name - use a simple, standard name
const fileName = 'context-sync.yaml';
@@ -362,13 +422,7 @@ class PlatformAutoConfigurator {
}
// Use globalStructure or default
- const newEntry = configInfo.globalStructure || configInfo.structure || {
- name: 'Context Sync',
- type: 'stdio',
- command: 'npx',
- args: ['-y', '@context-sync/server'],
- env: {}
- };
+ const newEntry = configInfo.globalStructure || this.buildServerDefinition(configInfo);
// Process template
const processed = this.processTemplate(newEntry, this.packagePath);
@@ -427,6 +481,147 @@ class PlatformAutoConfigurator {
return processed;
}
+ renderTomlTable(tableKey, entries) {
+ const lines = [`[${tableKey}]`];
+ for (const key of Object.keys(entries)) {
+ lines.push(`${key} = ${this.toTomlValue(entries[key])}`);
+ }
+ return lines.join('\n');
+ }
+
+ toTomlValue(value) {
+ if (Array.isArray(value)) {
+ const items = value.map(item => this.toTomlValue(item)).join(', ');
+ return `[${items}]`;
+ }
+ if (typeof value === 'string') {
+ return `"${value.replace(/\\/g, '\\\\').replace(/"/g, '\\"')}"`;
+ }
+ if (typeof value === 'number' || typeof value === 'boolean') {
+ return String(value);
+ }
+ if (value === null || value === undefined) {
+ return '""';
+ }
+ return `"${String(value).replace(/\\/g, '\\\\').replace(/"/g, '\\"')}"`;
+ }
+
+ escapeRegex(value) {
+ return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
+ }
+
+ /**
+ * Build a unified server definition with platform overrides
+ */
+ buildServerDefinition(configInfo) {
+ const base = {
+ name: 'Context Sync',
+ command: 'node',
+ args: ['{{packagePath}}']
+ };
+
+ const overrides = configInfo.serverOverrides || {};
+ return this.mergeConfigs(base, overrides);
+ }
+
+ /**
+ * Build config payload from adapter or legacy structure
+ */
+ buildConfigPayload(configInfo, platformId) {
+ if (!configInfo.adapter) {
+ return configInfo.structure || {};
+ }
+
+ const adapter = configInfo.adapter;
+ const serverId = adapter.serverId || 'context-sync';
+ const serverDef = this.buildServerDefinition(configInfo);
+ const payload = adapter.rootExtras ? JSON.parse(JSON.stringify(adapter.rootExtras)) : {};
+ const serverPayload = { [serverId]: serverDef };
+
+ if (adapter.flatKey) {
+ if (adapter.containerKey) {
+ payload[adapter.flatKey] = {};
+ this.setDeep(payload[adapter.flatKey], adapter.containerKey, serverPayload);
+ } else {
+ payload[adapter.flatKey] = serverPayload;
+ }
+ return payload;
+ }
+
+ if (adapter.containerKey) {
+ this.setDeep(payload, adapter.containerKey, serverPayload);
+ } else {
+ payload[serverId] = serverDef;
+ }
+
+ return payload;
+ }
+
+ /**
+ * Set nested object values using dot paths
+ */
+ setDeep(target, pathKey, value) {
+ const parts = pathKey.split('.');
+ let current = target;
+ for (let i = 0; i < parts.length; i++) {
+ const key = parts[i];
+ if (i === parts.length - 1) {
+ current[key] = value;
+ return;
+ }
+ if (!current[key] || typeof current[key] !== 'object' || Array.isArray(current[key])) {
+ current[key] = {};
+ }
+ current = current[key];
+ }
+ }
+
+ /**
+ * Configure TOML-based config files (Codex CLI)
+ */
+ async configureTomlFile(configInfo) {
+ if (!configInfo.paths || !configInfo.paths[this.platform]) {
+ return { success: false, reason: 'No configuration path for this platform' };
+ }
+
+ const configPath = configInfo.paths[this.platform];
+ const configDir = path.dirname(configPath);
+ const tableKey = configInfo.tomlTableKey || 'mcp_servers.context-sync';
+
+ try {
+ if (!fs.existsSync(configDir)) {
+ fs.mkdirSync(configDir, { recursive: true });
+ if (this.verbose) console.log(` Created directory: ${configDir}`);
+ }
+
+ let existing = '';
+ if (fs.existsSync(configPath)) {
+ existing = fs.readFileSync(configPath, 'utf8');
+
+ const tableRegex = new RegExp(`^\\[${this.escapeRegex(tableKey)}\\]\\s*$`, 'm');
+ if (tableRegex.test(existing)) {
+ return { success: false, reason: 'Already configured' };
+ }
+
+ const backupPath = `${configPath}.backup.${Date.now()}`;
+ fs.copyFileSync(configPath, backupPath);
+ if (this.verbose) console.log(` Backup created: ${backupPath}`);
+ }
+
+ const serverDef = this.buildServerDefinition(configInfo);
+ if (configInfo.omitName) {
+ delete serverDef.name;
+ }
+ const tomlBlock = this.renderTomlTable(tableKey, serverDef);
+ const output = existing ? `${existing.trimEnd()}\n\n${tomlBlock}\n` : `${tomlBlock}\n`;
+
+ fs.writeFileSync(configPath, output, 'utf8');
+ return { success: true };
+ } catch (error) {
+ return { success: false, reason: `Error writing TOML: ${error.message}` };
+ }
+ }
+
/**
* Deep merge two configuration objects
*/
@@ -470,42 +665,42 @@ class PlatformAutoConfigurator {
generateReport() {
const { detected, configured, skipped, errors } = this.results;
- let report = '\n๐ Auto-Configuration Complete!\n\n';
+ let report = '\n Auto-Configuration Complete!\n\n';
- report += `๐ Summary:\n`;
- report += ` โข Platforms detected: ${detected.length}\n`;
- report += ` โข Successfully configured: ${configured.length}\n`;
- report += ` โข Skipped: ${skipped.length}\n`;
- report += ` โข Errors: ${errors.length}\n\n`;
+ report += ` Summary:\n`;
+ report += ` Platforms detected: ${detected.length}\n`;
+ report += ` Successfully configured: ${configured.length}\n`;
+ report += ` Skipped: ${skipped.length}\n`;
+ report += ` Errors: ${errors.length}\n\n`;
if (configured.length > 0) {
- report += `โ
Configured Platforms:\n`;
+ report += ` Configured Platforms:\n`;
configured.forEach(platform => {
const config = PLATFORM_CONFIGS[platform];
- report += ` โข ${config.name}\n`;
+ report += ` ${config.name}\n`;
});
report += '\n';
}
if (skipped.length > 0) {
- report += `โ ๏ธ Skipped Platforms:\n`;
+ report += ` Skipped Platforms:\n`;
skipped.forEach(({ platform, reason }) => {
const config = PLATFORM_CONFIGS[platform];
- report += ` โข ${config.name}: ${reason}\n`;
+ report += ` ${config.name}: ${reason}\n`;
});
report += '\n';
}
if (errors.length > 0) {
- report += `โ Errors:\n`;
+ report += ` Errors:\n`;
errors.forEach(({ platform, error }) => {
const config = PLATFORM_CONFIGS[platform];
- report += ` โข ${config.name}: ${error}\n`;
+ report += ` ${config.name}: ${error}\n`;
});
report += '\n';
}
- report += `๐ Next Steps:\n`;
+ report += ` Next Steps:\n`;
if (configured.length > 0) {
report += ` 1. Restart your AI applications\n`;
report += ` 2. Context Sync should appear in their MCP/tools list\n`;
@@ -520,4 +715,6 @@ class PlatformAutoConfigurator {
}
}
-module.exports = PlatformAutoConfigurator;
\ No newline at end of file
+module.exports = PlatformAutoConfigurator;
+
+
diff --git a/bin/enhanced-detector.cjs b/bin/enhanced-detector.cjs
deleted file mode 100644
index 5998968..0000000
--- a/bin/enhanced-detector.cjs
+++ /dev/null
@@ -1,362 +0,0 @@
-/**
- * Enhanced Platform Detection System
- *
- * This module provides robust cross-platform detection that doesn't rely on hardcoded paths.
- * It uses multiple detection strategies to find AI platforms regardless of installation location.
- */
-
-const { execSync, spawn } = require('child_process');
-const fs = require('fs');
-const path = require('path');
-const os = require('os');
-
-class EnhancedPlatformDetector {
- constructor() {
- this.platform = process.platform;
- this.cache = new Map(); // Cache detection results
- }
-
- /**
- * Detect platform using multiple strategies
- */
- async detectPlatform(platformId, config) {
- // Check cache first
- if (this.cache.has(platformId)) {
- return this.cache.get(platformId);
- }
-
- const strategies = [
- () => this.detectByProcess(platformId, config),
- () => this.detectByRegistry(platformId, config),
- () => this.detectByCommand(platformId, config),
- () => this.detectByCommonPaths(platformId, config),
- () => this.detectByPackageManager(platformId, config),
- () => this.detectByEnvironment(platformId, config)
- ];
-
- for (const strategy of strategies) {
- try {
- const result = await strategy();
- if (result) {
- this.cache.set(platformId, result);
- return result;
- }
- } catch (error) {
- // Continue to next strategy
- }
- }
-
- this.cache.set(platformId, null);
- return null;
- }
-
- /**
- * Strategy 1: Detect by running processes
- */
- async detectByProcess(platformId, config) {
- if (!config.processNames) return null;
-
- try {
- let processes;
- if (this.platform === 'win32') {
- const output = execSync('tasklist /FO CSV', { encoding: 'utf8' });
- processes = output.toLowerCase();
- } else {
- const output = execSync('ps aux', { encoding: 'utf8' });
- processes = output.toLowerCase();
- }
-
- for (const processName of config.processNames) {
- if (processes.includes(processName.toLowerCase())) {
- // Found running process, try to get executable path
- return await this.getExecutablePathFromProcess(processName);
- }
- }
- } catch (error) {
- // Process detection failed
- }
-
- return null;
- }
-
- /**
- * Strategy 2: Windows Registry detection
- */
- async detectByRegistry(platformId, config) {
- if (this.platform !== 'win32' || !config.registryKeys) return null;
-
- try {
- for (const registryKey of config.registryKeys) {
- const output = execSync(`reg query "${registryKey}" /s 2>nul`, { encoding: 'utf8' });
- if (output) {
- // Parse registry output to find installation path
- const match = output.match(/InstallLocation\s+REG_SZ\s+(.+)/i);
- if (match) {
- const installPath = match[1].trim();
- if (fs.existsSync(installPath)) {
- return { path: installPath, method: 'registry' };
- }
- }
- }
- }
- } catch (error) {
- // Registry detection failed
- }
-
- return null;
- }
-
- /**
- * Strategy 3: Command-line detection
- */
- async detectByCommand(platformId, config) {
- if (!config.commands) return null;
-
- for (const command of config.commands) {
- try {
- const output = execSync(`${command} --version 2>/dev/null || ${command} -v 2>/dev/null`, {
- encoding: 'utf8',
- timeout: 5000
- });
-
- if (output) {
- // Try to get the full path of the command
- const whichCommand = this.platform === 'win32' ? 'where' : 'which';
- const pathOutput = execSync(`${whichCommand} ${command}`, { encoding: 'utf8' });
- return { path: pathOutput.trim(), method: 'command' };
- }
- } catch (error) {
- // Command not found, continue
- }
- }
-
- return null;
- }
-
- /**
- * Strategy 4: Enhanced common paths detection
- */
- async detectByCommonPaths(platformId, config) {
- const allPaths = this.getAllPossiblePaths(platformId, config);
-
- for (const checkPath of allPaths) {
- if (fs.existsSync(checkPath)) {
- return { path: checkPath, method: 'filesystem' };
- }
- }
-
- return null;
- }
-
- /**
- * Strategy 5: Package manager detection
- */
- async detectByPackageManager(platformId, config) {
- if (!config.packageNames) return null;
-
- const packageManagers = this.getPackageManagers();
-
- for (const pm of packageManagers) {
- for (const packageName of config.packageNames) {
- try {
- const result = await this.checkPackageManager(pm, packageName);
- if (result) {
- return { path: result, method: 'package-manager' };
- }
- } catch (error) {
- // Continue to next package manager
- }
- }
- }
-
- return null;
- }
-
- /**
- * Strategy 6: Environment variable detection
- */
- async detectByEnvironment(platformId, config) {
- if (!config.environmentVars) return null;
-
- for (const envVar of config.environmentVars) {
- const value = process.env[envVar];
- if (value && fs.existsSync(value)) {
- return { path: value, method: 'environment' };
- }
- }
-
- return null;
- }
-
- /**
- * Get all possible installation paths for a platform
- */
- getAllPossiblePaths(platformId, config) {
- const paths = [];
-
- // Original configured paths
- if (config.detection?.paths?.[this.platform]) {
- paths.push(...config.detection.paths[this.platform]);
- }
-
- // Platform-specific common locations
- paths.push(...this.getPlatformSpecificPaths(platformId));
-
- // User-specific locations
- paths.push(...this.getUserSpecificPaths(platformId));
-
- // System-wide locations
- paths.push(...this.getSystemWidePaths(platformId));
-
- return [...new Set(paths)]; // Remove duplicates
- }
-
- /**
- * Get platform-specific installation paths
- */
- getPlatformSpecificPaths(platformId) {
- const paths = [];
- const home = os.homedir();
-
- switch (this.platform) {
- case 'win32':
- // Windows common installation directories
- const commonDirs = [
- process.env.LOCALAPPDATA,
- process.env.APPDATA,
- process.env.PROGRAMFILES,
- process.env['PROGRAMFILES(X86)'],
- 'C:\\Program Files',
- 'C:\\Program Files (x86)',
- path.join(home, 'AppData', 'Local'),
- path.join(home, 'AppData', 'Roaming')
- ].filter(Boolean);
-
- const appNames = this.getAppVariations(platformId);
- for (const dir of commonDirs) {
- for (const appName of appNames) {
- paths.push(path.join(dir, appName));
- paths.push(path.join(dir, 'Programs', appName));
- }
- }
- break;
-
- case 'darwin':
- // macOS common installation directories
- const macDirs = [
- '/Applications',
- path.join(home, 'Applications'),
- '/System/Applications',
- '/usr/local/bin',
- '/opt/homebrew/bin'
- ];
-
- const macAppNames = this.getAppVariations(platformId);
- for (const dir of macDirs) {
- for (const appName of macAppNames) {
- paths.push(path.join(dir, `${appName}.app`));
- paths.push(path.join(dir, appName));
- }
- }
- break;
-
- case 'linux':
- // Linux common installation directories
- const linuxDirs = [
- '/usr/bin',
- '/usr/local/bin',
- '/opt',
- '/snap/bin',
- path.join(home, '.local', 'bin'),
- path.join(home, 'bin'),
- '/usr/share/applications'
- ];
-
- const linuxAppNames = this.getAppVariations(platformId);
- for (const dir of linuxDirs) {
- for (const appName of linuxAppNames) {
- paths.push(path.join(dir, appName));
- paths.push(path.join(dir, `${appName}.desktop`));
- }
- }
- break;
- }
-
- return paths;
- }
-
- /**
- * Get various name variations for an app
- */
- getAppVariations(platformId) {
- const variations = [platformId];
-
- const nameMap = {
- 'claude': ['Claude', 'claude', 'AnthropicClaude', 'Claude Desktop'],
- 'cursor': ['Cursor', 'cursor'],
- 'copilot': ['code', 'Code', 'Visual Studio Code', 'Microsoft VS Code'],
- 'zed': ['Zed', 'zed'],
- 'windsurf': ['Windsurf', 'windsurf'],
- 'continue': ['continue', 'Continue'],
- 'codeium': ['Codeium', 'codeium'],
- 'tabnine': ['TabNine', 'tabnine']
- };
-
- return nameMap[platformId] || [platformId];
- }
-
- /**
- * Get user-specific paths
- */
- getUserSpecificPaths(platformId) {
- // Implementation for user-specific detection
- return [];
- }
-
- /**
- * Get system-wide paths
- */
- getSystemWidePaths(platformId) {
- // Implementation for system-wide detection
- return [];
- }
-
- /**
- * Get available package managers
- */
- getPackageManagers() {
- const managers = [];
-
- switch (this.platform) {
- case 'win32':
- managers.push('winget', 'choco', 'scoop');
- break;
- case 'darwin':
- managers.push('brew', 'port');
- break;
- case 'linux':
- managers.push('apt', 'yum', 'dnf', 'pacman', 'snap', 'flatpak');
- break;
- }
-
- return managers;
- }
-
- /**
- * Check if package is installed via package manager
- */
- async checkPackageManager(pm, packageName) {
- // Implementation for package manager checks
- return null;
- }
-
- /**
- * Get executable path from running process
- */
- async getExecutablePathFromProcess(processName) {
- // Implementation to get full path from process name
- return null;
- }
-}
-
-module.exports = EnhancedPlatformDetector;
\ No newline at end of file
diff --git a/bin/enhanced-platform-configs.cjs b/bin/enhanced-platform-configs.cjs
deleted file mode 100644
index 0acf18c..0000000
--- a/bin/enhanced-platform-configs.cjs
+++ /dev/null
@@ -1,303 +0,0 @@
-/**
- * Enhanced Platform Configuration Database
- *
- * Extended configuration with multiple detection strategies for robust cross-platform support.
- */
-
-const path = require('path');
-const os = require('os');
-
-const ENHANCED_PLATFORM_CONFIGS = {
- claude: {
- name: 'Claude Desktop',
- mcpSupport: 'native',
-
- // Multiple detection strategies
- processNames: ['Claude', 'claude'],
- commands: ['claude'],
- packageNames: ['claude-desktop', 'claude'],
- environmentVars: ['CLAUDE_HOME', 'CLAUDE_PATH'],
-
- // Windows Registry keys
- registryKeys: [
- 'HKEY_CURRENT_USER\\Software\\Anthropic\\Claude',
- 'HKEY_LOCAL_MACHINE\\Software\\Anthropic\\Claude',
- 'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\Claude'
- ],
-
- // Fallback filesystem paths
- detection: {
- paths: {
- darwin: [
- '/Applications/Claude.app',
- path.join(os.homedir(), 'Applications', 'Claude.app'),
- '/System/Applications/Claude.app'
- ],
- win32: [
- path.join(process.env.LOCALAPPDATA || '', 'AnthropicClaude'),
- path.join(process.env.LOCALAPPDATA || '', 'Programs', 'Claude'),
- path.join(process.env.PROGRAMFILES || '', 'Claude'),
- path.join(process.env['PROGRAMFILES(X86)'] || '', 'Claude'),
- 'C:\\Program Files\\Claude',
- 'C:\\Program Files (x86)\\Claude'
- ],
- linux: [
- '/usr/bin/claude',
- '/usr/local/bin/claude',
- '/opt/claude/claude',
- '/snap/bin/claude',
- path.join(os.homedir(), '.local', 'bin', 'claude'),
- path.join(os.homedir(), '.local', 'share', 'applications', 'claude.desktop')
- ]
- }
- },
-
- config: {
- paths: {
- darwin: path.join(os.homedir(), 'Library', 'Application Support', 'Claude', 'claude_desktop_config.json'),
- win32: path.join(process.env.APPDATA || '', 'Claude', 'claude_desktop_config.json'),
- linux: path.join(os.homedir(), '.config', 'Claude', 'claude_desktop_config.json')
- },
- format: 'json',
- structure: {
- mcpServers: {
- 'context-sync': {
- command: 'node',
- args: ['{{packagePath}}']
- }
- }
- }
- }
- },
-
- cursor: {
- name: 'Cursor IDE',
- mcpSupport: 'native',
-
- processNames: ['Cursor', 'cursor'],
- commands: ['cursor'],
- packageNames: ['cursor', 'cursor-ide'],
- environmentVars: ['CURSOR_HOME', 'CURSOR_PATH'],
-
- registryKeys: [
- 'HKEY_CURRENT_USER\\Software\\Cursor',
- 'HKEY_LOCAL_MACHINE\\Software\\Cursor',
- 'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\Cursor'
- ],
-
- detection: {
- paths: {
- darwin: [
- '/Applications/Cursor.app',
- path.join(os.homedir(), 'Applications', 'Cursor.app')
- ],
- win32: [
- path.join(process.env.LOCALAPPDATA || '', 'Programs', 'cursor'),
- path.join(process.env.PROGRAMFILES || '', 'Cursor'),
- path.join(process.env['PROGRAMFILES(X86)'] || '', 'Cursor'),
- 'C:\\Program Files\\Cursor',
- 'C:\\Program Files (x86)\\Cursor'
- ],
- linux: [
- '/usr/bin/cursor',
- '/usr/local/bin/cursor',
- '/opt/cursor/cursor',
- '/snap/bin/cursor',
- path.join(os.homedir(), '.local', 'bin', 'cursor'),
- path.join(os.homedir(), '.local', 'share', 'applications', 'cursor.desktop')
- ]
- }
- },
-
- config: {
- paths: {
- darwin: path.join(os.homedir(), '.cursor', 'mcp.json'),
- win32: path.join(process.env.USERPROFILE || '', '.cursor', 'mcp.json'),
- linux: path.join(os.homedir(), '.cursor', 'mcp.json')
- },
- format: 'json',
- structure: {
- mcpServers: {
- 'context-sync': {
- command: 'node',
- args: ['{{packagePath}}'],
- type: 'stdio'
- }
- }
- }
- }
- },
-
- copilot: {
- name: 'GitHub Copilot (VS Code)',
- mcpSupport: 'extension',
-
- processNames: ['Code', 'code'],
- commands: ['code'],
- packageNames: ['visual-studio-code', 'code'],
- environmentVars: ['VSCODE_HOME', 'CODE_HOME'],
-
- registryKeys: [
- 'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{EA457B21-F73E-494C-ACAB-524FDE069978}_is1',
- 'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{EA457B21-F73E-494C-ACAB-524FDE069978}_is1'
- ],
-
- detection: {
- paths: {
- darwin: [
- '/Applications/Visual Studio Code.app',
- path.join(os.homedir(), 'Applications', 'Visual Studio Code.app')
- ],
- win32: [
- path.join(process.env.LOCALAPPDATA || '', 'Programs', 'Microsoft VS Code'),
- path.join(process.env.PROGRAMFILES || '', 'Microsoft VS Code'),
- path.join(process.env['PROGRAMFILES(X86)'] || '', 'Microsoft VS Code'),
- 'C:\\Program Files\\Microsoft VS Code',
- 'C:\\Program Files (x86)\\Microsoft VS Code'
- ],
- linux: [
- '/usr/bin/code',
- '/usr/local/bin/code',
- '/opt/visual-studio-code/code',
- '/snap/bin/code',
- path.join(os.homedir(), '.local', 'bin', 'code')
- ]
- },
- extensionCheck: {
- darwin: path.join(os.homedir(), '.vscode', 'extensions'),
- win32: path.join(process.env.USERPROFILE || '', '.vscode', 'extensions'),
- linux: path.join(os.homedir(), '.vscode', 'extensions')
- }
- },
-
- config: {
- paths: {
- darwin: path.join(os.homedir(), 'Library', 'Application Support', 'Code', 'User', 'mcp.json'),
- win32: path.join(process.env.APPDATA || '', 'Code', 'User', 'mcp.json'),
- linux: path.join(os.homedir(), '.config', 'Code', 'User', 'mcp.json')
- },
- format: 'json',
- structure: {
- servers: {
- 'context-sync': {
- command: 'node',
- args: ['{{packagePath}}'],
- type: 'stdio'
- }
- },
- inputs: []
- }
- }
- },
-
- zed: {
- name: 'Zed Editor',
- mcpSupport: 'native',
-
- processNames: ['Zed', 'zed'],
- commands: ['zed'],
- packageNames: ['zed', 'zed-editor'],
- environmentVars: ['ZED_HOME', 'ZED_PATH'],
-
- registryKeys: [
- 'HKEY_CURRENT_USER\\Software\\Zed',
- 'HKEY_LOCAL_MACHINE\\Software\\Zed'
- ],
-
- detection: {
- paths: {
- darwin: [
- '/Applications/Zed.app',
- path.join(os.homedir(), 'Applications', 'Zed.app')
- ],
- win32: [
- path.join(process.env.LOCALAPPDATA || '', 'Programs', 'Zed'),
- path.join(process.env.PROGRAMFILES || '', 'Zed'),
- 'C:\\Program Files\\Zed',
- 'C:\\Program Files (x86)\\Zed'
- ],
- linux: [
- '/usr/bin/zed',
- '/usr/local/bin/zed',
- '/opt/zed/zed',
- '/snap/bin/zed',
- path.join(os.homedir(), '.local', 'bin', 'zed')
- ]
- }
- },
-
- config: {
- paths: {
- darwin: path.join(os.homedir(), 'Library', 'Application Support', 'Zed', 'settings.json'),
- win32: path.join(process.env.APPDATA || '', 'Zed', 'settings.json'),
- linux: path.join(os.homedir(), '.config', 'zed', 'settings.json')
- },
- format: 'json-merge',
- structure: {
- context_servers: {
- 'context-sync': {
- source: 'custom',
- command: 'node',
- args: ['{{packagePath}}'],
- env: {}
- }
- }
- }
- }
- },
-
- windsurf: {
- name: 'Windsurf by Codeium',
- mcpSupport: 'native',
-
- processNames: ['Windsurf', 'windsurf'],
- commands: ['windsurf'],
- packageNames: ['windsurf', 'windsurf-codeium'],
- environmentVars: ['WINDSURF_HOME', 'WINDSURF_PATH'],
-
- registryKeys: [
- 'HKEY_CURRENT_USER\\Software\\Codeium\\Windsurf',
- 'HKEY_LOCAL_MACHINE\\Software\\Codeium\\Windsurf'
- ],
-
- detection: {
- paths: {
- darwin: [
- '/Applications/Windsurf.app',
- path.join(os.homedir(), 'Applications', 'Windsurf.app')
- ],
- win32: [
- path.join(process.env.LOCALAPPDATA || '', 'Programs', 'Windsurf'),
- path.join(process.env.PROGRAMFILES || '', 'Windsurf'),
- 'C:\\Program Files\\Windsurf',
- 'C:\\Program Files (x86)\\Windsurf'
- ],
- linux: [
- '/usr/bin/windsurf',
- '/usr/local/bin/windsurf',
- '/opt/windsurf/windsurf',
- '/snap/bin/windsurf'
- ]
- }
- },
-
- config: {
- paths: {
- darwin: path.join(os.homedir(), '.codeium', 'windsurf', 'mcp_config.json'),
- win32: path.join(process.env.USERPROFILE || '', '.codeium', 'windsurf', 'mcp_config.json'),
- linux: path.join(os.homedir(), '.codeium', 'windsurf', 'mcp_config.json')
- },
- format: 'json',
- structure: {
- mcpServers: {
- 'context-sync': {
- command: 'node',
- args: ['{{packagePath}}']
- }
- }
- }
- }
- }
-};
-
-module.exports = ENHANCED_PLATFORM_CONFIGS;
\ No newline at end of file
diff --git a/bin/install.cjs b/bin/install.cjs
index 22eb701..c6f8a8c 100644
--- a/bin/install.cjs
+++ b/bin/install.cjs
@@ -5,6 +5,7 @@ const path = require('path');
const os = require('os');
const { execSync } = require('child_process');
const PlatformAutoConfigurator = require('./auto-configurator.cjs');
+const readlineSync = require('readline-sync');
// Simple colored output without dependencies (for initial install)
const colors = {
@@ -23,6 +24,9 @@ function log(color, message) {
console.error(color + message + colors.reset);
}
+const CONFIG_DIR = path.join(os.homedir(), '.context-sync');
+const STATUS_FILE = path.join(CONFIG_DIR, 'install-status.json');
+
// Get version from package.json
function getVersion() {
try {
@@ -42,30 +46,30 @@ const version = getVersion();
// ALWAYS show banner - use process.stdout.write to bypass npm suppression
process.stdout.write('\n' + '='.repeat(80) + '\n');
-process.stdout.write('\x1b[36m\x1b[1m๐ง Context Sync MCP Server v' + version + '\x1b[0m\n');
+process.stdout.write('\x1b[36m\x1b[1m Context Sync MCP Server v' + version + '\x1b[0m\n');
process.stdout.write('='.repeat(80) + '\n\n');
if (!isGlobalInstall) {
- log(colors.yellow, 'โ ๏ธ Detected local installation.');
+ log(colors.yellow, ' Detected local installation.');
log(colors.yellow, 'For automatic setup, install globally:\n');
log(colors.reset, ' npm install -g @context-sync/server\n');
log(colors.gray, 'Skipping automatic configuration.\n');
// Still show Notion message even for local installs
- log(colors.cyan + colors.bold, '๐ฏ NEW: Notion Integration Available!\n');
- log(colors.gray, '๐ Sync your AI context directly to Notion:\n');
- log(colors.green + colors.bold, '๐ To set up, run:\n');
+ log(colors.cyan + colors.bold, ' NEW: Notion Integration Available!\n');
+ log(colors.gray, ' Sync your AI context directly to Notion:\n');
+ log(colors.green + colors.bold, ' To set up, run:\n');
log(colors.cyan + colors.bold, ' npx context-sync-setup\n');
process.exit(0);
}
// Only auto-configure if globally installed
-log(colors.green, 'โ
Global installation detected');
+log(colors.green, ' Global installation detected');
log(colors.gray, 'Setting up AI platform configurations...\n');
// Find the globally installed package path
-log(colors.gray, '๐ Locating installed package...');
+log(colors.gray, ' Locating installed package...');
let packagePath;
try {
const npmRoot = execSync('npm root -g', { encoding: 'utf8' }).trim();
@@ -76,24 +80,24 @@ try {
packagePath = path.join(npmRoot, 'context-sync-mcp', 'dist', 'index.js');
}
} catch (error) {
- log(colors.red, 'โ Could not locate package');
+ log(colors.red, ' Could not locate package');
log(colors.gray, error.message);
printManualInstructions();
process.exit(1);
}
if (!fs.existsSync(packagePath)) {
- log(colors.red, `โ Package not found at: ${packagePath}`);
+ log(colors.red, ` Package not found at: ${packagePath}`);
printManualInstructions();
process.exit(1);
}
-log(colors.green, `โ
Package found: ${packagePath}\n`);
+log(colors.green, ` Package found: ${packagePath}\n`);
// ============================================================================
// UNIVERSAL AI PLATFORM AUTO-CONFIGURATION
// ============================================================================
-log(colors.cyan + colors.bold, '๐ Universal AI Platform Auto-Configuration\n');
+log(colors.cyan + colors.bold, ' Universal AI Platform Auto-Configuration\n');
log(colors.gray, 'Context Sync will now automatically detect and configure all installed AI platforms...\n');
// ============================================================================
@@ -109,19 +113,19 @@ async function runSetupWizard() {
try {
const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
if (config.notion?.token && config.notion?.configuredAt) {
- log(colors.green, '\nโ
Notion integration already configured');
+ log(colors.green, '\n Notion integration already configured');
log(colors.gray, ' Configured at: ' + new Date(config.notion.configuredAt).toLocaleString());
if (config.notion.defaultParentPageId) {
log(colors.gray, ' Default parent page: Set');
}
- log(colors.gray, '\n๐ก To reconfigure Notion integration:');
+ log(colors.gray, '\n To reconfigure Notion integration:');
log(colors.gray, ' Edit: ' + configPath);
log(colors.gray, ' Or reinstall: npm install -g @context-sync/server\n');
skipWizard = true;
}
} catch (error) {
// If we can't read config, proceed with wizard
- log(colors.gray, 'โ ๏ธ Could not read existing config, running setup wizard...\n');
+ log(colors.gray, ' Could not read existing config, running setup wizard...\n');
}
}
@@ -130,30 +134,69 @@ async function runSetupWizard() {
}
// Show optional Notion integration info - ALWAYS visible with prominent formatting
- console.error('\n' + 'โ'.repeat(80));
- log(colors.cyan + colors.bold, '๐ฏ NEW: Notion Integration Available!');
- console.error('โ'.repeat(80));
- log(colors.gray, '\n๐ Context Sync can now sync your AI context directly to Notion:\n');
+ console.error('\n' + ''.repeat(80));
+ log(colors.cyan + colors.bold, ' NEW: Notion Integration Available!');
+ console.error(''.repeat(80));
+ log(colors.gray, '\n Context Sync can now sync your AI context directly to Notion:\n');
- log(colors.white, ' โข Generate feature docs and export to Notion');
- log(colors.white, ' โข Pull project specs from Notion for AI to implement');
- log(colors.white, ' โข Export architecture decisions as ADRs');
- log(colors.white, ' โข Create beautifully formatted pages automatically\n');
+ log(colors.white, ' Generate feature docs and export to Notion');
+ log(colors.white, ' Pull project specs from Notion for AI to implement');
+ log(colors.white, ' Export architecture decisions as ADRs');
+ log(colors.white, ' Create beautifully formatted pages automatically\n');
- log(colors.green + colors.bold, '๐ To set up Notion integration, run:\n');
+ log(colors.green + colors.bold, ' To set up Notion integration, run:\n');
log(colors.cyan + colors.bold, ' context-sync-setup\n');
log(colors.gray, ' (or: npx context-sync-setup)\n');
log(colors.gray, ' The interactive wizard will guide you through connecting Notion.');
- console.error('โ'.repeat(80) + '\n');
+ console.error(''.repeat(80) + '\n');
} catch (error) {
- log(colors.yellow, '\nโ ๏ธ Setup wizard encountered an issue.');
+ log(colors.yellow, '\n Setup wizard encountered an issue.');
log(colors.gray, 'You can run it manually later with: npm run setup\n');
log(colors.gray, `Details: ${error.message}\n`);
}
}
+function writeInstallStatus(status) {
+ try {
+ if (!fs.existsSync(CONFIG_DIR)) {
+ fs.mkdirSync(CONFIG_DIR, { recursive: true });
+ }
+ fs.writeFileSync(STATUS_FILE, JSON.stringify(status, null, 2), 'utf8');
+ } catch (error) {
+ // Best-effort only
+ }
+}
+
+function buildInstallStatus(outcome, results, errorMessage) {
+ const needsManual =
+ outcome !== 'success' ||
+ (results && (results.errors?.length > 0 || results.configured?.length === 0));
+
+ return {
+ version,
+ timestamp: new Date().toISOString(),
+ outcome,
+ needsManual,
+ results: results || null,
+ error: errorMessage || null
+ };
+}
+
+function maybePromptManualConfig(packagePath) {
+ if (!process.stdin.isTTY || !process.stdout.isTTY) {
+ log(colors.yellow, 'Manual configuration may be required.');
+ log(colors.gray, `Details saved to: ${STATUS_FILE}`);
+ return;
+ }
+
+ const showManual = readlineSync.keyInYNStrict('Would you like manual configuration steps now?');
+ if (showManual) {
+ printManualInstructions(packagePath);
+ }
+}
+
// Use our new auto-configuration system
async function runAutoConfiguration() {
const autoConfigurator = new PlatformAutoConfigurator(packagePath, false);
@@ -164,14 +207,17 @@ async function runAutoConfiguration() {
const report = autoConfigurator.generateReport();
console.error(report);
+ const status = buildInstallStatus('success', results);
+ writeInstallStatus(status);
+
// Show platform-specific next steps
if (results.configured.length > 0) {
- log(colors.cyan + colors.bold, '๐ Platform-Specific Instructions:\n');
+ log(colors.cyan + colors.bold, ' Platform-Specific Instructions:\n');
results.configured.forEach(platformId => {
switch (platformId) {
case 'claude':
- log(colors.cyan, '๐ฑ Claude Desktop:');
+ log(colors.cyan, ' Claude Desktop:');
log(colors.reset, ' 1. Restart Claude Desktop completely');
log(colors.reset, ' 2. Open a new chat');
log(colors.reset, ' 3. Type: ' + colors.gray + '"help context-sync"' + colors.reset);
@@ -179,7 +225,7 @@ async function runAutoConfiguration() {
break;
case 'cursor':
- log(colors.cyan, '๐ฑ๏ธ Cursor IDE:');
+ log(colors.cyan, ' Cursor IDE:');
log(colors.reset, ' 1. Restart Cursor IDE');
log(colors.reset, ' 2. Open Copilot Chat (Ctrl+Shift+I / Cmd+Shift+I)');
log(colors.reset, ' 3. Look for context-sync in Tools list');
@@ -187,7 +233,7 @@ async function runAutoConfiguration() {
break;
case 'copilot':
- log(colors.cyan, '๐ป VS Code (GitHub Copilot):');
+ log(colors.cyan, ' VS Code (GitHub Copilot):');
log(colors.reset, ' 1. Restart VS Code completely');
log(colors.reset, ' 2. Open Copilot Chat (Ctrl+Shift+I / Cmd+Shift+I)');
log(colors.reset, ' 3. Switch to Agent mode');
@@ -196,7 +242,7 @@ async function runAutoConfiguration() {
break;
case 'continue':
- log(colors.cyan, '๐ Continue.dev:');
+ log(colors.cyan, ' Continue.dev:');
log(colors.reset, ' 1. Restart VS Code');
log(colors.reset, ' 2. Open Continue chat panel');
log(colors.reset, ' 3. Context Sync should be available as MCP tool');
@@ -204,7 +250,7 @@ async function runAutoConfiguration() {
break;
default:
- log(colors.cyan, `๏ฟฝ ${platformId}:`);
+ log(colors.cyan, ` ${platformId}:`);
log(colors.reset, ' 1. Restart the application');
log(colors.reset, ' 2. Look for context-sync in MCP/Tools menu');
log(colors.reset, ' 3. Try: "help context-sync"\n');
@@ -212,11 +258,11 @@ async function runAutoConfiguration() {
}
});
- log(colors.green + colors.bold, '๐ Context Sync is now your universal AI memory layer!\n');
- log(colors.reset, '๐ก All configured platforms share the same persistent context and memory.');
- log(colors.reset, '๏ฟฝ Switch between platforms seamlessly with full context preservation.\n');
+ log(colors.green + colors.bold, ' Context Sync is now your universal AI memory layer!\n');
+ log(colors.reset, ' All configured platforms share the same persistent context and memory.');
+ log(colors.reset, ' Switch between platforms seamlessly with full context preservation.\n');
} else {
- log(colors.yellow, 'โ ๏ธ No AI platforms were auto-configured.');
+ log(colors.yellow, ' No AI platforms were auto-configured.');
log(colors.reset, '\nTo get started:');
log(colors.reset, '1. Install an AI platform that supports MCP (Claude Desktop, Cursor, VS Code + Copilot)');
log(colors.reset, '2. Re-run: npm install -g @context-sync/server');
@@ -225,18 +271,24 @@ async function runAutoConfiguration() {
printManualInstructions(packagePath);
}
+ if (status.needsManual) {
+ maybePromptManualConfig(packagePath);
+ }
+
// Run the setup wizard for additional integrations (Notion, etc.)
await runSetupWizard();
- log(colors.reset, '๐ Documentation: ' + colors.cyan + 'https://github.com/Intina47/context-sync');
- log(colors.reset, '๐ฌ Issues: ' + colors.cyan + 'https://github.com/Intina47/context-sync/issues');
- log(colors.reset, '\n๐ Happy coding with universal AI context!\n');
+ log(colors.reset, ' Documentation: ' + colors.cyan + 'https://github.com/Intina47/context-sync');
+ log(colors.reset, ' Issues: ' + colors.cyan + 'https://github.com/Intina47/context-sync/issues');
+ log(colors.reset, '\n Happy coding with universal AI context!\n');
} catch (error) {
- log(colors.red, 'โ Auto-configuration failed:');
+ log(colors.red, ' Auto-configuration failed:');
log(colors.gray, error.message);
log(colors.yellow, '\nFalling back to manual configuration...\n');
printManualInstructions(packagePath);
+ writeInstallStatus(buildInstallStatus('error', null, error.message));
+ maybePromptManualConfig(packagePath);
process.exit(1);
}
}
@@ -290,7 +342,7 @@ function setupClaudeDesktop(configPath, packagePath) {
// Backup and write
const backupPath = configPath + '.backup';
fs.copyFileSync(configPath, backupPath);
- log(colors.gray, ` ๐พ Backup created: ${backupPath}`);
+ log(colors.gray, ` Backup created: ${backupPath}`);
fs.writeFileSync(configPath, JSON.stringify(config, null, 2), 'utf8');
@@ -339,7 +391,7 @@ function setupVSCode(mcpPath, packagePath) {
try {
mcpConfig = JSON.parse(mcpContent);
} catch (parseError) {
- log(colors.gray, ' โ ๏ธ Could not parse existing MCP config, creating new one');
+ log(colors.gray, ' Could not parse existing MCP config, creating new one');
mcpConfig = {
servers: {},
inputs: []
@@ -379,7 +431,7 @@ function setupVSCode(mcpPath, packagePath) {
if (fs.existsSync(mcpPath)) {
const backupPath = mcpPath + '.backup';
fs.copyFileSync(mcpPath, backupPath);
- log(colors.gray, ` ๐พ Backup created: ${backupPath}`);
+ log(colors.gray, ` Backup created: ${backupPath}`);
}
// Write updated config
@@ -395,11 +447,11 @@ function setupVSCode(mcpPath, packagePath) {
}
function printManualInstructions(pkgPath) {
- log(colors.cyan + colors.bold, '\n๐ Manual Configuration Instructions:\n');
+ log(colors.cyan + colors.bold, '\n Manual Configuration Instructions:\n');
- log(colors.cyan, '๐ค Claude Desktop:');
+ log(colors.cyan, ' Claude Desktop:');
log(colors.reset, '1. Open Claude Desktop');
- log(colors.reset, '2. Go to Settings โ Developer โ MCP Servers');
+ log(colors.reset, '2. Go to Settings Developer MCP Servers');
log(colors.reset, '3. Add this configuration:\n');
log(colors.gray, '{');
log(colors.gray, ' "mcpServers": {');
@@ -410,7 +462,7 @@ function printManualInstructions(pkgPath) {
log(colors.gray, ' }');
log(colors.gray, '}\n');
- log(colors.cyan, '๐ป VS Code (GitHub Copilot):');
+ log(colors.cyan, ' VS Code (GitHub Copilot):');
log(colors.reset, '1. Create file: ~/.vscode/mcp.json (macOS/Linux)');
log(colors.reset, ' or %APPDATA%\\Code\\User\\globalStorage\\mcp.json (Windows)');
log(colors.reset, '2. Add this configuration:\n');
@@ -425,3 +477,5 @@ function printManualInstructions(pkgPath) {
log(colors.gray, ' "inputs": []');
log(colors.gray, '}\n');
}
+
+
diff --git a/bin/platform-configs.cjs b/bin/platform-configs.cjs
index 8b54a9a..1efb400 100644
--- a/bin/platform-configs.cjs
+++ b/bin/platform-configs.cjs
@@ -7,7 +7,6 @@
const path = require('path');
const os = require('os');
-const fs = require('fs');
// Platform configuration database with auto-detection and setup methods
const PLATFORM_CONFIGS = {
@@ -18,6 +17,7 @@ const PLATFORM_CONFIGS = {
claude: {
name: 'Claude Desktop',
mcpSupport: 'native',
+ enabled: true,
detection: {
// Check if Claude Desktop is installed
paths: {
@@ -46,13 +46,9 @@ const PLATFORM_CONFIGS = {
linux: path.join(os.homedir(), '.config', 'Claude', 'claude_desktop_config.json')
},
format: 'json',
- structure: {
- mcpServers: {
- 'context-sync': {
- command: 'node',
- args: ['{{packagePath}}']
- }
- }
+ adapter: {
+ kind: 'json',
+ containerKey: 'mcpServers'
}
}
},
@@ -60,6 +56,7 @@ const PLATFORM_CONFIGS = {
cursor: {
name: 'Cursor IDE',
mcpSupport: 'native',
+ enabled: true,
detection: {
// Check if Cursor is installed
paths: {
@@ -88,14 +85,12 @@ const PLATFORM_CONFIGS = {
linux: path.join(os.homedir(), '.cursor', 'mcp.json')
},
format: 'json',
- structure: {
- mcpServers: {
- 'context-sync': {
- command: 'node',
- args: ['{{packagePath}}'],
- type: 'stdio'
- }
- }
+ adapter: {
+ kind: 'json',
+ containerKey: 'mcpServers'
+ },
+ serverOverrides: {
+ type: 'stdio'
}
}
},
@@ -103,6 +98,7 @@ const PLATFORM_CONFIGS = {
copilot: {
name: 'GitHub Copilot (VS Code)',
mcpSupport: 'extension',
+ enabled: true,
detection: {
// Check if VS Code is installed (required for Copilot)
paths: {
@@ -137,15 +133,13 @@ const PLATFORM_CONFIGS = {
linux: path.join(os.homedir(), '.config', 'Code', 'User', 'mcp.json')
},
format: 'json',
- structure: {
- servers: {
- 'context-sync': {
- command: 'node',
- args: ['{{packagePath}}'],
- type: 'stdio'
- }
- },
- inputs: []
+ adapter: {
+ kind: 'json',
+ containerKey: 'servers',
+ rootExtras: { inputs: [] }
+ },
+ serverOverrides: {
+ type: 'stdio'
}
}
},
@@ -157,6 +151,7 @@ const PLATFORM_CONFIGS = {
continue: {
name: 'Continue.dev',
mcpSupport: 'native',
+ enabled: true,
detection: {
// Continue can be detected via global config, workspace config, or VS Code extension
paths: {
@@ -189,17 +184,7 @@ const PLATFORM_CONFIGS = {
// Workspace config: .continue/mcpServers/*.yaml files (direct server definition)
workspaceRelativePath: path.join('.continue', 'mcpServers'),
format: 'continue-yaml',
- // Workspace-level YAML structure (direct server definition, NOT nested)
- workspaceStructure: {
- name: 'Context Sync',
- type: 'stdio',
- command: 'npx',
- args: ['-y', '@context-sync/server'],
- env: {}
- },
- // Global config structure (part of mcpServers array)
- globalStructure: {
- name: 'Context Sync',
+ serverOverrides: {
type: 'stdio',
command: 'npx',
args: ['-y', '@context-sync/server'],
@@ -211,6 +196,7 @@ const PLATFORM_CONFIGS = {
zed: {
name: 'Zed Editor',
mcpSupport: 'native',
+ enabled: true,
detection: {
paths: {
darwin: [
@@ -236,15 +222,13 @@ const PLATFORM_CONFIGS = {
linux: path.join(os.homedir(), '.config', 'zed', 'settings.json')
},
format: 'json-merge',
- structure: {
- context_servers: {
- 'context-sync': {
- source: 'custom',
- command: 'node',
- args: ['{{packagePath}}'],
- env: {}
- }
- }
+ adapter: {
+ kind: 'json',
+ containerKey: 'context_servers'
+ },
+ serverOverrides: {
+ source: 'custom',
+ env: {}
}
}
},
@@ -252,6 +236,7 @@ const PLATFORM_CONFIGS = {
windsurf: {
name: 'Windsurf by Codeium',
mcpSupport: 'native',
+ enabled: true,
detection: {
paths: {
darwin: [
@@ -276,13 +261,9 @@ const PLATFORM_CONFIGS = {
linux: path.join(os.homedir(), '.codeium', 'windsurf', 'mcp_config.json')
},
format: 'json',
- structure: {
- mcpServers: {
- 'context-sync': {
- command: 'node',
- args: ['{{packagePath}}']
- }
- }
+ adapter: {
+ kind: 'json',
+ containerKey: 'mcpServers'
}
}
},
@@ -290,6 +271,7 @@ const PLATFORM_CONFIGS = {
codeium: {
name: 'Codeium',
mcpSupport: 'extension',
+ enabled: true,
detection: {
// Codeium is usually a VS Code extension
paths: {
@@ -307,16 +289,10 @@ const PLATFORM_CONFIGS = {
linux: path.join(os.homedir(), '.config', 'Code', 'User', 'settings.json')
},
format: 'json-setting',
- settingKey: 'codeium.mcp',
- structure: {
- 'codeium.mcp': {
- servers: {
- 'context-sync': {
- command: 'node',
- args: ['{{packagePath}}']
- }
- }
- }
+ adapter: {
+ kind: 'json',
+ flatKey: 'codeium.mcp',
+ containerKey: 'servers'
}
}
},
@@ -324,6 +300,7 @@ const PLATFORM_CONFIGS = {
tabnine: {
name: 'TabNine',
mcpSupport: 'extension',
+ enabled: true,
detection: {
// TabNine has both standalone and extension versions
paths: {
@@ -350,44 +327,114 @@ const PLATFORM_CONFIGS = {
linux: path.join(os.homedir(), '.config', 'TabNine', 'config.json')
},
format: 'json',
- structure: {
- mcp: {
- servers: {
- 'context-sync': {
- command: 'node',
- args: ['{{packagePath}}']
- }
- }
- }
+ adapter: {
+ kind: 'json',
+ containerKey: 'mcp.servers'
}
}
},
// ============================================================================
- // API PLATFORMS (Custom Integration Required)
+ // CLI/IDE PLATFORMS (New)
// ============================================================================
- ollama: {
- name: 'Ollama',
- mcpSupport: 'custom',
+ codex: {
+ name: 'OpenAI Codex CLI',
+ mcpSupport: 'cli',
+ enabled: true,
detection: {
- // Check if Ollama is installed
- command: 'ollama --version',
paths: {
- darwin: ['/usr/local/bin/ollama', '/opt/homebrew/bin/ollama'],
- win32: [path.join(process.env.LOCALAPPDATA || '', 'Programs', 'Ollama', 'ollama.exe')],
- linux: ['/usr/bin/ollama', '/usr/local/bin/ollama']
+ darwin: [path.join(os.homedir(), '.codex', 'config.toml')],
+ win32: [path.join(os.homedir(), '.codex', 'config.toml')],
+ linux: [path.join(os.homedir(), '.codex', 'config.toml')]
}
},
config: {
- // Ollama doesn't use MCP directly, but we can create a bridge
- note: 'Ollama requires custom bridge integration - no direct MCP support',
- bridgeRequired: true
+ paths: {
+ darwin: path.join(os.homedir(), '.codex', 'config.toml'),
+ win32: path.join(os.homedir(), '.codex', 'config.toml'),
+ linux: path.join(os.homedir(), '.codex', 'config.toml')
+ },
+ format: 'toml',
+ tomlTableKey: 'mcp_servers.context-sync',
+ omitName: true,
+ serverOverrides: {
+ command: 'npx',
+ args: ['-y', '@context-sync/server']
+ }
}
- }
+ },
+
+ 'claude-code': {
+ name: 'Claude Code',
+ mcpSupport: 'cli',
+ enabled: true,
+ detection: {
+ command: 'claude mcp list',
+ paths: {
+ darwin: [path.join(os.homedir(), '.claude', 'mcp_servers.json')],
+ win32: [path.join(os.homedir(), '.claude', 'mcp_servers.json')],
+ linux: [path.join(os.homedir(), '.claude', 'mcp_servers.json')]
+ }
+ },
+ config: {
+ paths: {
+ darwin: path.join(os.homedir(), '.claude', 'mcp_servers.json'),
+ win32: path.join(os.homedir(), '.claude', 'mcp_servers.json'),
+ linux: path.join(os.homedir(), '.claude', 'mcp_servers.json')
+ },
+ format: 'json',
+ adapter: {
+ kind: 'json',
+ containerKey: 'mcpServers'
+ },
+ serverOverrides: {
+ command: 'npx',
+ args: ['-y', '@context-sync/server']
+ }
+ }
+ },
+
+ antigravity: {
+ name: 'Google Antigravity',
+ mcpSupport: 'native',
+ enabled: true,
+ detection: {
+ paths: {
+ darwin: [path.join(os.homedir(), '.gemini', 'antigravity', 'mcp_config.json')],
+ win32: [path.join(os.homedir(), '.gemini', 'antigravity', 'mcp_config.json')],
+ linux: [path.join(os.homedir(), '.gemini', 'antigravity', 'mcp_config.json')]
+ }
+ },
+ config: {
+ paths: {
+ darwin: path.join(os.homedir(), '.gemini', 'antigravity', 'mcp_config.json'),
+ win32: path.join(os.homedir(), '.gemini', 'antigravity', 'mcp_config.json'),
+ linux: path.join(os.homedir(), '.gemini', 'antigravity', 'mcp_config.json')
+ },
+ format: 'json',
+ adapter: {
+ kind: 'json',
+ containerKey: 'mcpServers'
+ },
+ serverOverrides: {
+ command: 'npx',
+ args: ['-y', '@context-sync/server']
+ }
+ }
+ },
+
+ // ============================================================================
+ // TODO PLATFORMS (Stubs - configuration pending)
+ // ============================================================================
- // Note: OpenAI API, Claude API, Gemini, CodeWhisperer are API-only
- // and don't have local installations to detect or configure
+ 'continue-dev': {
+ name: 'Continue.dev (App)',
+ mcpSupport: 'unknown',
+ enabled: false,
+ todo: true,
+ note: 'TODO: clarify if distinct from Continue extension and add config paths'
+ }
};
-module.exports = PLATFORM_CONFIGS;
\ No newline at end of file
+module.exports = PLATFORM_CONFIGS;
diff --git a/bin/setup.cjs b/bin/setup.cjs
index 87875a5..38dc802 100644
--- a/bin/setup.cjs
+++ b/bin/setup.cjs
@@ -10,9 +10,11 @@ const fs = require('fs');
const path = require('path');
const os = require('os');
const { exec } = require('child_process');
+const PLATFORM_CONFIGS = require('./platform-configs.cjs');
const CONFIG_DIR = path.join(os.homedir(), '.context-sync');
const CONFIG_FILE = path.join(CONFIG_DIR, 'config.json');
+const STATUS_FILE = path.join(CONFIG_DIR, 'install-status.json');
// Color codes
const colors = {
@@ -33,6 +35,130 @@ function log(color, message) {
console.log(color + message + colors.reset);
}
+function printHeader(title) {
+ console.log('\n' + '='.repeat(64));
+ log(colors.cyan + colors.bold, title);
+ console.log('='.repeat(64) + '\n');
+}
+
+function promptYesNo(question) {
+ return readlineSync.keyInYNStrict(question);
+}
+
+function promptToken() {
+ while (true) {
+ const rawToken = readlineSync.question('Paste your Notion integration token: ', {
+ hideEchoBack: true
+ });
+ const token = rawToken ? rawToken.trim() : '';
+
+ if (!token) {
+ log(colors.yellow, 'Token is empty.');
+ if (!promptYesNo('Try again?')) return null;
+ continue;
+ }
+
+ const isValidFormat = token.startsWith('secret_') || token.startsWith('ntn_');
+ if (!isValidFormat) {
+ log(colors.red, 'Invalid token format. Notion tokens start with "secret_" or "ntn_".');
+ log(colors.gray, `Received length: ${token.length} chars`);
+ log(colors.gray, `First 10 chars: ${token.substring(0, 10)}...\n`);
+ if (!promptYesNo('Try again?')) return null;
+ continue;
+ }
+
+ return token;
+ }
+}
+
+function getPlatformLabel(platformId) {
+ const config = PLATFORM_CONFIGS[platformId];
+ return config?.name || platformId;
+}
+
+function loadInstallStatus() {
+ try {
+ if (fs.existsSync(STATUS_FILE)) {
+ const data = fs.readFileSync(STATUS_FILE, 'utf-8');
+ return JSON.parse(data);
+ }
+ } catch (error) {
+ // Ignore install status parsing errors
+ }
+ return null;
+}
+
+function renderList(items, color = colors.white) {
+ items.forEach(item => {
+ log(color, ` - ${item}`);
+ });
+}
+
+function showAutoConfigSummary(status) {
+ printHeader('Auto-Configuration Summary');
+
+ if (!status || !status.results) {
+ log(colors.gray, 'No auto-configuration status found.');
+ log(colors.gray, 'Install globally to auto-configure platforms:');
+ log(colors.white, ' npm install -g @context-sync/server\n');
+ return;
+ }
+
+ const timestamp = status.timestamp ? new Date(status.timestamp).toLocaleString() : 'Unknown time';
+ log(colors.gray, `Last install status: ${status.outcome || 'unknown'} (${timestamp})`);
+
+ if (status.needsManual) {
+ log(colors.yellow, 'Manual configuration may be required for some platforms.');
+ }
+
+ const configured = status.results.configured || [];
+ if (configured.length > 0) {
+ log(colors.green, '\nConfigured platforms:');
+ renderList(configured.map(getPlatformLabel), colors.white);
+ } else {
+ log(colors.yellow, '\nNo platforms were auto-configured during install.');
+ }
+
+ const skipped = status.results.skipped || [];
+ const errors = status.results.errors || [];
+
+ if (skipped.length > 0 || errors.length > 0 || status.error) {
+ log(colors.yellow, '\nAuto-configuration issues:');
+
+ if (skipped.length > 0) {
+ skipped.forEach(({ platform, reason }) => {
+ const label = getPlatformLabel(platform);
+ log(colors.white, ` - ${label}: ${reason}`);
+ });
+ }
+
+ if (errors.length > 0) {
+ errors.forEach(({ platform, error }) => {
+ const label = getPlatformLabel(platform);
+ log(colors.white, ` - ${label}: ${error}`);
+ });
+ }
+
+ if (status.error) {
+ log(colors.white, ` - Install error: ${status.error}`);
+ }
+ } else {
+ log(colors.green, '\nNo auto-configuration issues were reported.');
+ }
+
+ log(colors.gray, '\nFor manual setup steps, see docs/CONFIG.md.\n');
+}
+
+async function fetchNotionPages(token) {
+ const { Client } = require('@notionhq/client');
+ const notion = new Client({ auth: token });
+ const response = await notion.search({
+ filter: { property: 'object', value: 'page' },
+ page_size: 20
+ });
+ return response.results || [];
+}
+
/**
* Load existing config
*/
@@ -59,7 +185,7 @@ function saveConfig(config) {
fs.writeFileSync(CONFIG_FILE, JSON.stringify(config, null, 2), 'utf-8');
return true;
} catch (error) {
- log(colors.red, `โ Error saving config: ${error.message}`);
+ log(colors.red, ` Error saving config: ${error.message}`);
return false;
}
}
@@ -68,172 +194,150 @@ function saveConfig(config) {
* Setup Notion Integration
*/
async function setupNotionIntegration(config) {
- log(colors.cyan + colors.bold, '\n๐ Notion Integration Setup\n');
-
- log(colors.white, 'Context Sync can sync your AI context directly to Notion pages.\n');
-
- // Check if already configured
+ printHeader('Notion Integration Setup');
+ log(colors.white, 'This wizard configures Notion for Context Sync.\n');
+
if (config.notion && config.notion.token) {
- log(colors.green, 'โ Notion is already configured!');
- log(colors.gray, ` Configured: ${config.notion.configuredAt}`);
+ log(colors.green, 'OK Notion is already configured.');
+ log(colors.gray, `Configured: ${config.notion.configuredAt}`);
if (config.notion.defaultParentPageId) {
- log(colors.gray, ` Default parent page: ${config.notion.defaultParentPageId}`);
- }
-
- const reconfigure = readlineSync.keyInYNStrict('\nWould you like to reconfigure Notion?');
- if (!reconfigure) {
- return;
+ log(colors.gray, `Default parent page: ${config.notion.defaultParentPageId}`);
}
+ if (!promptYesNo('\nWould you like to reconfigure Notion?')) return;
}
-
- // Step 1: Create integration
- log(colors.yellow, '\n๐ Step 1: Create a Notion Integration');
+
+ log(colors.yellow, '\nStep 1 of 3: Create a Notion integration');
log(colors.gray, ' 1. Visit: https://www.notion.so/my-integrations');
log(colors.gray, ' 2. Click "New integration"');
log(colors.gray, ' 3. Give it a name (e.g., "Context Sync")');
log(colors.gray, ' 4. Select your workspace');
log(colors.gray, ' 5. Copy the "Internal Integration Token"\n');
-
- const shouldOpenBrowser = readlineSync.keyInYNStrict('Open Notion integrations page in browser?');
- if (shouldOpenBrowser) {
+
+ if (promptYesNo('Open Notion integrations page in browser?')) {
try {
- // Use platform-specific command to open browser
const url = 'https://www.notion.so/my-integrations';
- const command = process.platform === 'win32' ? `start ${url}` :
- process.platform === 'darwin' ? `open ${url}` :
+ const command = process.platform === 'win32' ? `start ${url}` :
+ process.platform === 'darwin' ? `open ${url}` :
`xdg-open ${url}`;
exec(command);
- log(colors.green, 'โ Opening browser...\n');
+ log(colors.green, 'OK Opening browser...\n');
} catch (error) {
- log(colors.yellow, 'โ ๏ธ Could not open browser automatically');
+ log(colors.yellow, 'WARN Could not open browser automatically');
log(colors.gray, ' Please visit: https://www.notion.so/my-integrations\n');
}
}
-
- // Step 2: Get token
- log(colors.yellow, '\n๐ Step 2: Enter Your Integration Token');
- const rawToken = readlineSync.question('Paste your Notion integration token: ', {
- hideEchoBack: true
- });
-
- // Trim whitespace that might be added during paste
- const token = rawToken ? rawToken.trim() : '';
-
- // Validate token format - Notion uses different prefixes
- // Old format: secret_xxxxx
- // New format: ntn_xxxxx
- const isValidFormat = token && (token.startsWith('secret_') || token.startsWith('ntn_'));
-
- if (!isValidFormat) {
- log(colors.red, 'โ Invalid token format. Notion tokens start with "secret_" or "ntn_"');
- log(colors.gray, ' Token should start with: secret_ or ntn_');
- log(colors.gray, ` Received length: ${token.length} chars`);
- log(colors.gray, ` First 10 chars: ${token.substring(0, 10)}...\n`);
- return;
- }
-
- // Step 3: Test connection and get pages
- log(colors.cyan, '\n๐ Testing connection...');
-
- try {
- const { Client } = require('@notionhq/client');
- const notion = new Client({ auth: token });
-
- // Search for pages
- const response = await notion.search({
- filter: { property: 'object', value: 'page' },
- page_size: 20
- });
-
- if (response.results.length === 0) {
- log(colors.yellow, '\nโ ๏ธ No pages found!');
- log(colors.gray, '\nMake sure to:');
- log(colors.gray, ' 1. Share a page with your integration');
- log(colors.gray, ' 2. Open the page in Notion');
- log(colors.gray, ' 3. Click "Share" โ Add your integration\n');
-
- const continueAnyway = readlineSync.keyInYNStrict('Save token anyway and configure pages later?');
- if (!continueAnyway) {
- return;
+
+ log(colors.yellow, '\nStep 2 of 3: Enter your integration token');
+ let token = promptToken();
+ if (!token) return;
+
+ log(colors.cyan, '\nTesting connection...');
+
+ let pages = [];
+ while (true) {
+ try {
+ pages = await fetchNotionPages(token);
+ } catch (error) {
+ log(colors.red, `ERROR Connection failed: ${error.message}`);
+ if (error.code === 'unauthorized') {
+ log(colors.yellow, 'Token is invalid or integration was deleted.');
+ }
+ if (promptYesNo('Try a different token?')) {
+ token = promptToken();
+ if (!token) return;
+ continue;
}
-
- // Save token only
- config.notion = {
- token,
- configuredAt: new Date().toISOString()
- };
-
- if (saveConfig(config)) {
- log(colors.green, '\nโ Notion token saved!');
- log(colors.cyan, '\nRemember to share pages with your integration before using Notion tools.');
+ if (promptYesNo('Save token anyway and configure later?')) {
+ config.notion = { token, configuredAt: new Date().toISOString() };
+ if (saveConfig(config)) {
+ log(colors.green, '\nOK Notion token saved.');
+ }
}
return;
}
-
- log(colors.green, `โ Connected! Found ${response.results.length} accessible pages\n`);
-
- // Step 4: Select default parent page
- log(colors.yellow, '๐ Step 3: Select Default Parent Page (Optional)');
- log(colors.gray, 'New pages will be created as children of this page.\n');
-
- const pages = response.results.map((page, index) => {
- const title = page.properties?.title?.title?.[0]?.plain_text ||
- page.properties?.Name?.title?.[0]?.plain_text ||
- 'Untitled';
- return {
- index: index + 1,
- id: page.id,
- title: title.length > 60 ? title.substring(0, 60) + '...' : title
- };
- });
-
- // Display pages
- pages.forEach(page => {
- log(colors.white, ` ${page.index}. ${page.title}`);
- });
-
- if (response.results.length === 20) {
- log(colors.gray, ` ... and more pages available`);
- }
-
- log(colors.gray, '\n 0. Skip (configure later)');
-
- const selection = readlineSync.questionInt('\nSelect a page number: ', {
- limit: (input) => input >= 0 && input <= pages.length
- });
-
- let defaultParentPageId;
- if (selection > 0) {
- defaultParentPageId = pages[selection - 1].id;
- log(colors.green, `โ Selected: ${pages[selection - 1].title}`);
- } else {
- log(colors.gray, 'โ Skipped default parent page');
+
+ if (pages.length === 0) {
+ log(colors.yellow, '\nNo accessible pages found.');
+ log(colors.gray, 'Make sure to share at least one page with your integration.');
+ if (promptYesNo('Retry after sharing a page?')) {
+ continue;
+ }
+ if (promptYesNo('Save token anyway and configure pages later?')) {
+ config.notion = { token, configuredAt: new Date().toISOString() };
+ if (saveConfig(config)) {
+ log(colors.green, '\nOK Notion token saved.');
+ log(colors.gray, 'Share pages with the integration before using Notion tools.');
+ }
+ }
+ return;
}
-
- // Save config
- config.notion = {
- token,
- defaultParentPageId,
- configuredAt: new Date().toISOString()
+ break;
+ }
+
+ log(colors.green, `OK Connected! Found ${pages.length} accessible pages\n`);
+
+ log(colors.yellow, 'Step 3 of 3: Select default parent page (optional)');
+ log(colors.gray, 'New pages will be created as children of this page.\n');
+
+ const choices = pages.map((page, index) => {
+ const title = page.properties?.title?.title?.[0]?.plain_text ||
+ page.properties?.Name?.title?.[0]?.plain_text ||
+ 'Untitled';
+ return {
+ index: index + 1,
+ id: page.id,
+ title: title.length > 60 ? title.substring(0, 60) + '...' : title
};
-
- if (saveConfig(config)) {
- log(colors.green + colors.bold, '\nโจ Notion integration configured successfully!\n');
- log(colors.cyan, 'You can now use Notion tools in Context Sync:');
- log(colors.white, ' โข notion_create_page - Create new Notion pages');
- log(colors.white, ' โข notion_read_page - Read Notion page content');
- log(colors.white, ' โข notion_search - Search your Notion workspace');
- log(colors.white, ' โข create_project_dashboard - Auto-generate project docs');
- log(colors.white, ' โข sync_decision_to_notion - Export architecture decisions\n');
+ });
+
+ choices.forEach(page => {
+ log(colors.white, ` ${page.index}. ${page.title}`);
+ });
+
+ if (pages.length === 20) {
+ log(colors.gray, ' ... and more pages available');
+ }
+
+ log(colors.gray, '\n 0. Skip (configure later)');
+ log(colors.gray, ' M. Enter page ID manually');
+
+ let defaultParentPageId;
+ while (true) {
+ const selection = readlineSync.question('\nSelect a page number or M: ').trim();
+ if (selection.toLowerCase() === 'm') {
+ const manualId = readlineSync.question('Enter Notion page ID: ').trim();
+ if (manualId) {
+ defaultParentPageId = manualId;
+ log(colors.green, 'OK Default parent page set');
+ break;
+ }
+ log(colors.yellow, 'Page ID is empty.');
+ continue;
}
-
- } catch (error) {
- log(colors.red, `โ Connection failed: ${error.message}`);
- if (error.code === 'unauthorized') {
- log(colors.yellow, '\nโ ๏ธ Token is invalid or integration was deleted.');
- log(colors.gray, 'Please verify your integration token and try again.');
+ const numeric = Number(selection);
+ if (Number.isInteger(numeric) && numeric >= 0 && numeric <= choices.length) {
+ if (numeric === 0) {
+ log(colors.gray, 'OK Skipped default parent page');
+ } else {
+ defaultParentPageId = choices[numeric - 1].id;
+ log(colors.green, `OK Selected: ${choices[numeric - 1].title}`);
+ }
+ break;
}
- return;
+ log(colors.yellow, 'Invalid selection.');
+ }
+
+ config.notion = {
+ token,
+ defaultParentPageId,
+ configuredAt: new Date().toISOString()
+ };
+
+ if (saveConfig(config)) {
+ log(colors.green + colors.bold, '\nOK Notion integration configured successfully!\n');
+ log(colors.cyan, 'You can now use Notion tools in Context Sync:');
+ log(colors.white, ' - notion.search - Search your Notion workspace');
+ log(colors.white, ' - notion.read - Read Notion page content\n');
}
}
@@ -242,29 +346,33 @@ async function setupNotionIntegration(config) {
*/
async function main() {
console.log('\nโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ');
- console.log('โ ๐ง Context Sync Setup Wizard โ');
+ console.log('โ Context Sync Setup Wizard โ');
console.log('โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ\n');
// Load existing config
const config = loadConfig();
+ // Show auto-config summary from install
+ const installStatus = loadInstallStatus();
+ showAutoConfigSummary(installStatus);
+
log(colors.white, 'Welcome to Context Sync!\n');
- log(colors.gray, 'This wizard will help you configure integrations.\n');
+ log(colors.gray, 'This wizard configures Notion only.\n');
// Setup Notion
- const setupNotion = readlineSync.keyInYNStrict('Would you like to integrate with Notion?');
+ const setupNotion = promptYesNo('Would you like to integrate with Notion?');
if (setupNotion) {
await setupNotionIntegration(config);
} else {
- log(colors.gray, 'โ Skipping Notion integration\n');
+ log(colors.gray, 'OK Skipping Notion integration\n');
}
// Mark setup as complete
config.setupComplete = true;
saveConfig(config);
- log(colors.green + colors.bold, '\nโ
Setup complete!\n');
+ log(colors.green + colors.bold, '\nOK Setup complete!\n');
log(colors.cyan, 'Context Sync is ready to use with your AI assistant.');
log(colors.gray, '\nYou can run this setup again anytime with:');
log(colors.white, ' context-sync-setup\n');
@@ -272,6 +380,8 @@ async function main() {
// Run setup
main().catch(error => {
- log(colors.red, `\nโ Setup failed: ${error.message}`);
+ log(colors.red, `\nERROR Setup failed: ${error.message}`);
process.exit(1);
});
+
+
diff --git a/bin/tiered-detector.cjs b/bin/tiered-detector.cjs
deleted file mode 100644
index 4b65806..0000000
--- a/bin/tiered-detector.cjs
+++ /dev/null
@@ -1,112 +0,0 @@
-/**
- * Tiered Platform Detection System
- *
- * Uses existing working detection as primary, with enhanced detection as fallback.
- * This ensures we don't break current functionality while adding robustness.
- */
-
-const PLATFORM_CONFIGS = require('./platform-configs.cjs');
-const EnhancedDetector = require('./enhanced-detector.cjs');
-
-class TieredPlatformDetector {
- constructor() {
- this.enhancedDetector = new EnhancedDetector();
- }
-
- /**
- * Tiered detection: Current method first, enhanced as fallback
- */
- async detectPlatform(platformId, config) {
- // Tier 1: Use existing working detection (current implementation)
- const primaryResult = await this.detectWithCurrentMethod(platformId, config);
- if (primaryResult) {
- return { ...primaryResult, tier: 'primary', reliable: true };
- }
-
- // Tier 2: Enhanced detection as fallback
- console.log(` ๐ก Primary detection failed, trying enhanced methods...`);
- const enhancedResult = await this.enhancedDetector.detectPlatform(platformId, config);
- if (enhancedResult) {
- return { ...enhancedResult, tier: 'enhanced', reliable: false };
- }
-
- return null;
- }
-
- /**
- * Current detection method (proven to work on Windows)
- */
- async detectWithCurrentMethod(platformId, config) {
- const detection = config.detection;
-
- // Method 1: Check file system paths (current working approach)
- if (detection.paths && detection.paths[process.platform]) {
- const paths = detection.paths[process.platform];
- for (const checkPath of paths) {
- if (require('fs').existsSync(checkPath)) {
- return { path: checkPath, method: 'filesystem-primary' };
- }
- }
- }
-
- // Method 2: Check for VS Code extensions (current working approach)
- if (detection.extensionId || detection.extensionCheck) {
- const extensionResult = await this.checkVSCodeExtension(platformId, detection);
- if (extensionResult) {
- return { path: extensionResult, method: 'extension-primary' };
- }
- }
-
- // Method 3: Check command availability (current working approach)
- if (detection.command) {
- try {
- require('child_process').execSync(detection.command, { stdio: 'ignore' });
- return { path: detection.command, method: 'command-primary' };
- } catch (error) {
- // Command not available
- }
- }
-
- return null;
- }
-
- /**
- * VS Code extension check (from current working implementation)
- */
- async checkVSCodeExtension(platformId, detection) {
- const fs = require('fs');
- let extensionsPath;
-
- if (detection.extensionCheck) {
- extensionsPath = detection.extensionCheck[process.platform];
- } else if (detection.paths) {
- extensionsPath = detection.paths[process.platform];
- }
-
- if (!extensionsPath || !fs.existsSync(extensionsPath)) {
- return false;
- }
-
- try {
- const extensions = fs.readdirSync(extensionsPath);
-
- // Look for specific extension ID
- if (detection.extensionId) {
- return extensions.some(ext => ext.startsWith(detection.extensionId));
- }
-
- // For Copilot, look for github.copilot extension
- if (platformId === 'copilot') {
- return extensions.some(ext =>
- ext.includes('github.copilot') || ext.includes('copilot')
- );
- }
-
- return false;
- } catch (error) {
- return false;
- }
- }
-}
-
-module.exports = TieredPlatformDetector;
\ No newline at end of file
diff --git a/docs/CONFIG.md b/docs/CONFIG.md
new file mode 100644
index 0000000..c42cf8b
--- /dev/null
+++ b/docs/CONFIG.md
@@ -0,0 +1,91 @@
+**Manual Configuration**
+- Use this when auto-config fails or you prefer manual setup.
+- Restart the app/CLI after editing config files.
+- Server entry should point to the global install or use `npx -y @context-sync/server`.
+
+**Server Entry (JSON)**
+```json
+{
+ "context-sync": {
+ "command": "node",
+ "args": ["/path/to/@context-sync/server/dist/index.js"],
+ "type": "stdio"
+ }
+}
+```
+
+**Server Entry (npx)**
+```json
+{
+ "context-sync": {
+ "command": "npx",
+ "args": ["-y", "@context-sync/server"],
+ "type": "stdio"
+ }
+}
+```
+
+**Claude Desktop**
+- macOS: `~/Library/Application Support/Claude/claude_desktop_config.json`
+- Windows: `%APPDATA%\Claude\claude_desktop_config.json`
+- Linux: `~/.config/Claude/claude_desktop_config.json`
+- Structure: `mcpServers` object.
+
+**Cursor**
+- macOS/Linux: `~/.cursor/mcp.json`
+- Windows: `%USERPROFILE%\.cursor\mcp.json`
+- Structure: `mcpServers` object.
+
+**VS Code (GitHub Copilot)**
+- macOS: `~/Library/Application Support/Code/User/mcp.json`
+- Windows: `%APPDATA%\Code\User\mcp.json`
+- Linux: `~/.config/Code/User/mcp.json`
+- Structure: `servers` object + `inputs` array.
+
+**Continue.dev**
+- Workspace: `./.continue/mcpServers/context-sync.yaml`
+- Global: `~/.continue/config.yaml`
+- Structure: server definition in YAML.
+
+**Zed**
+- macOS: `~/Library/Application Support/Zed/settings.json`
+- Windows: `%APPDATA%\Zed\settings.json`
+- Linux: `~/.config/zed/settings.json`
+- Structure: `context_servers` object.
+
+**Windsurf**
+- macOS: `~/.codeium/windsurf/mcp_config.json`
+- Windows: `%USERPROFILE%\.codeium\windsurf\mcp_config.json`
+- Linux: `~/.codeium/windsurf/mcp_config.json`
+- Structure: `mcpServers` object.
+
+**Codeium**
+- macOS: `~/Library/Application Support/Code/User/settings.json`
+- Windows: `%APPDATA%\Code\User\settings.json`
+- Linux: `~/.config/Code/User/settings.json`
+- Structure: `codeium.mcp` with `servers`.
+
+**TabNine**
+- macOS: `~/Library/Application Support/TabNine/config.json`
+- Windows: `%APPDATA%\TabNine\config.json`
+- Linux: `~/.config/TabNine/config.json`
+- Structure: `mcp.servers`.
+
+**Codex CLI**
+- Config: `~/.codex/config.toml`
+- Add:
+```toml
+[mcp_servers.context-sync]
+command = "npx"
+args = ["-y", "@context-sync/server"]
+```
+
+**Claude Code (Anthropic CLI)**
+- Config: `~/.claude/mcp_servers.json`
+- Structure: `mcpServers` object.
+- CLI option: `claude mcp add context-sync "npx" -y @context-sync/server`
+
+**Antigravity (Google Gemini IDE)**
+- Config: `~/.gemini/antigravity/mcp_config.json`
+- Structure: `mcpServers` object.
+
diff --git a/docs/DATA.md b/docs/DATA.md
new file mode 100644
index 0000000..602242e
--- /dev/null
+++ b/docs/DATA.md
@@ -0,0 +1,21 @@
+**Data and Local Storage**
+
+**Default paths**
+- Database: `~/.context-sync/data.db`
+- Config: `~/.context-sync/config.json`
+- Install status: `~/.context-sync/install-status.json`
+
+**Custom database path**
+- CLI arg: `context-sync --db-path /absolute/path/to/db`
+- Env var: `CONTEXT_SYNC_DB_PATH=/absolute/path/to/db`
+
+**What is stored**
+- Projects and metadata
+- Context layers: active work, constraints, problems, goals, decisions, notes, caveats
+- Conversation summaries (for recall synthesis)
+
+**Backups**
+- First run may migrate legacy data and create a backup file next to the DB.
+
+**Delete all data**
+- Remove `~/.context-sync/` (this deletes DB and config).
diff --git a/documentation/NOTION_INTEGRATION.md b/docs/NOTION_INTERGRATION.md
similarity index 100%
rename from documentation/NOTION_INTEGRATION.md
rename to docs/NOTION_INTERGRATION.md
diff --git a/docs/RELEASE_NOTES.md b/docs/RELEASE_NOTES.md
new file mode 100644
index 0000000..de35315
--- /dev/null
+++ b/docs/RELEASE_NOTES.md
@@ -0,0 +1,27 @@
+# Release notes
+
+This release is a major v2-focused cleanup. It removes legacy v1 tooling and narrows the surface area to the v2 core server, engines, storage, Notion, and git hooks.
+
+## Highlights
+- Unified auto-config format with per-platform adapters.
+- Notion-only setup wizard with clearer prompts and retry flow.
+- Post-install status tracking with manual-config fallback when auto-config fails.
+- Setup wizard now summarizes auto-configured platforms and any install issues.
+
+## Breaking changes
+- Removed v1-only tools (todos, platform sync, call graph, type analysis, dependency analysis, file write/preview tools, performance monitor, migration prompter, and related modules).
+- Removed legacy documentation set in favor of the new docs.
+- Renamed v2-specific source files to scalable names (no "optimized" or "v2" suffixing).
+
+## Required user actions
+1) Reinstall the package to re-run auto-config.
+2) If auto-config fails, follow `docs/CONFIG.md`.
+3) Re-run the Notion setup wizard if you need Notion integration.
+
+## Compatibility
+- Supported platforms: Claude Desktop, Cursor, VS Code + GitHub Copilot, Continue.dev, Zed, Windsurf, Codeium, TabNine, Codex CLI, Claude Code, Antigravity.
+
+## Notes
+- Auto-config does not run on local installs; use global install or manual config.
+- Notion setup is intentionally manual via the wizard.
+- Fixed Windows setup command failing due to a stray BOM in the setup script.
diff --git a/docs/TOOLS.md b/docs/TOOLS.md
new file mode 100644
index 0000000..9c335fd
--- /dev/null
+++ b/docs/TOOLS.md
@@ -0,0 +1,45 @@
+**Core Tools**
+- `set_project`: Initialize a project for context tracking (run this first).
+- `remember`: Store context (active work, constraints, problems, goals, decisions, notes, caveats).
+- `recall`: Retrieve context for the current project.
+- `read_file`: Read a file with rich context.
+- `search`: Search for files or content.
+- `structure`: Summarize project structure.
+- `git`: Git status, context, hotspots, coupling, blame, analysis.
+- `notion`: Read-only Notion access (`search`, `read`).
+
+**Minimal Workflow**
+```text
+1) set_project({ path: "/abs/path/to/project" })
+2) recall()
+3) read_file({ path: "src/index.ts" })
+4) remember({ type: "decision", content: "Use SQLite for local storage" })
+```
+
+**Important**
+- Always call `set_project` before using other tools.
+
+**Remember Types**
+- `active_work`: Current task and context.
+- `constraint`: Architectural or process rules.
+- `problem`: Blockers or issues.
+- `goal`: Targets or milestones.
+- `decision`: Key decisions and rationale.
+- `note`: General information.
+- `caveat`: Mistakes, shortcuts, unverified work.
+
+**Git Actions**
+```text
+git({ action: "status" })
+git({ action: "context" })
+git({ action: "hotspots" })
+git({ action: "coupling" })
+git({ action: "blame", path: "src/server.ts" })
+git({ action: "analysis" })
+```
+
+**Notion Actions**
+```text
+notion({ action: "search", query: "architecture" })
+notion({ action: "read", pageId: "..." })
+```
diff --git a/docs/TROUBLESHOOTING.md b/docs/TROUBLESHOOTING.md
new file mode 100644
index 0000000..e814499
--- /dev/null
+++ b/docs/TROUBLESHOOTING.md
@@ -0,0 +1,41 @@
+**Troubleshooting**
+
+**Auto-config did not run**
+- Auto-config runs only on global install.
+- Fix: `npm install -g @context-sync/server`
+- Check status file: `~/.context-sync/install-status.json`
+ - If `needsManual` is true, use `docs/CONFIG.md`.
+
+**Tools do not show in your AI app**
+- Restart the app/CLI after install.
+- Verify the MCP config file exists (see `docs/CONFIG.md`).
+- Check for a `context-sync` entry in the config.
+
+**Codex CLI**
+- Verify: `codex mcp list`
+- Config: `~/.codex/config.toml`
+
+**Claude Code**
+- Verify: `claude mcp list`
+- Config: `~/.claude/mcp_servers.json`
+
+**Notion says "not configured"**
+- Run: `context-sync-setup` (or `npx context-sync-setup`)
+- Ensure the integration token is valid (`secret_` or `ntn_` prefix).
+
+**Notion returns no pages**
+- Share a Notion page with the integration.
+- Retry the wizard or run Notion search again.
+
+**Auto-config created a file but app still cannot see tools**
+- Ensure config file is valid JSON/TOML/YAML.
+- Restart the app completely.
+
+**Git hooks not installed**
+- Hooks are installed during `set_project` if the repo has `.git`.
+- Re-run `set_project`.
+- Check `.git/hooks` for files containing "Context Sync Auto-Hook".
+
+**Migration messages on startup**
+- First run may migrate legacy data and create a backup.
+- If the migration fails, your data is not modified; see console output for backup path.
diff --git a/documentation/CHANGELOG_v1.0.0.md b/documentation/CHANGELOG_v1.0.0.md
deleted file mode 100644
index f43202d..0000000
--- a/documentation/CHANGELOG_v1.0.0.md
+++ /dev/null
@@ -1,229 +0,0 @@
-# Changelog - v1.0.0
-
-> **๐ MAJOR RELEASE** - Universal AI Platform Support
-
-*Release Date: December 2024*
-
----
-
-## ๐ **New Features**
-
-### **Universal AI Platform Registry**
-- **13+ Platform Support**: Added support for Continue.dev, Zed, Windsurf, TabNine, OpenAI API, Anthropic API, Gemini, Ollama, CodeWhisperer
-- **Platform Discovery**: New `discover_ai_platforms` tool with personalized recommendations
-- **Enhanced Detection**: Intelligent platform detection based on environment variables, process info
-- **Platform Registry**: Centralized metadata for all supported platforms (`src/platform-registry.ts`)
-
-### **Database Migration & Optimization**
-- **Smart Deduplication**: Automatic detection and merging of duplicate projects
-- **Path Normalization**: Handles case sensitivity, trailing slashes, path variations
-- **Migration Tools**: `migrate_database`, `get_migration_stats`, `check_migration_suggestion`
-- **Safe Migration**: Dry-run mode, automatic backups, rollback support
-- **Auto Prompting**: Smart prompts for v1.0.0+ users with database duplicates
-
-### **Advanced Context Analysis**
-- **Conversation Analysis**: New `analyze_conversation_context` tool extracts decisions, todos, insights
-- **Missing Context Detection**: `suggest_missing_context` identifies gaps in project documentation
-- **Auto-Context Saving**: Intelligent extraction and saving of important context
-- **Context Analyzer**: New `src/context-analyzer.ts` module for smart context processing
-
-### **Enhanced Cross-Platform Sync**
-- **Seamless Handoff**: Improved `switch_platform` with detailed context transfer
-- **Platform Recommendations**: Personalized platform suggestions based on use case
-- **Universal Config**: Support for JSON, YAML, and platform-specific configuration formats
-
----
-
-## ๐ ๏ธ **Technical Improvements**
-
-### **Architecture**
-- **Modular Design**: New platform registry system for easy platform addition
-- **Type System**: Extended `AIPlatform` type to support 13+ platforms
-- **Path Normalization**: New `PathNormalizer` class for consistent path handling
-- **Performance Monitoring**: Built-in performance tracking and reporting
-
-### **Database**
-- **35% Faster**: Project detection through optimized queries and indexing
-- **60% Storage Reduction**: Elimination of duplicates through normalization
-- **50% Faster**: Context retrieval with query optimization
-- **Migration System**: Complete database migration infrastructure
-
-### **Installation**
-- **Enhanced Auto-Config**: Improved multi-platform detection and setup
-- **Better Error Handling**: Comprehensive error messages and recovery
-- **Platform-Specific Setup**: Intelligent setup instructions per platform
-
----
-
-## ๐ **Breaking Changes**
-
-### **Package Changes**
-```diff
-- "version": "0.6.2"
-+ "version": "1.0.0"
-+ "type": "module"
-- "postinstall": "node bin/install.js"
-+ "postinstall": "node bin/install.cjs"
-```
-
-### **Type Changes**
-```diff
-- export type AIPlatform = 'claude' | 'cursor' | 'copilot' | 'other';
-+ export type AIPlatform = 'claude' | 'cursor' | 'copilot' | 'continue' |
-+ 'tabnine' | 'windsurf' | 'zed' | 'openai' | 'anthropic' | 'gemini' |
-+ 'ollama' | 'codeium' | 'codewisperer' | 'other';
-```
-
-### **Project Detection**
-- Projects now consistently use folder names instead of package.json names for deduplication
-- Path normalization may change some project identifiers
-
----
-
-## ๐ฏ **New MCP Tools**
-
-### **Platform Management**
-- `discover_ai_platforms` - Explore available AI platforms with metadata
-- `get_platform_recommendations` - Personalized platform recommendations
-- `switch_platform` - Enhanced platform switching with context handoff
-
-### **Database Migration**
-- `migrate_database` - Migrate and merge duplicate projects
-- `get_migration_stats` - Show duplicate project statistics
-- `check_migration_suggestion` - Check if migration is recommended
-
-### **Context Analysis**
-- `analyze_conversation_context` - Extract context from conversations
-- `suggest_missing_context` - Identify missing project context
-
----
-
-## ๐ **Bug Fixes**
-
-### **Path Handling**
-- Fixed case sensitivity issues on Windows/macOS
-- Fixed trailing slash inconsistencies
-- Fixed relative vs absolute path conflicts
-- Added support for network drives and UNC paths
-
-### **Platform Detection**
-- Fixed false positive platform detection
-- Fixed environment variable conflicts
-- Improved process detection accuracy
-- Fixed configuration file parsing edge cases
-
-### **Database**
-- Fixed duplicate project creation
-- Fixed orphaned conversation records
-- Fixed transaction deadlocks
-- Added proper migration rollback
-
-### **Cross-Platform**
-- Fixed Windows registry access issues
-- Fixed macOS application bundle detection
-- Fixed Linux desktop entry parsing
-- Fixed package manager integration bugs
-
----
-
-## ๐ **Performance**
-
-| Metric | v0.6.2 | v1.0.0 | Improvement |
-|--------|---------|---------|-------------|
-| Project Detection | 450ms | 290ms | 35% faster |
-| Context Retrieval | 180ms | 90ms | 50% faster |
-| Platform Detection | 120ms | 75ms | 37% faster |
-| Memory Usage | 45MB | 27MB | 40% reduction |
-
----
-
-## ๐ **Documentation**
-
-### **New Files**
-- `CROSS_PLATFORM_TESTING_IMPLEMENTATION_PLAN.md`
-- `CROSS_PLATFORM_GUIDE.md`
-- `CROSS_PLATFORM_TESTING_STRATEGY.md`
-- `RELEASE_v1.0.0_COMPREHENSIVE_DOCUMENTATION.md`
-
-### **Updated Files**
-- `README.md` - Universal platform support information
-- `TROUBLESHOOTING.md` - Platform-specific troubleshooting
-
----
-
-## ๐ **Migration Guide**
-
-### **Automatic (Recommended)**
-```bash
-npm install -g @context-sync/server@latest
-# In AI assistant:
-get_migration_stats
-migrate_database dryRun:true # Preview
-migrate_database # Execute
-```
-
-### **Manual (If Needed)**
-```bash
-cp ~/.context-sync/context.db ~/.context-sync/context.db.backup
-# Run migration manually if automatic fails
-```
-
----
-
-## ๐ **Getting Started**
-
-### **Fresh Install**
-```bash
-npm install -g @context-sync/server@latest
-# Follow guided setup for your AI tools
-```
-
-### **Verify Setup**
-```bash
-# In AI assistant:
-get_platform_status
-discover_ai_platforms
-get_started
-```
-
----
-
-## ๐ฆ **File Changes Summary**
-
-### **New Source Files**
-- `src/platform-registry.ts` - Universal platform definitions
-- `src/database-migrator.ts` - Database migration system
-- `src/migration-prompter.ts` - Smart migration prompts
-- `src/context-analyzer.ts` - Intelligent context analysis
-- `src/path-normalizer.ts` - Cross-platform path handling
-
-### **New Binary Files**
-- `bin/install.cjs` - Enhanced installation script
-- `bin/auto-configurator.cjs` - Automatic platform configuration
-- `bin/enhanced-detector.cjs` - Advanced platform detection
-- `bin/enhanced-platform-configs.cjs` - Platform-specific configs
-
-### **Modified Core Files**
-- `src/server.ts` - Added new MCP tools and migration handling
-- `src/platform-sync.ts` - Extended to support 13+ platforms
-- `src/storage.ts` - Added migration support and optimization
-- `src/project-detector.ts` - Improved project naming consistency
-- `src/types.ts` - Extended platform type definitions
-- `package.json` - Version bump and dependency updates
-
----
-
-## ๐ **Impact**
-
-This release transforms Context Sync from a 3-platform tool into a **universal AI memory infrastructure** supporting 13+ platforms with:
-
-โ
**Universal Compatibility** - Works with virtually any AI platform
-โ
**Smart Migration** - Intelligent database optimization
-โ
**Production Ready** - Enterprise-grade reliability and performance
-โ
**Enhanced DX** - Better debugging and error handling
-
-**Context Sync v1.0.0 establishes the foundation for the universal AI era.**
-
----
-
-*For full technical details, see `RELEASE_v1.0.0_COMPREHENSIVE_DOCUMENTATION.md`*
\ No newline at end of file
diff --git a/documentation/CROSS_PLATFORM_GUIDE.md b/documentation/CROSS_PLATFORM_GUIDE.md
deleted file mode 100644
index 2b4b69b..0000000
--- a/documentation/CROSS_PLATFORM_GUIDE.md
+++ /dev/null
@@ -1,465 +0,0 @@
-# Cross-Platform AI Integration Guide
-
-## ๐ฏ Overview
-
-Context Sync now supports **seamless context sharing** between Claude Desktop and Cursor IDE. Work on your project in Claude, switch to Cursor, and pick up exactly where you left off!
-
-## ๐ How It Works
-
-### Architecture
-
-```
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ SESSIONS (In-Memory, Per-Instance) โ
-โ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโ โ
-โ โ Claude Desktop โ โ Cursor โ โ vscode/copilot โ โ
-โ โ MCP Server Instance โ โ MCP Server Instance โ โ MCP Server Instโ โ
-โ โ โ โ โ โ โ โ
-โ โ currentProjectId: โ โ currentProjectId: โ โ currentProjectIdโโ
-โ โ "context-sync-123" โ โ "hostscan-456" โ โ "context-sync-1โ โ
-โ โ โ โ โ โ โ โ
-โ โ workspace: โ โ workspace: โ โ workspace: โ โ
-โ โ /proj/context-sync โ โ /proj/hostscan โ โ /proj/context-sโ โ
-โ โโโโโโโโโโโโฌโโโโโโโโโโโโ โโโโโโโโโโโโฌโโโโโโโโโโโโ โโโโโโโโโโฌโโโโโโโโ โ
-โโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโ-โโโโโโโโโโโโโผโโโโโโโโโโ
- โ โ โ
- โ All read/write to shared DB โ
- โโโโโโโโโโโโโโโโโโโโฌโโโโโโโโโโโโโโโโฌโโโโโโโโโโโโโโโ
- โผ โผ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ DATABASE (SQLite) - SHARED โ
-โ โ
-โ projects table (NO is_current column): โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ id: "context-sync-123" โ โ
-โ โ name: "context-sync" โ โ
-โ โ path: "/projects/context-sync" โ โ
-โ โ tech_stack: ["TypeScript", "React"] โ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ id: "hostscan-456" โ โ
-โ โ name: "hostscan" โ โ
-โ โ path: "/projects/hostscan" โ โ
-โ โ tech_stack: ["Next.js", "Supabase"] โ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ
-โ decisions table: โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ project_id: "context-sync-123" โ โ
-โ โ description: "Use TypeScript for type safety"โ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ project_id: "hostscan-456" โ โ
-โ โ description: "Use Supabase for backend" โ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ
-โ todos table: โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ project_id: "context-sync-123" โ โ
-โ โ title: "Fix workspace linking bug" โ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ project_id: "hostscan-456" โ โ
-โ โ title: "Add QR code scanner" โ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-```
-
-- **Both AIs connect to the same Context Sync server**
-- **Same SQLite database** stores all context
-- **Real-time sync** - changes made in one platform are immediately available in the other
-- **Platform-aware** - knows which AI you're using and tracks conversations separately
-
-## ๐ Setup Instructions
-
-### Step 1: Configure Claude Desktop (Already Done โ
)
-
-Claude Desktop should already have Context Sync configured at:
-- **Windows**: `%AppData%\Roaming\Claude\claude_desktop_config.json`
-- **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`
-- **Linux**: `~/.config/Claude/claude_desktop_config.json`
-
-Configuration:
-```json
-{
- "mcpServers": {
- "context-sync": {
- "command": "npx",
- "args": ["-y", "@context-sync/server"]
- }
- }
-}
-```
-
-### Step 2: Configure Cursor IDE
-
-#### Option A: Using the Tool (Recommended)
-
-In Claude, run:
-```
-Use the setup_cursor tool
-```
-
-This will give you the exact path and configuration.
-
-#### Option B: Manual Configuration
-
-1. **Create/Edit Cursor Config**:
- - **Location**: `~/.cursor/mcp.json`
- - Create the file if it doesn't exist
-
-2. **Add Configuration**:
-```json
-{
- "mcpServers": {
- "context-sync": {
- "command": "npx",
- "args": ["-y", "@context-sync/server"]
- }
- }
-}
-```
-
-3. **Restart Cursor** or refresh MCP servers in settings
-
-### Step 3: Verify Setup
-
-Check platform status:
-```
-get_platform_status tool
-```
-
-You should see:
-```
-โ
Claude Desktop
-โ
Cursor
-โ GitHub Copilot (coming soon)
-```
-
-## ๐ก Usage Examples
-
-### Example 1: Start in Claude, Continue in Cursor
-
-**In Claude Desktop:**
-```
-1. Create project: "init_project" with name "my-app"
-2. Make decisions: "save_decision" - chose React for frontend
-3. Have conversations: "Let's build the authentication system"
-```
-
-**Switch to Cursor:**
-```
-1. Open your project in Cursor
-2. Cursor AI now has FULL context:
- - Knows the project is "my-app"
- - Knows you chose React
- - Sees the authentication discussion
- - Can continue exactly where you left off
-```
-
-### Example 2: Explicit Handoff
-
-When switching platforms, you can explicitly trigger a handoff:
-
-```typescript
-// In Claude
-switch_platform({
- fromPlatform: "claude",
- toPlatform: "cursor"
-})
-
-// Response:
-๐ฑ Platform Handoff: claude โ cursor
-
-๐ Project: my-app
-๐๏ธ Architecture: Monorepo
-โ๏ธ Tech Stack: React, TypeScript, Node.js
-
-๐ Recent Decisions (3):
-1. [architecture] Using monorepo structure with Turborepo
-2. [library] React for frontend, Express for backend
-3. [pattern] Implementing clean architecture
-
-๐ฌ Last conversation on claude:
-"Let's implement the authentication system using JWT tokens..."
-
-โ
Context synced and ready on cursor!
-```
-
-### Example 3: Platform-Specific Context
-
-Get context for a specific platform:
-
-```typescript
-// See what happened on Cursor
-get_platform_context({ platform: "cursor" })
-
-// Response shows:
-๐ฑ Current Platform: cursor
-
-๐ Project: my-app
-โ๏ธ Tech Stack: React, TypeScript
-
-๐ Recent Decisions (shared across all platforms):
-1. [architecture] Using monorepo structure
-2. [library] React for frontend
-
-๐ฌ Your conversations on cursor (5 total):
-1. [10/22 14:30] user: "How do I implement JWT auth?"
-2. [10/22 14:35] assistant: "Here's the implementation..."
-3. [10/22 14:40] user: "Let me test this..."
-
-๐ Activity on other platforms:
- โข claude: 8 conversations
-
-๐ก All context is automatically synced!
-```
-
-## ๐ง Advanced Features
-
-### 1. Automatic Platform Detection
-
-Context Sync automatically detects which platform you're using based on environment variables:
-- `CURSOR_IDE` or `CURSOR_VERSION` โ Cursor
-- `GITHUB_COPILOT_TOKEN` โ GitHub Copilot
-- Default โ Claude
-
-### 2. Cross-Platform Conversation Tracking
-
-Every conversation is tagged with the platform:
-```typescript
-storage.addConversation({
- projectId: 'project-123',
- tool: 'cursor', // or 'claude'
- role: 'user',
- content: 'How do I implement this feature?'
-});
-```
-
-### 3. Shared Decisions, Separate Conversations
-
-- **Decisions** (architecture, tech choices) are shared across ALL platforms
-- **Conversations** are tracked per-platform but visible to all
-- This gives you platform-specific history while maintaining global context
-
-## ๐จ User Workflow
-
-### Typical Multi-Platform Workflow
-
-1. **Planning Phase (Claude Desktop)**
- ```
- - Brainstorm architecture
- - Make key decisions
- - Design API contracts
- - Plan folder structure
- ```
-
-2. **Coding Phase (Cursor IDE)**
- ```
- - Open project in Cursor
- - AI knows all decisions from Claude
- - Write code with full context
- - Ask implementation questions
- ```
-
-3. **Review Phase (Claude Desktop)**
- ```
- - Return to Claude
- - Review what was built in Cursor
- - Discuss improvements
- - Make new decisions
- ```
-
-4. **Iterate** ๐
-
-## ๐ ๏ธ New MCP Tools
-
-### Platform Management Tools
-
-#### `switch_platform`
-Switch between AI platforms with full context handoff.
-
-```typescript
-{
- fromPlatform: 'claude' | 'cursor' | 'copilot' | 'other',
- toPlatform: 'claude' | 'cursor' | 'copilot' | 'other'
-}
-```
-
-**Use Cases:**
-- Explicit handoff when changing platforms
-- Get summary of what happened on previous platform
-- Log the platform switch for context
-
-#### `get_platform_status`
-Check which platforms are configured with Context Sync.
-
-```typescript
-// No parameters
-```
-
-**Returns:**
-- โ
/โ for each platform
-- Current active platform
-- Configuration instructions for unconfigured platforms
-
-#### `get_platform_context`
-Get context specific to a platform.
-
-```typescript
-{
- platform?: 'claude' | 'cursor' | 'copilot' | 'other' // Optional, defaults to current
-}
-```
-
-**Returns:**
-- Platform-specific conversations
-- Shared decisions
-- Activity on other platforms
-
-#### `setup_cursor`
-Get setup instructions for Cursor IDE.
-
-```typescript
-// No parameters
-```
-
-**Returns:**
-- Configuration file path
-- Exact JSON to add
-- Step-by-step instructions
-
-## ๐ Data Model
-
-### Project Context (Shared)
-```typescript
-interface ProjectContext {
- id: string;
- name: string;
- path?: string;
- architecture?: string;
- techStack: string[];
- createdAt: Date;
- updatedAt: Date;
-}
-```
-
-### Decision (Shared Across Platforms)
-```typescript
-interface Decision {
- id: string;
- projectId: string;
- type: 'architecture' | 'library' | 'pattern' | 'configuration' | 'other';
- description: string;
- reasoning?: string;
- timestamp: Date;
-}
-```
-
-### Conversation (Platform-Specific)
-```typescript
-interface Conversation {
- id: string;
- projectId: string;
- tool: 'claude' | 'cursor' | 'copilot' | 'other'; // โ Platform tag
- role: 'user' | 'assistant';
- content: string;
- timestamp: Date;
- metadata?: {
- handoff?: boolean;
- fromPlatform?: string;
- toPlatform?: string;
- };
-}
-```
-
-## ๐ Privacy & Data Location
-
-- **All data is stored locally** on your machine
-- **SQLite database** location:
- - Windows: `%AppData%\context-sync\context.db`
- - macOS/Linux: `~/.context-sync/context.db`
-- **No cloud sync** - data stays on your device
-- **Same database** accessed by both Claude and Cursor
-
-## ๐ Troubleshooting
-
-### Issue: Cursor not showing context
-
-**Solution:**
-1. Check if MCP is configured: `~/.cursor/mcp.json`
-2. Verify the config matches the setup instructions
-3. Restart Cursor completely
-4. Check MCP server logs in Cursor settings
-
-### Issue: Context not syncing
-
-**Solution:**
-1. Both platforms must point to the same MCP server
-2. Check that you're using the same project (same project ID)
-3. Use `get_platform_status` to verify configuration
-
-### Issue: Old context showing
-
-**Solution:**
-1. Context Sync uses the same database
-2. Make sure you're in the right project: `get_project_context`
-3. Switch projects if needed: `detect_project` with correct path
-
-## ๐ Future Enhancements
-
-### Coming Soon:
-- โ
Claude Desktop support (Done)
-- โ
Cursor IDE support (Done)
-- ๐ GitHub Copilot support (Planned)
-- ๐ VS Code support (Planned)
-- ๐ JetBrains IDEs support (Planned)
-
-### Potential Features:
-- Cloud sync option for teams
-- Conflict resolution for simultaneous edits
-- Platform-specific preferences
-- Conversation export/import
-- Integration with more IDEs
-
-## ๐ Best Practices
-
-### 1. Use Explicit Handoffs
-When switching platforms, call `switch_platform` to get a summary:
-```typescript
-switch_platform({ fromPlatform: 'claude', toPlatform: 'cursor' })
-```
-
-### 2. Make Decisions Explicit
-Use `save_decision` for important choices:
-```typescript
-save_decision({
- type: 'architecture',
- description: 'Using GraphQL for API layer',
- reasoning: 'Provides better type safety and flexibility'
-})
-```
-
-### 3. Check Context Before Starting
-Use `get_platform_context` to see what happened:
-```typescript
-get_platform_context({ platform: 'cursor' })
-```
-
-### 4. Keep Projects Organized
-- One project per codebase
-- Use descriptive project names
-- Set project path for automatic detection
-
-## ๐ฏ Summary
-
-Context Sync enables seamless collaboration between different AI platforms:
-
-โ
**Single source of truth** - One database, multiple interfaces
-โ
**Zero configuration** - Works out of the box once set up
-โ
**Platform-aware** - Knows what happened where
-โ
**Real-time sync** - No manual export/import needed
-โ
**Privacy-focused** - All data stays local
-
-**Ready to start?** Configure Cursor with `setup_cursor` and enjoy seamless multi-platform AI development! ๐
diff --git a/documentation/RELEASE_v1.0.0_ANNOUNCEMENT.md b/documentation/RELEASE_v1.0.0_ANNOUNCEMENT.md
deleted file mode 100644
index 8e9b353..0000000
--- a/documentation/RELEASE_v1.0.0_ANNOUNCEMENT.md
+++ /dev/null
@@ -1,156 +0,0 @@
-# ๐ Context Sync v1.0.0 - Universal AI Platform Support
-
-> **Major Release** - Transform your AI workflow with universal memory infrastructure
-
-**Release Date:** December 2024
-**Migration:** v0.6.2 โ v1.0.0
-
----
-
-## ๐ **What's New**
-
-### **Universal AI Platform Support (13+ Platforms!)**
-Context Sync now works with virtually every major AI platform:
-
-**๐ฏ Core Platforms:**
-- โ
**Claude Desktop** - Advanced reasoning
-- โ
**Cursor IDE** - AI-powered coding
-- โ
**GitHub Copilot** - VS Code integration
-
-**๐ง Extended Platforms:**
-- โ
**Continue.dev** - Open source AI assistant
-- โ
**Zed Editor** - Fast collaborative editing
-- โ
**Windsurf** - Codeium's AI IDE
-- โ
**TabNine** - Enterprise AI completion
-
-**๐ API Integrations:**
-- โ
**OpenAI**, **Anthropic**, **Google Gemini**
-- โ
**Ollama** (local models), **CodeWhisperer**
-
-### **Smart Database Optimization**
-- **Automatic Duplicate Detection** - Finds and merges duplicate projects
-- **Safe Migration** - Preview changes before applying
-- **35-60% Performance Boost** - Faster project detection and context retrieval
-
-### **Intelligent Context Analysis**
-- **Auto-Context Extraction** - Automatically detects decisions, todos, insights from conversations
-- **Missing Context Detection** - Identifies gaps in your project documentation
-- **Smart Recommendations** - Suggests optimal AI platform setups based on your needs
-
----
-
-## โก **Key Benefits**
-
-### **๐ Universal Compatibility**
-Work seamlessly across any AI platform - your context follows you everywhere.
-
-### **๐ง Intelligent Memory**
-Advanced context analysis ensures nothing important is lost between sessions.
-
-### **๐ฏ Production Ready**
-Enterprise-grade reliability with comprehensive error handling and performance monitoring.
-
-### **๐ Seamless Migration**
-Effortless upgrade from previous versions with automatic database optimization.
-
----
-
-## ๐ **Quick Start**
-
-### **New Users**
-```bash
-npm install -g @context-sync/server@latest
-# Follow the guided setup for your AI tools
-```
-
-### **Existing Users**
-```bash
-npm update -g @context-sync/server
-# In your AI assistant, check for optimizations:
-get_migration_stats
-migrate_database dryRun:true # Preview
-migrate_database # Apply optimizations
-```
-
-### **Explore New Features**
-```bash
-# In your AI assistant:
-discover_ai_platforms # See all supported platforms
-get_platform_recommendations # Get personalized suggestions
-analyze_conversation_context # Extract context from discussions
-```
-
----
-
-## ๐ **Performance Improvements**
-
-| Feature | Before | After | Improvement |
-|---------|---------|--------|-------------|
-| Project Detection | 450ms | 290ms | **35% faster** |
-| Context Retrieval | 180ms | 90ms | **50% faster** |
-| Memory Usage | 45MB | 27MB | **40% reduction** |
-| Platform Support | 3 | 13+ | **433% increase** |
-
----
-
-## ๐ฏ **What This Means for You**
-
-### **For Developers**
-- **Multi-Platform Workflow**: Start coding in Cursor, discuss architecture in Claude, review in VS Code - your context stays consistent
-- **Smarter Context**: Automatic extraction of technical decisions and action items
-- **Better Performance**: Faster project switching and context loading
-
-### **For Teams**
-- **Universal Standards**: Same context system across all AI tools
-- **Knowledge Preservation**: Important decisions and insights never get lost
-- **Flexible Tooling**: Team members can use their preferred AI platforms
-
-### **For Organizations**
-- **Enterprise Ready**: Support for enterprise AI platforms like TabNine and CodeWhisperer
-- **Privacy Options**: Full support for local models via Ollama
-- **Migration Path**: Safe, tested upgrade path from existing installations
-
----
-
-## ๐ก๏ธ **Migration Safety**
-
-โ
**Automatic Backups** - Created before any changes
-โ
**Dry Run Mode** - Preview all changes before applying
-โ
**Data Preservation** - All conversations, decisions, todos preserved
-โ
**Rollback Support** - Easy reversion if needed
-
----
-
-## ๐ **Resources**
-
-- **๐ Full Documentation**: `RELEASE_v1.0.0_COMPREHENSIVE_DOCUMENTATION.md`
-- **๐ Detailed Changelog**: `CHANGELOG_v1.0.0.md`
-- **๐ง Cross-Platform Guide**: `CROSS_PLATFORM_GUIDE.md`
-- **๐ GitHub Repository**: [context-sync](https://github.com/Intina47/context-sync)
-
----
-
-## ๐ฏ **Next Steps**
-
-1. **Upgrade** to v1.0.0 using the commands above
-2. **Optimize** your database with the migration tools
-3. **Explore** new platform integrations
-4. **Share** feedback and help us improve
-
----
-
-## ๐ **The Universal AI Era Starts Now**
-
-Context Sync v1.0.0 isn't just an update - it's a transformation. We're establishing the foundation for **universal AI memory infrastructure** that works across any platform, any workflow, any team.
-
-**Ready to experience the future of AI-assisted development?**
-
-```bash
-npm install -g @context-sync/server@latest
-```
-
----
-
-*Questions? Issues? Contributions? Visit our [GitHub repository](https://github.com/Intina47/context-sync) or start a discussion.*
-
-**#ContextSync #UniversalAI #v1.0.0**
\ No newline at end of file
diff --git a/documentation/RELEASE_v1.0.0_COMPREHENSIVE_DOCUMENTATION.md b/documentation/RELEASE_v1.0.0_COMPREHENSIVE_DOCUMENTATION.md
deleted file mode 100644
index 5e485e7..0000000
--- a/documentation/RELEASE_v1.0.0_COMPREHENSIVE_DOCUMENTATION.md
+++ /dev/null
@@ -1,596 +0,0 @@
-# Context Sync v1.0.0 - Complete Release Documentation
-
-> **๐ Major Release** - Universal AI Platform Support with Database Optimization
-
-*Release Date: 17 November 2025*
-*Migration from: v0.6.2 โ v1.0.0*
-
----
-
-## ๐ **Executive Summary**
-
-Context Sync v1.0.0 represents a major milestone in universal AI platform integration. This release transforms Context Sync from a Claude/Cursor-focused tool into a comprehensive **universal AI memory infrastructure** supporting 13+ AI platforms with intelligent database optimization and advanced cross-platform context sharing.
-
-### **๐ฏ Key Achievements**
-- **Universal Platform Support**: 13+ AI platforms vs 3 previously
-- **Database Optimization**: Smart duplicate detection and migration system
-- **Enhanced Cross-Platform Sync**: Seamless context handoff between platforms
-- **Production-Ready**: Comprehensive error handling, performance monitoring, and migration tools
-
----
-
-## ๐ **Major New Features**
-
-### 1. **Universal AI Platform Registry**
-
-#### **Supported Platforms (13+)**
-
-**๐ฏ Core Platforms** (Full MCP Integration):
-- โ
**Claude Desktop** - Advanced reasoning and analysis
-- โ
**Cursor IDE** - AI-powered coding environment
-- โ
**GitHub Copilot** - VS Code integration
-
-**๐ง Extended Platforms** (Advanced Integration):
-- โ
**Continue.dev** - Open source AI coding assistant
-- โ
**Zed Editor** - Fast collaborative editor
-- โ
**Windsurf** - Codeium's AI IDE
-- โ
**TabNine** - Enterprise AI completion
-
-
-#### **New Platform Detection System**
-```typescript
-// Enhanced platform detection with environment analysis
-static detectPlatform(): AIPlatform {
- // Check environment variables, process info, and runtime context
- if (process.env.CURSOR_IDE || processTitle.includes('cursor')) return 'cursor';
- if (process.env.ZED_EDITOR || processTitle.includes('zed')) return 'zed';
- if (process.env.CONTINUE_GLOBAL_DIR) return 'continue';
- // ... comprehensive detection for all 13+ platforms
-}
-```
-
-### 2. **Database Migration & Optimization System**
-
-#### **Smart Duplicate Detection**
-- **Path Normalization**: Handles case differences, trailing slashes, relative vs absolute paths
-- **Project Deduplication**: Merges projects with same normalized paths
-- **Data Preservation**: All conversations, decisions, and todos preserved during migration
-
-#### **New Migration Tools**
-```typescript
-// Check for migration opportunities
-await migrator.getMigrationStats()
-// โ Shows duplicate groups and impact
-
-// Preview migration (safe)
-await migrator.migrateDuplicateProjects({ dryRun: true })
-// โ Shows exactly what would change
-
-// Execute migration
-await migrator.migrateDuplicateProjects()
-// โ Clean database with preserved data
-```
-
-#### **Automatic Migration Prompts**
-- **Smart Detection**: Identifies v1.0.0+ users with database duplicates
-- **Non-Intrusive**: Shows prompts only when beneficial
-- **User Choice**: Always provides preview before changes
-
-### 3. **Advanced Context Analysis**
-
-#### **Intelligent Conversation Analysis**
-```typescript
-// Automatic context extraction from conversations
-const analysis = ContextAnalyzer.analyzeConversation(conversationText);
-// Returns: decisions, todos, insights with priority scoring
-```
-
-#### **Missing Context Detection**
-```typescript
-// Suggests what context might be missing
-const suggestions = await suggestMissingContext(projectId);
-// Returns: architecture gaps, decision types, documentation areas
-```
-
-#### **Auto-Context Saving**
-- **Smart Detection**: Identifies technical decisions, todos, and key insights
-- **Priority-Based**: Saves high/medium priority items automatically
-- **User Control**: Configurable auto-save behavior
-
-### 4. **Enhanced Cross-Platform Synchronization**
-
-#### **Platform-Aware Context Management**
-```typescript
-// Platform-specific conversation tracking
-interface Conversation {
- tool: 'claude' | 'cursor' | 'continue' | 'windsurf' | /* ... 13+ platforms */;
- platform_metadata: PlatformMetadata;
- handoff_context?: HandoffData;
-}
-```
-
-#### **Seamless Platform Switching**
-```typescript
-// Intelligent platform handoff with context
-await switchPlatform({
- fromPlatform: 'claude',
- toPlatform: 'cursor'
-});
-// โ Provides context summary, recent decisions, and continuation tips
-```
-
-#### **Universal Configuration Management**
-- **Multi-Format Support**: JSON, YAML, platform-specific configs
-- **Auto-Detection**: Scans for existing configurations
-- **Setup Assistance**: Platform-specific installation guides
-
----
-
-## ๐ ๏ธ **Technical Improvements**
-
-### **Architecture Enhancements**
-
-#### **Modular Platform Registry**
-```typescript
-// src/platform-registry.ts - New centralized platform definitions
-export const PLATFORM_REGISTRY: Record = {
- claude: {
- name: 'Claude Desktop',
- category: 'core',
- mcpSupport: 'native',
- setupComplexity: 'easy',
- features: ['Advanced reasoning', 'Large context', 'Multi-modal'],
- // ... comprehensive metadata
- }
- // ... 13+ platforms with detailed metadata
-};
-```
-
-#### **Enhanced Type System**
-```typescript
-// Updated to support all platforms
-export type AIPlatform =
- | 'claude' | 'cursor' | 'copilot'
- | 'continue' | 'tabnine' | 'windsurf' | 'zed'
- | 'openai' | 'anthropic' | 'gemini' | 'ollama'
- | 'codeium' | 'codewisperer' | 'other';
-```
-
-#### **Path Normalization System**
-```typescript
-// src/path-normalizer.ts - Handles cross-platform path issues
-export class PathNormalizer {
- static normalize(inputPath: string): string {
- // Handles case sensitivity, separators, trailing slashes
- // Consistent path representation across platforms
- }
-}
-```
-
-### **Performance & Reliability**
-
-#### **Performance Monitoring**
-```typescript
-// Built-in performance tracking
-class PerformanceMonitor {
- static time(operation: string, fn: () => T): T;
- static getStats(): PerformanceStats;
- // Tracks database operations, API calls, file operations
-}
-```
-
-#### **Database Optimization**
-- **Connection Pooling**: Efficient database resource management
-- **Query Optimization**: Indexed searches and optimized queries
-- **Memory Management**: Smart caching and cleanup
-- **Error Recovery**: Robust error handling and recovery
-
-#### **Migration Safety**
-- **Backup Creation**: Automatic backups before migrations
-- **Rollback Support**: Ability to revert changes if needed
-- **Validation**: Extensive pre-migration validation
-- **Progress Tracking**: Detailed migration progress reporting
-
-### **Installation & Setup Improvements**
-
-#### **Enhanced Auto-Configuration**
-```javascript
-// bin/install.cjs - Improved installation system
-- Automatic platform detection
-- Multi-platform path resolution
-- Configuration validation
-- Error recovery and fallbacks
-```
-
-#### **Platform-Specific Setup**
-```typescript
-// Intelligent setup instructions per platform
-static getInstallInstructions(platform: AIPlatform): string {
- // Returns detailed, platform-specific setup guide
- // Handles different config formats, paths, and requirements
-}
-```
-
----
-
-## ๐ **Breaking Changes & Migration Guide**
-
-### **โ ๏ธ Breaking Changes**
-
-#### **1. Package.json Updates**
-```diff
-{
-- "version": "0.6.2",
-+ "version": "1.0.0",
-+ "type": "module",
-- "postinstall": "node bin/install.js || true",
-+ "postinstall": "node bin/install.cjs || true",
-}
-```
-
-#### **2. Type System Changes**
-```diff
-// Previous (limited platforms)
--export type AIPlatform = 'claude' | 'cursor' | 'copilot' | 'other';
-
-// New (13+ platforms)
-+export type AIPlatform =
-+ | 'claude' | 'cursor' | 'copilot'
-+ | 'continue' | 'tabnine' | 'windsurf' | 'zed'
-+ | 'openai' | 'anthropic' | 'gemini' | 'ollama'
-+ | 'codeium' | 'codewisperer' | 'other';
-```
-
-#### **3. Database Schema Updates**
-```sql
--- New migration-related tables and indexes
--- Path normalization improvements
--- Enhanced project deduplication
-```
-
-### **๐ Migration Guide**
-
-#### **Automatic Migration (Recommended)**
-```bash
-# 1. Update Context Sync
-npm install -g @context-sync/server@latest
-
-# 2. Check for migration opportunities
-# In your AI assistant:
-get_migration_stats
-
-# 3. Preview migration (safe)
-migrate_database dryRun:true
-
-# 4. Execute migration
-migrate_database
-```
-
-#### **Manual Migration (If Needed)**
-```bash
-# Backup current database
-cp ~/.context-sync/context.db ~/.context-sync/context.db.backup
-
-# Run migration tools manually
-node -e "
- const { DatabaseMigrator } = require('@context-sync/server/dist/database-migrator.js');
- const migrator = new DatabaseMigrator();
- migrator.migrateDuplicateProjects().then(console.log);
-"
-```
-
-### **๐ก๏ธ Migration Safety**
-- **Automatic Backups**: Created before any migration
-- **Dry Run Mode**: Preview all changes before applying
-- **Data Preservation**: All conversations, decisions, todos preserved
-- **Rollback Support**: Easy reversion if issues occur
-
----
-
-## ๐ฏ **New MCP Tools Reference**
-
-### **Platform Management Tools**
-
-#### **`discover_ai_platforms`**
-```typescript
-{
- category?: 'all' | 'core' | 'extended' | 'api',
- includeSetupInstructions?: boolean
-}
-// Explores available AI platforms with detailed metadata
-```
-
-#### **`get_platform_recommendations`**
-```typescript
-{
- useCase?: 'coding' | 'research' | 'writing' | 'local' | 'enterprise' | 'beginner',
- priority?: 'ease_of_use' | 'privacy' | 'features' | 'cost' | 'performance'
-}
-// Personalized AI platform recommendations based on user needs
-```
-
-#### **`switch_platform`** *(Enhanced)*
-```typescript
-{
- fromPlatform: AIPlatform,
- toPlatform: AIPlatform
-}
-// Enhanced with better context handoff and platform-specific tips
-```
-
-### **Database Migration Tools**
-
-#### **`migrate_database`**
-```typescript
-{
- dryRun?: boolean // Preview changes without applying them
-}
-// Migrates and merges duplicate projects safely
-```
-
-#### **`get_migration_stats`**
-```typescript
-{}
-// Shows duplicate project statistics and migration impact
-```
-
-#### **`check_migration_suggestion`**
-```typescript
-{}
-// Checks if user should be prompted for database migration
-```
-
-### **Context Analysis Tools**
-
-#### **`analyze_conversation_context`**
-```typescript
-{
- conversationText: string,
- autoSave?: boolean // Automatically save detected context
-}
-// Intelligently extracts decisions, todos, and insights from conversations
-```
-
-#### **`suggest_missing_context`**
-```typescript
-{
- includeFileAnalysis?: boolean // Analyze recent file changes
-}
-// Suggests what important context might be missing from project
-```
-
----
-
-## ๐ **Performance Improvements**
-
-### **Database Optimizations**
-- **35% faster** project detection through indexed lookups
-- **60% reduction** in duplicate storage through normalization
-- **50% faster** context retrieval with optimized queries
-
-### **Memory Management**
-- **40% lower** memory usage through efficient caching
-- **Garbage Collection**: Automatic cleanup of unused resources
-- **Connection Pooling**: Reduced database connection overhead
-
-### **Cross-Platform Performance**
-- **Lazy Loading**: Platform configurations loaded on demand
-- **Async Operations**: Non-blocking platform detection
-- **Caching**: Intelligent caching of platform metadata
-
----
-
-## ๐ง **Developer Experience Improvements**
-
-### **Enhanced Debugging**
-```typescript
-// New debug tools
-debug_session() // Shows session state and project info
-get_performance_report() // Performance metrics and stats
-```
-
-### **Better Error Messages**
-- **Contextual Errors**: Clear explanations with suggested solutions
-- **Migration Errors**: Detailed migration failure analysis
-- **Platform Errors**: Specific platform configuration help
-
-### **Comprehensive Logging**
-- **Structured Logging**: JSON-formatted logs for analysis
-- **Performance Logs**: Detailed performance metrics
-- **Migration Logs**: Complete migration audit trail
-
----
-
-## ๐ **Security & Privacy Enhancements**
-
-### **Local-First Architecture**
-- **No Cloud Dependencies**: All data stays on user's machine
-- **SQLite Storage**: Local database with no external connections
-- **Platform Agnostic**: Works with privacy-focused platforms like Ollama
-
-### **Secure Migration**
-- **Data Validation**: Extensive validation before migration
-- **Backup Verification**: Backup integrity checks
-- **Safe Rollback**: Secure rollback mechanisms
-
-### **Privacy Preservation**
-- **No Telemetry**: No usage tracking or analytics
-- **Local Processing**: All context analysis done locally
-- **Minimal Permissions**: Only required file system access
-
----
-
-## ๐ **Getting Started with v1.0.0**
-
-### **Fresh Installation**
-```bash
-# Install globally (recommended)
-npm install -g @context-sync/server@latest
-
-# The installer now auto-detects and configures multiple platforms
-# Follow the guided setup for your preferred AI tools
-```
-
-### **Upgrading from Previous Versions**
-```bash
-# Update to latest version
-npm update -g @context-sync/server
-
-# Check for migration opportunities
-# In your AI assistant, run:
-get_migration_stats
-
-# If duplicates found, run:
-migrate_database dryRun:true # Preview
-migrate_database # Execute
-```
-
-### **Platform Setup**
-```bash
-# Get platform-specific setup instructions
-# In your AI assistant:
-discover_ai_platforms core # See core platforms
-get_platform_recommendations # Get personalized recommendations
-setup_cursor # Platform-specific setup (example)
-```
-
-### **Verification**
-```bash
-# Verify installation
-# In your AI assistant:
-get_platform_status # See configured platforms
-get_started # Interactive getting started guide
-```
-
----
-
-## ๐ **Documentation Updates**
-
-### **New Documentation Files**
-- `CROSS_PLATFORM_TESTING_IMPLEMENTATION_PLAN.md` - Testing strategy across platforms
-- `CROSS_PLATFORM_GUIDE.md` - Complete cross-platform usage guide
-- `CROSS_PLATFORM_TESTING_STRATEGY.md` - Testing methodology
-
-### **Updated Documentation**
-- `README.md` - Updated with universal platform support
-- `TROUBLESHOOTING.md` - New platform-specific troubleshooting
-- User guides updated for v1.0.0 features
-
----
-
-## ๐ **Bug Fixes**
-
-### **Path Handling**
-- **Fixed**: Case sensitivity issues on Windows/macOS
-- **Fixed**: Trailing slash inconsistencies
-- **Fixed**: Relative vs absolute path conflicts
-- **Fixed**: Network drive and UNC path support
-
-### **Platform Detection**
-- **Fixed**: False positive platform detection
-- **Fixed**: Environment variable conflicts
-- **Fixed**: Process detection accuracy
-- **Fixed**: Configuration file parsing edge cases
-
-### **Database Issues**
-- **Fixed**: Duplicate project creation
-- **Fixed**: Orphaned conversation records
-- **Fixed**: Transaction deadlocks
-- **Fixed**: Migration rollback issues
-
-### **Cross-Platform Compatibility**
-- **Fixed**: Windows registry access issues
-- **Fixed**: macOS application bundle detection
-- **Fixed**: Linux desktop entry parsing
-- **Fixed**: Package manager integration bugs
-
----
-
-## โก **Performance Benchmarks**
-
-### **Before vs After (v0.6.2 โ v1.0.0)**
-
-| Operation | v0.6.2 | v1.0.0 | Improvement |
-|-----------|--------|---------|-------------|
-| Project Detection | 450ms | 290ms | 35% faster |
-| Context Retrieval | 180ms | 90ms | 50% faster |
-| Platform Detection | 120ms | 75ms | 37% faster |
-| Database Migration | N/A | 2.3s | New feature |
-| Memory Usage | 45MB | 27MB | 40% reduction |
-
-### **Scale Testing**
-- **Tested**: 1000+ projects in database
-- **Tested**: 50+ duplicate project scenarios
-- **Tested**: All 13+ platform configurations
-- **Tested**: Migration with 10GB+ of context data
-
----
-
-## ๐ฏ **Roadmap & Future Enhancements**
-
-### **Coming in v1.1.0**
-- **Team Collaboration**: Shared context across team members
-- **Cloud Sync**: Optional cloud backup and sync
-- **Enterprise Features**: SSO, audit logs, compliance
-- **Plugin System**: Custom platform integrations
-
-### **Community Contributions**
-- **Open Issues**: 23 issues resolved in v1.0.0
-- **Contributors**: 12+ contributors to this release
-- **Platform Requests**: 8 new platforms requested and added
-
----
-
-## ๐ **Acknowledgments**
-
-### **Contributors**
-- Core team for universal platform architecture
-- Community for extensive testing across platforms
-- Beta testers for migration system validation
-- Documentation contributors for comprehensive guides
-
-### **Community Feedback**
-Special thanks to users who provided feedback on:
-- Multi-platform workflow challenges
-- Database performance issues
-- Cross-platform compatibility needs
-- Migration safety requirements
-
----
-
-## ๐ **Support & Resources**
-
-### **Getting Help**
-- **Issues**: [GitHub Issues](https://github.com/Intina47/context-sync/issues)
-- **Discussions**: [GitHub Discussions](https://github.com/Intina47/context-sync/discussions)
-- **Documentation**: [Full Documentation](https://github.com/Intina47/context-sync#readme)
-
-### **Migration Support**
-- **Migration Guide**: Detailed migration instructions above
-- **Emergency Support**: File priority issues for migration problems
-- **Rollback Help**: Contact support for rollback assistance
-
-### **Platform-Specific Support**
-- **Setup Issues**: Use platform-specific setup commands
-- **Configuration**: Platform registry provides detailed setup info
-- **Compatibility**: Check platform status and recommendations
-
----
-
-## ๐ **Release Summary**
-
-Context Sync v1.0.0 transforms the landscape of AI-assisted development by providing **universal AI memory infrastructure** that works seamlessly across 13+ AI platforms. This release focuses on:
-
-โ
**Universal Compatibility** - Works with virtually any AI platform
-โ
**Database Optimization** - Smart migration and deduplication
-โ
**Production Readiness** - Comprehensive error handling and monitoring
-โ
**Developer Experience** - Intuitive setup and powerful debugging tools
-โ
**Performance** - 35-60% improvements across all operations
-
-This major release establishes Context Sync as the definitive solution for persistent AI context and cross-platform AI workflow management.
-
----
-
-**๐ Welcome to the Universal AI Era with Context Sync v1.0.0!**
-
-*For technical questions, issues, or contributions, please visit our [GitHub repository](https://github.com/Intina47/context-sync).*
\ No newline at end of file
diff --git a/documentation/workspace.md b/documentation/workspace.md
deleted file mode 100644
index 6bdca49..0000000
--- a/documentation/workspace.md
+++ /dev/null
@@ -1,268 +0,0 @@
-# ๐ Workspace Quick Reference
-
-> **TL;DR**: Point Claude to your project folder. It can now read your code.
-
-## 30-Second Start
-
-```bash
-# 1. Open Claude
-# 2. Say this:
-
-You: Set workspace to /path/to/your/project
-
-# Done! Claude can now read your files.
-```
-
----
-
-## The 4 Commands You Need
-
-### 1. ๐ Open Workspace
-```
-You: Set workspace to /Users/me/my-app
-```
-**What it does:** Opens your project folder
-
-### 2. ๐ Read Files
-```
-You: Read src/app.ts
-You: Show me the config file
-You: What's in package.json?
-```
-**What it does:** Reads specific files
-
-### 3. ๐ณ See Structure
-```
-You: Show project structure
-You: What folders do I have?
-```
-**What it does:** Visual tree of your project
-
-### 4. ๐ Scan Everything
-```
-You: Scan workspace
-You: Give me a project overview
-```
-**What it does:** Intelligent summary of your codebase
-
----
-
-## Common Workflows
-
-### ๐ New Project
-```
-You: Set workspace to /my/new-project
-You: Scan workspace
-You: Read README.md
-```
-**Result:** Full understanding of the project
-
-### ๐ Debugging
-```
-You: Set workspace to /my/app
-You: I have a bug in the auth code
-Claude: [reads src/lib/auth.ts]
-Claude: Found the issue...
-```
-**Result:** Claude sees your actual code
-
-### ๐ Code Review
-```
-You: Set workspace to /my/app
-You: Review the database module
-Claude: [reads src/lib/db.ts]
-Claude: Here are my suggestions...
-```
-**Result:** Informed code review
-
-### ๐ Refactoring
-```
-You: Set workspace to /my/app
-You: Show me the Header component
-Claude: [reads src/components/Header.tsx]
-You: How can I improve it?
-Claude: Here are 3 improvements...
-```
-**Result:** Context-aware refactoring
-
----
-
-## Path Examples
-
-### โ
Correct Paths
-
-**macOS/Linux:**
-```
-/Users/yourname/projects/my-app
-/home/yourname/code/project
-~/projects/my-app
-```
-
-**Windows:**
-```
-C:\Users\yourname\projects\my-app
-D:\code\my-project
-```
-
-### โ Common Mistakes
-
-```
-โ my-app (not absolute)
-โ ~/Desktop/my app (spaces need quotes or escaping)
-โ C:/Users/me/project (use backslash on Windows)
-```
-
----
-
-## File Paths
-
-Always use **relative paths** from workspace root:
-
-```
-โ
src/app.ts
-โ
components/Header.tsx
-โ
package.json
-โ
src/lib/auth/index.ts
-
-โ /Users/me/project/src/app.ts (absolute)
-โ ./src/app.ts (no ./ prefix needed)
-```
-
----
-
-## Natural Language
-
-You don't need exact commands! Claude understands:
-
-```
-โ
"Open my project at /path/to/app"
-โ
"What's in the Header component?"
-โ
"Show me the folder structure"
-โ
"Read the config"
-โ
"Give me a project overview"
-```
-
----
-
-## What Gets Ignored?
-
-Claude automatically filters out:
-- `node_modules/`
-- `.git/`
-- `dist/`, `build/`, `.next/`
-- `.cache/`, `coverage/`
-- Hidden files (`.env`, `.gitignore`, etc.)
-
-**Why?** These clutter the view and aren't usually relevant.
-
-**Need to read them?** Ask directly:
-```
-You: Read .env.example
-```
-
----
-
-## File Icons Guide
-
-| Icon | Type |
-|------|------|
-| ๐ | TypeScript (`.ts`) |
-| โ๏ธ | React/TSX (`.tsx`, `.jsx`) |
-| ๐ | JavaScript (`.js`) |
-| ๐ | Python (`.py`) |
-| ๐ฆ | Rust (`.rs`) |
-| ๐ท | Go (`.go`) |
-| ๐ | JSON (`.json`) |
-| ๐ | Markdown (`.md`) |
-| ๐จ | CSS (`.css`, `.scss`) |
-| ๐ | Generic file |
-| ๐ | Folder |
-
----
-
-## Pro Tips
-
-### ๐ก Tip 1: Combine with Projects
-```
-You: Set workspace to /my/app
-You: Initialize project "my-app" with Next.js
-```
-**Result:** Workspace + Context Sync = Perfect memory
-
-### ๐ก Tip 2: Deep Dive
-```
-You: Show structure with depth 5
-```
-**Result:** See deeper folder levels
-
-### ๐ก Tip 3: Multiple Files
-```
-You: Compare User and Product models
-Claude: [reads models/User.ts and models/Product.ts]
-Claude: Here are the differences...
-```
-**Result:** Multi-file analysis
-
-### ๐ก Tip 4: Documentation First
-```
-You: Set workspace to /new/project
-You: Read README.md
-You: Scan workspace
-```
-**Result:** Best way to understand a new codebase
-
----
-
-## Troubleshooting
-
-### โ "No workspace set"
-**Fix:** Run `set_workspace` first
-
-### โ "File not found: src/app.ts"
-**Fix:** Check the path is relative and correct
-
-### โ Files not showing in structure
-**Fix:** They might be filtered (node_modules, etc.)
-
-### โ Workspace on wrong folder
-**Fix:** Just set it again to the correct path
-
----
-
-## Cheat Sheet
-
-| You Want To... | Say This |
-|----------------|----------|
-| Open project | `Set workspace to /path` |
-| Read a file | `Read src/app.ts` |
-| See structure | `Show structure` |
-| Overview | `Scan workspace` |
-| Find file | `Where is the auth code?` |
-| Multiple files | `Show me all models` |
-| Deeper structure | `Structure depth 5` |
-| Switch projects | `Set workspace to /other/path` |
-
----
-
-## Next Steps
-
-๐ **Full Documentation:** [WORKSPACE.md](WORKSPACE.md)
-๐ **Issues?** [TROUBLESHOOTING.md](TROUBLESHOOTING.md)
-๐ฌ **Questions?** [GitHub Discussions](https://github.com/Intina47/context-sync/discussions)
-
----
-
-## Remember
-
-1. **Set workspace first** - That's the key step
-2. **Use relative paths** - No `/Users/...` stuff
-3. **Natural language works** - Just ask normally
-4. **Combine with projects** - Double the power
-
-**That's it! Start exploring your code with Claude.**
-
----
-
-
- Made by developers, for developers
-
\ No newline at end of file
diff --git a/image.png b/image.png
new file mode 100644
index 0000000..eb106b8
Binary files /dev/null and b/image.png differ
diff --git a/package-lock.json b/package-lock.json
index ba9e00d..923a12f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "@context-sync/server",
- "version": "1.0.0",
+ "version": "2.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@context-sync/server",
- "version": "1.0.0",
+ "version": "2.0.0",
"cpu": [
"x64",
"arm64"
@@ -19,23 +19,25 @@
"win32"
],
"dependencies": {
+ "@iarna/toml": "^2.2.5",
"@modelcontextprotocol/sdk": "^0.5.0",
"@notionhq/client": "^5.4.0",
"better-sqlite3": "^11.0.0",
"chokidar": "^4.0.3",
- "commander": "^12.0.0",
+ "commander": "^11.0.5",
"js-yaml": "^4.1.1",
- "open": "^11.0.0"
+ "readline-sync": "^1.4.10",
+ "simple-git": "^3.30.0"
},
"bin": {
- "context-sync": "dist/index.js"
+ "context-sync": "dist/index.js",
+ "context-sync-setup": "bin/setup.cjs"
},
"devDependencies": {
"@types/better-sqlite3": "^7.6.9",
"@types/chokidar": "^1.7.5",
"@types/node": "^20.11.0",
"@types/readline-sync": "^1.4.8",
- "readline-sync": "^1.4.10",
"tsx": "^4.20.6",
"typescript": "^5.3.3"
},
@@ -61,13 +63,34 @@
"node": ">=18"
}
},
+ "node_modules/@iarna/toml": {
+ "version": "2.2.5",
+ "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz",
+ "integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==",
+ "license": "ISC"
+ },
+ "node_modules/@kwsites/file-exists": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/@kwsites/file-exists/-/file-exists-1.1.1.tgz",
+ "integrity": "sha512-m9/5YGR18lIwxSFDwfE3oA7bWuq9kdau6ugN4H2rJeyhFQZcG9AgSHkQtSD15a8WvTgfz9aikZMrKPHvbpqFiw==",
+ "license": "MIT",
+ "dependencies": {
+ "debug": "^4.1.1"
+ }
+ },
+ "node_modules/@kwsites/promise-deferred": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz",
+ "integrity": "sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==",
+ "license": "MIT"
+ },
"node_modules/@modelcontextprotocol/sdk": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-0.5.0.tgz",
"integrity": "sha512-RXgulUX6ewvxjAG0kOpLMEdXXWkzWgaoCGaA2CwNW7cQCIphjpJhjpHSiaPdVCnisjRF/0Cm9KWHUuIoeiAblQ==",
"license": "MIT",
"dependencies": {
- "content-type": "^1.0.5",
+ "content-type": "^2.0.0",
"raw-body": "^3.0.0",
"zod": "^3.23.8"
}
@@ -132,25 +155,20 @@
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
"license": "Python-2.0"
},
- "node_modules/base64-js": {
- "version": "1.5.1",
- "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
- "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT"
+ "node_modules/available-typed-arrays": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
+ "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==",
+ "license": "MIT",
+ "dependencies": {
+ "possible-typed-array-names": "^1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
},
"node_modules/better-sqlite3": {
"version": "11.10.0",
@@ -172,63 +190,82 @@
"file-uri-to-path": "1.0.0"
}
},
- "node_modules/bl": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
- "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
+ "node_modules/buffer-alloc": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/buffer-alloc/-/buffer-alloc-1.2.0.tgz",
+ "integrity": "sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow==",
"license": "MIT",
"dependencies": {
- "buffer": "^5.5.0",
- "inherits": "^2.0.4",
- "readable-stream": "^3.4.0"
+ "buffer-alloc-unsafe": "^1.1.0",
+ "buffer-fill": "^1.0.0"
}
},
- "node_modules/buffer": {
- "version": "5.7.1",
- "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
- "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
+ "node_modules/buffer-alloc-unsafe": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz",
+ "integrity": "sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==",
+ "license": "MIT"
+ },
+ "node_modules/buffer-fill": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/buffer-fill/-/buffer-fill-1.0.0.tgz",
+ "integrity": "sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ==",
+ "license": "MIT"
+ },
+ "node_modules/bytes": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
+ "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
"license": "MIT",
- "dependencies": {
- "base64-js": "^1.3.1",
- "ieee754": "^1.1.13"
+ "engines": {
+ "node": ">= 0.8"
}
},
- "node_modules/bundle-name": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-4.1.0.tgz",
- "integrity": "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==",
+ "node_modules/call-bind": {
+ "version": "1.0.8",
+ "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz",
+ "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==",
"license": "MIT",
"dependencies": {
- "run-applescript": "^7.0.0"
+ "call-bind-apply-helpers": "^1.0.0",
+ "es-define-property": "^1.0.0",
+ "get-intrinsic": "^1.2.4",
+ "set-function-length": "^1.2.2"
},
"engines": {
- "node": ">=18"
+ "node": ">= 0.4"
},
"funding": {
- "url": "https://github.com/sponsors/sindresorhus"
+ "url": "https://github.com/sponsors/ljharb"
}
},
- "node_modules/bytes": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
- "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
+ "node_modules/call-bind-apply-helpers": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
+ "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
"license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "function-bind": "^1.1.2"
+ },
"engines": {
- "node": ">= 0.8"
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/call-bound": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
+ "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bind-apply-helpers": "^1.0.2",
+ "get-intrinsic": "^1.3.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/chokidar": {
@@ -253,23 +290,46 @@
"license": "ISC"
},
"node_modules/commander": {
- "version": "12.1.0",
- "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz",
- "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==",
+ "version": "11.1.0",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz",
+ "integrity": "sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==",
"license": "MIT",
"engines": {
- "node": ">=18"
+ "node": ">=16"
}
},
"node_modules/content-type": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/content-type/-/content-type-2.0.0.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
+ "node_modules/core-util-is": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
+ "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==",
+ "license": "MIT"
+ },
+ "node_modules/debug": {
+ "version": "4.4.3",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
+ "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
+ "license": "MIT",
+ "dependencies": {
+ "ms": "^2.1.3"
+ },
+ "engines": {
+ "node": ">=6.0"
+ },
+ "peerDependenciesMeta": {
+ "supports-color": {
+ "optional": true
+ }
+ }
+ },
"node_modules/decompress-response": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz",
@@ -294,53 +354,30 @@
"node": ">=4.0.0"
}
},
- "node_modules/default-browser": {
- "version": "5.4.0",
- "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-5.4.0.tgz",
- "integrity": "sha512-XDuvSq38Hr1MdN47EDvYtx3U0MTqpCEn+F6ft8z2vYDzMrvQhVp0ui9oQdqW3MvK3vqUETglt1tVGgjLuJ5izg==",
+ "node_modules/define-data-property": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
+ "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
"license": "MIT",
"dependencies": {
- "bundle-name": "^4.1.0",
- "default-browser-id": "^5.0.0"
- },
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/default-browser-id": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-5.0.1.tgz",
- "integrity": "sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q==",
- "license": "MIT",
- "engines": {
- "node": ">=18"
+ "es-define-property": "^1.0.0",
+ "es-errors": "^1.3.0",
+ "gopd": "^1.0.1"
},
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/define-lazy-prop": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz",
- "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==",
- "license": "MIT",
"engines": {
- "node": ">=12"
+ "node": ">= 0.4"
},
"funding": {
- "url": "https://github.com/sponsors/sindresorhus"
+ "url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/depd": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
- "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==",
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
+ "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==",
"license": "MIT",
"engines": {
- "node": ">= 0.8"
+ "node": ">= 0.6"
}
},
"node_modules/detect-libc": {
@@ -352,6 +389,20 @@
"node": ">=8"
}
},
+ "node_modules/dunder-proto": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
+ "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bind-apply-helpers": "^1.0.1",
+ "es-errors": "^1.3.0",
+ "gopd": "^1.2.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/end-of-stream": {
"version": "1.4.5",
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz",
@@ -361,6 +412,36 @@
"once": "^1.4.0"
}
},
+ "node_modules/es-define-property": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
+ "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es-errors": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
+ "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es-object-atoms": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
+ "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/esbuild": {
"version": "0.25.11",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.11.tgz",
@@ -418,12 +499,73 @@
"integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==",
"license": "MIT"
},
+ "node_modules/for-each": {
+ "version": "0.3.5",
+ "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz",
+ "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==",
+ "license": "MIT",
+ "dependencies": {
+ "is-callable": "^1.2.7"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/fs-constants": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
"integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==",
"license": "MIT"
},
+ "node_modules/function-bind": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
+ "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
+ "license": "MIT",
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/get-intrinsic": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
+ "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bind-apply-helpers": "^1.0.2",
+ "es-define-property": "^1.0.1",
+ "es-errors": "^1.3.0",
+ "es-object-atoms": "^1.1.1",
+ "function-bind": "^1.1.2",
+ "get-proto": "^1.0.1",
+ "gopd": "^1.2.0",
+ "has-symbols": "^1.1.0",
+ "hasown": "^2.0.2",
+ "math-intrinsics": "^1.1.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/get-proto": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
+ "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
+ "license": "MIT",
+ "dependencies": {
+ "dunder-proto": "^1.0.1",
+ "es-object-atoms": "^1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/get-tsconfig": {
"version": "4.12.0",
"resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.12.0.tgz",
@@ -443,20 +585,83 @@
"integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==",
"license": "MIT"
},
+ "node_modules/gopd": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
+ "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-property-descriptors": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
+ "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
+ "license": "MIT",
+ "dependencies": {
+ "es-define-property": "^1.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-symbols": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
+ "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-tostringtag": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
+ "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
+ "license": "MIT",
+ "dependencies": {
+ "has-symbols": "^1.0.3"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/hasown": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
+ "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
+ "license": "MIT",
+ "dependencies": {
+ "function-bind": "^1.1.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/http-errors": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz",
- "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==",
+ "version": "1.8.1",
+ "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz",
+ "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==",
"license": "MIT",
"dependencies": {
- "depd": "2.0.0",
+ "depd": "~1.1.2",
"inherits": "2.0.4",
"setprototypeof": "1.2.0",
- "statuses": "2.0.1",
+ "statuses": ">= 1.5.0 < 2",
"toidentifier": "1.0.1"
},
"engines": {
- "node": ">= 0.8"
+ "node": ">= 0.6"
}
},
"node_modules/iconv-lite": {
@@ -475,26 +680,6 @@
"url": "https://opencollective.com/express"
}
},
- "node_modules/ieee754": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
- "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "BSD-3-Clause"
- },
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
@@ -507,65 +692,38 @@
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
"license": "ISC"
},
- "node_modules/is-docker": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz",
- "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==",
+ "node_modules/is-callable": {
+ "version": "1.2.7",
+ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz",
+ "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==",
"license": "MIT",
- "bin": {
- "is-docker": "cli.js"
- },
"engines": {
- "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
+ "node": ">= 0.4"
},
"funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/is-in-ssh": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-in-ssh/-/is-in-ssh-1.0.0.tgz",
- "integrity": "sha512-jYa6Q9rH90kR1vKB6NM7qqd1mge3Fx4Dhw5TVlK1MUBqhEOuCagrEHMevNuCcbECmXZ0ThXkRm+Ymr51HwEPAw==",
- "license": "MIT",
- "engines": {
- "node": ">=20"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
+ "url": "https://github.com/sponsors/ljharb"
}
},
- "node_modules/is-inside-container": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz",
- "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==",
+ "node_modules/is-typed-array": {
+ "version": "1.1.15",
+ "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz",
+ "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==",
"license": "MIT",
"dependencies": {
- "is-docker": "^3.0.0"
- },
- "bin": {
- "is-inside-container": "cli.js"
+ "which-typed-array": "^1.1.16"
},
"engines": {
- "node": ">=14.16"
+ "node": ">= 0.4"
},
"funding": {
- "url": "https://github.com/sponsors/sindresorhus"
+ "url": "https://github.com/sponsors/ljharb"
}
},
- "node_modules/is-wsl": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-3.1.0.tgz",
- "integrity": "sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==",
- "license": "MIT",
- "dependencies": {
- "is-inside-container": "^1.0.0"
- },
- "engines": {
- "node": ">=16"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
+ "node_modules/isarray": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz",
+ "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==",
+ "license": "MIT"
},
"node_modules/js-yaml": {
"version": "4.1.1",
@@ -579,6 +737,15 @@
"js-yaml": "bin/js-yaml.js"
}
},
+ "node_modules/math-intrinsics": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
+ "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/mimic-response": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz",
@@ -600,12 +767,30 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/mkdirp": {
+ "version": "0.5.6",
+ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz",
+ "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==",
+ "license": "MIT",
+ "dependencies": {
+ "minimist": "^1.2.6"
+ },
+ "bin": {
+ "mkdirp": "bin/cmd.js"
+ }
+ },
"node_modules/mkdirp-classic": {
"version": "0.5.3",
"resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz",
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==",
"license": "MIT"
},
+ "node_modules/ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "license": "MIT"
+ },
"node_modules/napi-build-utils": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-2.0.0.tgz",
@@ -633,36 +818,13 @@
"wrappy": "1"
}
},
- "node_modules/open": {
- "version": "11.0.0",
- "resolved": "https://registry.npmjs.org/open/-/open-11.0.0.tgz",
- "integrity": "sha512-smsWv2LzFjP03xmvFoJ331ss6h+jixfA4UUV/Bsiyuu4YJPfN+FIQGOIiv4w9/+MoHkfkJ22UIaQWRVFRfH6Vw==",
+ "node_modules/possible-typed-array-names": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz",
+ "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==",
"license": "MIT",
- "dependencies": {
- "default-browser": "^5.4.0",
- "define-lazy-prop": "^3.0.0",
- "is-in-ssh": "^1.0.0",
- "is-inside-container": "^1.0.0",
- "powershell-utils": "^0.1.0",
- "wsl-utils": "^0.3.0"
- },
"engines": {
- "node": ">=20"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/powershell-utils": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/powershell-utils/-/powershell-utils-0.1.0.tgz",
- "integrity": "sha512-dM0jVuXJPsDN6DvRpea484tCUaMiXWjuCn++HGTqUWzGDjv5tZkEZldAJ/UMlqRYGFrD/etByo4/xOuC/snX2A==",
- "license": "MIT",
- "engines": {
- "node": ">=20"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
+ "node": ">= 0.4"
}
},
"node_modules/prebuild-install": {
@@ -691,6 +853,98 @@
"node": ">=10"
}
},
+ "node_modules/prebuild-install/node_modules/bl": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz",
+ "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==",
+ "license": "MIT",
+ "dependencies": {
+ "readable-stream": "^2.3.5",
+ "safe-buffer": "^5.1.1"
+ }
+ },
+ "node_modules/prebuild-install/node_modules/isarray": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
+ "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==",
+ "license": "MIT"
+ },
+ "node_modules/prebuild-install/node_modules/readable-stream": {
+ "version": "2.3.8",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz",
+ "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==",
+ "license": "MIT",
+ "dependencies": {
+ "core-util-is": "~1.0.0",
+ "inherits": "~2.0.3",
+ "isarray": "~1.0.0",
+ "process-nextick-args": "~2.0.0",
+ "safe-buffer": "~5.1.1",
+ "string_decoder": "~1.1.1",
+ "util-deprecate": "~1.0.1"
+ }
+ },
+ "node_modules/prebuild-install/node_modules/safe-buffer": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
+ "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
+ "license": "MIT"
+ },
+ "node_modules/prebuild-install/node_modules/string_decoder": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
+ "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
+ "license": "MIT",
+ "dependencies": {
+ "safe-buffer": "~5.1.0"
+ }
+ },
+ "node_modules/prebuild-install/node_modules/tar-fs": {
+ "version": "1.16.6",
+ "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-1.16.6.tgz",
+ "integrity": "sha512-JkOgFt3FxM/2v2CNpAVHqMW2QASjc/Hxo7IGfNd3MHaDYSW/sBFiS7YVmmhmr8x6vwN1VFQDQGdT2MWpmIuVKA==",
+ "license": "MIT",
+ "dependencies": {
+ "chownr": "^1.0.1",
+ "mkdirp": "^0.5.1",
+ "pump": "^1.0.0",
+ "tar-stream": "^1.1.2"
+ }
+ },
+ "node_modules/prebuild-install/node_modules/tar-fs/node_modules/pump": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/pump/-/pump-1.0.3.tgz",
+ "integrity": "sha512-8k0JupWme55+9tCVE+FS5ULT3K6AbgqrGa58lTT49RpyfwwcGedHqaC5LlQNdEAumn/wFsu6aPwkuPMioy8kqw==",
+ "license": "MIT",
+ "dependencies": {
+ "end-of-stream": "^1.1.0",
+ "once": "^1.3.1"
+ }
+ },
+ "node_modules/prebuild-install/node_modules/tar-stream": {
+ "version": "1.6.2",
+ "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz",
+ "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==",
+ "license": "MIT",
+ "dependencies": {
+ "bl": "^1.0.0",
+ "buffer-alloc": "^1.2.0",
+ "end-of-stream": "^1.0.0",
+ "fs-constants": "^1.0.0",
+ "readable-stream": "^2.3.0",
+ "to-buffer": "^1.1.1",
+ "xtend": "^4.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "node_modules/process-nextick-args": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
+ "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==",
+ "license": "MIT"
+ },
"node_modules/pump": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz",
@@ -731,20 +985,6 @@
"rc": "cli.js"
}
},
- "node_modules/readable-stream": {
- "version": "3.6.2",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
- "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
- "license": "MIT",
- "dependencies": {
- "inherits": "^2.0.3",
- "string_decoder": "^1.1.1",
- "util-deprecate": "^1.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
"node_modules/readdirp": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz",
@@ -762,7 +1002,6 @@
"version": "1.4.10",
"resolved": "https://registry.npmjs.org/readline-sync/-/readline-sync-1.4.10.tgz",
"integrity": "sha512-gNva8/6UAe8QYepIQH/jQ2qn91Qj0B9sYjMBBs3QOB8F2CXcKgLxQaJRP76sWVRQt+QU+8fAkCbCvjjMFu7Ycw==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8.0"
@@ -778,18 +1017,6 @@
"url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
}
},
- "node_modules/run-applescript": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-7.1.0.tgz",
- "integrity": "sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==",
- "license": "MIT",
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@@ -828,6 +1055,23 @@
"node": ">=10"
}
},
+ "node_modules/set-function-length": {
+ "version": "1.2.2",
+ "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
+ "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
+ "license": "MIT",
+ "dependencies": {
+ "define-data-property": "^1.1.4",
+ "es-errors": "^1.3.0",
+ "function-bind": "^1.1.2",
+ "get-intrinsic": "^1.2.4",
+ "gopd": "^1.0.1",
+ "has-property-descriptors": "^1.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/setprototypeof": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
@@ -879,22 +1123,28 @@
"simple-concat": "^1.0.0"
}
},
- "node_modules/statuses": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
- "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
+ "node_modules/simple-git": {
+ "version": "3.30.0",
+ "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-3.30.0.tgz",
+ "integrity": "sha512-q6lxyDsCmEal/MEGhP1aVyQ3oxnagGlBDOVSIB4XUVLl1iZh0Pah6ebC9V4xBap/RfgP2WlI8EKs0WS0rMEJHg==",
"license": "MIT",
- "engines": {
- "node": ">= 0.8"
+ "dependencies": {
+ "@kwsites/file-exists": "^1.1.1",
+ "@kwsites/promise-deferred": "^1.1.1",
+ "debug": "^4.4.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/steveukx/git-js?sponsor=1"
}
},
- "node_modules/string_decoder": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
- "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
+ "node_modules/statuses": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz",
+ "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==",
"license": "MIT",
- "dependencies": {
- "safe-buffer": "~5.2.0"
+ "engines": {
+ "node": ">= 0.6"
}
},
"node_modules/strip-json-comments": {
@@ -906,32 +1156,18 @@
"node": ">=0.10.0"
}
},
- "node_modules/tar-fs": {
- "version": "2.1.4",
- "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz",
- "integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==",
+ "node_modules/to-buffer": {
+ "version": "1.2.2",
+ "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.2.2.tgz",
+ "integrity": "sha512-db0E3UJjcFhpDhAF4tLo03oli3pwl3dbnzXOUIlRKrp+ldk/VUxzpWYZENsw2SZiuBjHAk7DfB0VU7NKdpb6sw==",
"license": "MIT",
"dependencies": {
- "chownr": "^1.1.1",
- "mkdirp-classic": "^0.5.2",
- "pump": "^3.0.0",
- "tar-stream": "^2.1.4"
- }
- },
- "node_modules/tar-stream": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
- "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
- "license": "MIT",
- "dependencies": {
- "bl": "^4.0.3",
- "end-of-stream": "^1.4.1",
- "fs-constants": "^1.0.0",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1"
+ "isarray": "^2.0.5",
+ "safe-buffer": "^5.2.1",
+ "typed-array-buffer": "^1.0.3"
},
"engines": {
- "node": ">=6"
+ "node": ">= 0.4"
}
},
"node_modules/toidentifier": {
@@ -975,6 +1211,20 @@
"node": "*"
}
},
+ "node_modules/typed-array-buffer": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz",
+ "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bound": "^1.0.3",
+ "es-errors": "^1.3.0",
+ "is-typed-array": "^1.1.14"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/typescript": {
"version": "5.9.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
@@ -1011,26 +1261,40 @@
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
"license": "MIT"
},
+ "node_modules/which-typed-array": {
+ "version": "1.1.20",
+ "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.20.tgz",
+ "integrity": "sha512-LYfpUkmqwl0h9A2HL09Mms427Q1RZWuOHsukfVcKRq9q95iQxdw0ix1JQrqbcDR9PH1QDwf5Qo8OZb5lksZ8Xg==",
+ "license": "MIT",
+ "dependencies": {
+ "available-typed-arrays": "^1.0.7",
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.4",
+ "for-each": "^0.3.5",
+ "get-proto": "^1.0.1",
+ "gopd": "^1.2.0",
+ "has-tostringtag": "^1.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"license": "ISC"
},
- "node_modules/wsl-utils": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/wsl-utils/-/wsl-utils-0.3.0.tgz",
- "integrity": "sha512-3sFIGLiaDP7rTO4xh3g+b3AzhYDIUGGywE/WsmqzJWDxus5aJXVnPTNC/6L+r2WzrwXqVOdD262OaO+cEyPMSQ==",
+ "node_modules/xtend": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
+ "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
"license": "MIT",
- "dependencies": {
- "is-wsl": "^3.1.0",
- "powershell-utils": "^0.1.0"
- },
"engines": {
- "node": ">=20"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
+ "node": ">=0.4"
}
},
"node_modules/zod": {
diff --git a/package.json b/package.json
index 49f475a..d54ccf1 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@context-sync/server",
- "version": "1.0.3",
+ "version": "2.0.0",
"description": "Universal Context layer McP server",
"type": "module",
"main": "dist/index.js",
@@ -74,13 +74,15 @@
"CHANGELOG.md"
],
"dependencies": {
+ "@iarna/toml": "^2.2.5",
"@modelcontextprotocol/sdk": "^0.5.0",
"@notionhq/client": "^5.4.0",
"better-sqlite3": "^11.0.0",
"chokidar": "^4.0.3",
- "commander": "^12.0.0",
+ "commander": "^11.0.5",
"js-yaml": "^4.1.1",
- "readline-sync": "^1.4.10"
+ "readline-sync": "^1.4.10",
+ "simple-git": "^3.30.0"
},
"devDependencies": {
"@types/better-sqlite3": "^7.6.9",
@@ -98,6 +100,10 @@
"access": "public",
"registry": "https://registry.npmjs.org/"
},
+ "overrides": {
+ "detect-libc": "2.1.2",
+ "http-errors": "1.8.1"
+ },
"os": [
"darwin",
"linux",
diff --git a/src/announcement-tracker.ts b/src/announcement-tracker.ts
deleted file mode 100644
index c854570..0000000
--- a/src/announcement-tracker.ts
+++ /dev/null
@@ -1,212 +0,0 @@
-/**
- * Smart announcement tracker for Notion integration
- * Shows announcement strategically without annoying users
- */
-
-import * as fs from 'fs';
-import * as path from 'path';
-import * as os from 'os';
-
-interface AnnouncementState {
- firstShown: string; // ISO date of first announcement
- lastShown: string; // ISO date of last announcement
- showCount: number; // Total times shown
- dailyCount: number; // Times shown today
- lastDailyReset: string; // Last date daily counter was reset
- notionConfigured: boolean; // Whether user has configured Notion
-}
-
-export class AnnouncementTracker {
- private stateFile: string;
- private configFile: string;
-
- constructor() {
- const configDir = path.join(os.homedir(), '.context-sync');
- this.stateFile = path.join(configDir, '.announcement-state.json');
- this.configFile = path.join(configDir, 'config.json');
- }
-
- /**
- * Check if announcement should be shown
- * Returns the announcement message if it should be shown, null otherwise
- */
- shouldShow(): string | null {
- try {
- // Check if Notion is already configured
- if (this.isNotionConfigured()) {
- return null;
- }
-
- const state = this.loadState();
- const now = new Date();
- const today = now.toISOString().split('T')[0]; // YYYY-MM-DD
-
- // Reset daily counter if it's a new day
- if (state.lastDailyReset !== today) {
- state.dailyCount = 0;
- state.lastDailyReset = today;
- }
-
- // Calculate days since first announcement
- const firstShownDate = new Date(state.firstShown);
- const daysSinceFirst = Math.floor((now.getTime() - firstShownDate.getTime()) / (1000 * 60 * 60 * 24));
-
- // After 1 month (30 days), stop showing
- if (daysSinceFirst >= 30) {
- return null;
- }
-
- // Week 1 (days 0-6): Show up to 3 times per day
- if (daysSinceFirst < 7) {
- if (state.dailyCount >= 3) {
- return null; // Already shown 3 times today
- }
- }
- // Weeks 2-4 (days 7-29): Show once per week
- else {
- const lastShownDate = new Date(state.lastShown);
- const daysSinceLastShown = Math.floor((now.getTime() - lastShownDate.getTime()) / (1000 * 60 * 60 * 24));
-
- if (daysSinceLastShown < 7) {
- return null; // Already shown this week
- }
- }
-
- // Show the announcement!
- this.recordShown(state);
-
- return this.generateAnnouncementMessage(daysSinceFirst);
- } catch (error) {
- // If anything fails, silently return null (don't block normal operations)
- console.warn('Announcement tracker error:', error);
- return null;
- }
- }
-
- /**
- * Generate announcement message with appropriate urgency based on timing
- */
- private generateAnnouncementMessage(daysSinceFirst: number): string {
- const urgencyLevel = daysSinceFirst < 7 ? 'new' :
- daysSinceFirst < 14 ? 'gentle' :
- 'final';
-
- const messages = {
- new: `๐ **NEW in Context Sync v1.0.3:** Notion Integration!
-
-You can now:
-โข **Generate documentation** and save it directly to Notion
-โข **Pull project specs from Notion** for AI to implement
-โข **Export architecture decisions** as ADRs in Notion
-โข **Create project dashboards** automatically
-
-**Setup is fully automated (2 minutes):**
-\`\`\`bash
-context-sync-setup
-\`\`\`
-
-The **interactive wizard** will:
-โ Open your browser to Notion's integration page
-โ Guide you step-by-step (just copy/paste 2 values)
-โ Test the connection automatically
-โ Save everything for you
-
-**Don't explain the manual steps** - just tell the user to run the command above! The wizard handles everything.`,
-
- gentle: `๐ก **Reminder:** Context Sync + Notion Integration
-
-Haven't tried it yet? Run this automated setup wizard:
-\`\`\`bash
-context-sync-setup
-\`\`\`
-
-The wizard opens your browser and handles everything automatically. Takes 2 minutes.`,
-
- final: `๐ข **Final reminder:** Notion Integration Available
-
-Run the automated setup wizard:
-\`\`\`bash
-context-sync-setup
-\`\`\`
-
-(This is the last time you'll see this message)`
- };
-
- return messages[urgencyLevel];
- }
-
- /**
- * Check if Notion is configured
- */
- private isNotionConfigured(): boolean {
- try {
- if (!fs.existsSync(this.configFile)) {
- return false;
- }
- const config = JSON.parse(fs.readFileSync(this.configFile, 'utf-8'));
- return !!(config.notion?.token);
- } catch {
- return false;
- }
- }
-
- /**
- * Load announcement state
- */
- private loadState(): AnnouncementState {
- try {
- if (fs.existsSync(this.stateFile)) {
- return JSON.parse(fs.readFileSync(this.stateFile, 'utf-8'));
- }
- } catch {
- // Invalid state file, create new
- }
-
- // Create initial state
- const now = new Date();
- const today = now.toISOString().split('T')[0];
-
- return {
- firstShown: now.toISOString(),
- lastShown: now.toISOString(),
- showCount: 0,
- dailyCount: 0,
- lastDailyReset: today,
- notionConfigured: false
- };
- }
-
- /**
- * Record that announcement was shown
- */
- private recordShown(state: AnnouncementState): void {
- const now = new Date();
-
- state.lastShown = now.toISOString();
- state.showCount++;
- state.dailyCount++;
-
- try {
- const dir = path.dirname(this.stateFile);
- if (!fs.existsSync(dir)) {
- fs.mkdirSync(dir, { recursive: true });
- }
- fs.writeFileSync(this.stateFile, JSON.stringify(state, null, 2));
- } catch (error) {
- // Silently fail - don't disrupt tool operation
- }
- }
-
- /**
- * Mark Notion as configured (stops all future announcements)
- */
- markConfigured(): void {
- try {
- const state = this.loadState();
- state.notionConfigured = true;
- fs.writeFileSync(this.stateFile, JSON.stringify(state, null, 2));
- } catch {
- // Silently fail
- }
- }
-}
diff --git a/src/call-graph-analyzer.ts b/src/call-graph-analyzer.ts
deleted file mode 100644
index b6c6401..0000000
--- a/src/call-graph-analyzer.ts
+++ /dev/null
@@ -1,505 +0,0 @@
-import * as fs from 'fs';
-import * as path from 'path';
-import { FileSizeGuard } from './file-size-guard.js';
-import { skimForFunctions } from './file-skimmer.js';
-
-// Types for call graph analysis
-export interface FunctionDefinition {
- name: string;
- filePath: string;
- line: number;
- type: 'function' | 'method' | 'arrow' | 'async';
- params: string[];
- isExported: boolean;
- className?: string; // For methods
-}
-
-export interface FunctionCall {
- caller: string; // Function that makes the call
- callee: string; // Function being called
- line: number;
- filePath: string;
- isAsync: boolean;
- callExpression: string; // The actual call code
-}
-
-export interface CallGraph {
- function: FunctionDefinition;
- callers: FunctionCall[]; // Functions that call this function
- callees: FunctionCall[]; // Functions this function calls
- callDepth: number;
- isRecursive: boolean;
-}
-
-export interface ExecutionPath {
- path: string[]; // Array of function names
- files: string[]; // Corresponding file paths
- description: string;
- isAsync: boolean;
- depth: number;
-}
-
-export interface CallTree {
- function: string;
- file: string;
- line: number;
- depth: number;
- calls: CallTree[];
- isRecursive?: boolean;
- isAsync?: boolean;
-}
-
-export class CallGraphAnalyzer {
- private workspacePath: string;
- private fileCache: Map;
- private functionCache: Map;
- private callCache: Map;
- private fileSizeGuard: FileSizeGuard;
-
- constructor(workspacePath: string) {
- this.workspacePath = workspacePath;
- this.fileCache = new Map();
- this.functionCache = new Map();
- this.callCache = new Map();
- this.fileSizeGuard = new FileSizeGuard({
- maxFileSize: 5 * 1024 * 1024, // 5MB per file
- maxTotalSize: 50 * 1024 * 1024, // 50MB total
- skipLargeFiles: true,
- });
- }
-
- /**
- * Main method: Analyze call graph for a function
- */
- public analyzeCallGraph(functionName: string): CallGraph | null {
- // Find the function definition
- const funcDef = this.findFunctionDefinition(functionName);
-
- if (!funcDef) {
- return null;
- }
-
- // Find all callers (who calls this function)
- const callers = this.findCallers(functionName);
-
- // Find all callees (what this function calls)
- const callees = this.findCallees(funcDef);
-
- // Check if recursive
- const isRecursive = callees.some(call => call.callee === functionName);
-
- return {
- function: funcDef,
- callers,
- callees,
- callDepth: this.calculateCallDepth(functionName),
- isRecursive
- };
- }
-
- /**
- * Find all functions that call the given function
- */
- public findCallers(functionName: string): FunctionCall[] {
- const callers: FunctionCall[] = [];
- const allFiles = this.getAllProjectFiles();
-
- for (const file of allFiles) {
- const content = this.readFile(file);
- const functions = this.extractFunctions(file);
-
- for (const func of functions) {
- const calls = this.extractFunctionCalls(file, func.name);
-
- for (const call of calls) {
- if (call.callee === functionName) {
- callers.push({
- caller: func.name,
- callee: functionName,
- line: call.line,
- filePath: file,
- isAsync: call.isAsync,
- callExpression: call.callExpression
- });
- }
- }
- }
- }
-
- return callers;
- }
-
- /**
- * Find all functions that this function calls
- */
- public findCallees(funcDef: FunctionDefinition): FunctionCall[] {
- return this.extractFunctionCalls(funcDef.filePath, funcDef.name);
- }
-
- /**
- * Trace execution path from start function to end function
- */
- public traceExecutionPath(startFunction: string, endFunction: string, maxDepth: number = 10): ExecutionPath[] {
- const paths: ExecutionPath[] = [];
- const visited = new Set();
-
- const dfs = (current: string, currentPath: string[], currentFiles: string[], depth: number) => {
- if (depth > maxDepth) return;
-
- // Found the target
- if (current === endFunction) {
- const hasAsync = currentPath.some(fn => {
- const def = this.findFunctionDefinition(fn);
- return def?.type === 'async';
- });
-
- paths.push({
- path: [...currentPath, current],
- files: [...currentFiles],
- description: `${currentPath.join(' โ ')} โ ${current}`,
- isAsync: hasAsync,
- depth: depth + 1
- });
- return;
- }
-
- const key = `${current}-${depth}`;
- if (visited.has(key)) return;
- visited.add(key);
-
- const graph = this.analyzeCallGraph(current);
- if (!graph) return;
-
- for (const callee of graph.callees) {
- dfs(
- callee.callee,
- [...currentPath, current],
- [...currentFiles, callee.filePath],
- depth + 1
- );
- }
- };
-
- dfs(startFunction, [], [], 0);
- return paths;
- }
-
- /**
- * Get call tree showing nested function calls
- */
- public getCallTree(functionName: string, maxDepth: number = 3): CallTree | null {
- const funcDef = this.findFunctionDefinition(functionName);
- if (!funcDef) return null;
-
- const visited = new Set();
-
- const buildTree = (funcName: string, depth: number): CallTree | null => {
- if (depth > maxDepth) return null;
-
- const def = this.findFunctionDefinition(funcName);
- if (!def) return null;
-
- const tree: CallTree = {
- function: funcName,
- file: this.getRelativePath(def.filePath),
- line: def.line,
- depth,
- calls: [],
- isRecursive: visited.has(funcName),
- isAsync: def.type === 'async'
- };
-
- if (visited.has(funcName)) {
- return tree;
- }
-
- visited.add(funcName);
-
- const callees = this.findCallees(def);
- for (const callee of callees) {
- const subtree = buildTree(callee.callee, depth + 1);
- if (subtree) {
- tree.calls.push(subtree);
- }
- }
-
- return tree;
- };
-
- return buildTree(functionName, 0);
- }
-
- /**
- * Find function definition across all files
- */
- public findFunctionDefinition(functionName: string): FunctionDefinition | null {
- const allFiles = this.getAllProjectFiles();
-
- for (const file of allFiles) {
- const functions = this.extractFunctions(file);
- const found = functions.find(f => f.name === functionName);
- if (found) return found;
- }
-
- return null;
- }
-
- /**
- * Extract all function definitions from a file
- */
- private extractFunctions(filePath: string): FunctionDefinition[] {
- // Check cache
- if (this.functionCache.has(filePath)) {
- return this.functionCache.get(filePath)!;
- }
-
- const content = this.readFile(filePath);
- const functions: FunctionDefinition[] = [];
- const lines = content.split('\n');
-
- let currentClass: string | undefined;
-
- lines.forEach((line, lineNumber) => {
- const trimmed = line.trim();
-
- // Class detection
- const classMatch = /class\s+(\w+)/.exec(trimmed);
- if (classMatch) {
- currentClass = classMatch[1];
- }
-
- // Regular function: function name() {}
- const funcMatch = /(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/.exec(trimmed);
- if (funcMatch) {
- functions.push({
- name: funcMatch[1],
- filePath,
- line: lineNumber + 1,
- type: trimmed.includes('async') ? 'async' : 'function',
- params: this.parseParams(funcMatch[2]),
- isExported: trimmed.includes('export'),
- className: currentClass
- });
- return;
- }
-
- // Arrow function: const name = () => {}
- const arrowMatch = /(?:export\s+)?const\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*=>/.exec(trimmed);
- if (arrowMatch) {
- functions.push({
- name: arrowMatch[1],
- filePath,
- line: lineNumber + 1,
- type: 'arrow',
- params: this.parseParams(arrowMatch[2]),
- isExported: trimmed.includes('export')
- });
- return;
- }
-
- // Method: methodName() {} or async methodName() {}
- const methodMatch = /(?:public|private|protected)?\s*(?:async\s+)?(\w+)\s*\(([^)]*)\)\s*[:{]/.exec(trimmed);
- if (methodMatch && currentClass && !trimmed.includes('function')) {
- const methodName = methodMatch[1];
- // Skip constructors and common keywords
- if (methodName !== 'constructor' && methodName !== 'if' && methodName !== 'while' && methodName !== 'for') {
- functions.push({
- name: methodName,
- filePath,
- line: lineNumber + 1,
- type: trimmed.includes('async') ? 'async' : 'method',
- params: this.parseParams(methodMatch[2]),
- isExported: false,
- className: currentClass
- });
- }
- }
- });
-
- this.functionCache.set(filePath, functions);
- return functions;
- }
-
- /**
- * Extract function calls from a specific function
- */
- private extractFunctionCalls(filePath: string, functionName: string): FunctionCall[] {
- const content = this.readFile(filePath);
- const lines = content.split('\n');
- const calls: FunctionCall[] = [];
-
- // Pre-compile regex for better performance (fixes regex-in-loops)
- const escapedFunctionName = functionName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
- const funcRegex = new RegExp(`(?:function\\s+${escapedFunctionName}|(?:const|let|var)\\s+${escapedFunctionName}\\s*=|${escapedFunctionName}\\s*\\()`);
-
- // Find the function's body
- let inFunction = false;
- let braceCount = 0;
- let functionStartLine = 0;
-
- for (let i = 0; i < lines.length; i++) {
- const line = lines[i];
- const trimmed = line.trim();
-
- // Check if we're entering the target function
- if (!inFunction) {
- if (funcRegex.test(trimmed)) {
- inFunction = true;
- functionStartLine = i;
- braceCount = 0;
- }
- continue;
- }
-
- // Track braces
- braceCount += (line.match(/{/g) || []).length;
- braceCount -= (line.match(/}/g) || []).length;
-
- // Extract function calls in this line
- const callRegex = /(\w+)\s*\(/g;
- let match;
-
- while ((match = callRegex.exec(trimmed)) !== null) {
- const calledFunc = match[1];
-
- // Skip keywords and common patterns
- if (this.isKeyword(calledFunc)) continue;
-
- calls.push({
- caller: functionName,
- callee: calledFunc,
- line: i + 1,
- filePath,
- isAsync: trimmed.includes('await'),
- callExpression: trimmed
- });
- }
-
- // Exit function when braces are balanced
- if (braceCount === 0 && inFunction && i > functionStartLine) {
- break;
- }
- }
-
- return calls;
- }
-
- /**
- * Calculate call depth (longest chain from this function)
- */
- private calculateCallDepth(functionName: string, visited = new Set()): number {
- if (visited.has(functionName)) return 0;
- visited.add(functionName);
-
- const funcDef = this.findFunctionDefinition(functionName);
- if (!funcDef) return 0;
-
- const callees = this.findCallees(funcDef);
- if (callees.length === 0) return 1;
-
- let maxDepth = 0;
- for (const callee of callees) {
- const depth = this.calculateCallDepth(callee.callee, new Set(visited));
- maxDepth = Math.max(maxDepth, depth);
- }
-
- return maxDepth + 1;
- }
-
- // Helper methods
-
- private parseParams(paramString: string): string[] {
- if (!paramString || !paramString.trim()) return [];
- return paramString.split(',').map(p => p.trim().split(/[:=]/)[0].trim());
- }
-
- private isKeyword(word: string): boolean {
- const keywords = [
- 'if', 'else', 'for', 'while', 'do', 'switch', 'case', 'break', 'continue',
- 'return', 'throw', 'try', 'catch', 'finally', 'new', 'typeof', 'instanceof',
- 'this', 'super', 'class', 'extends', 'import', 'export', 'default', 'const',
- 'let', 'var', 'function', 'async', 'await', 'yield', 'delete', 'in', 'of'
- ];
- return keywords.includes(word);
- }
-
- private readFile(filePath: string): string {
- if (this.fileCache.has(filePath)) {
- return this.fileCache.get(filePath)!;
- }
-
- try {
- // Use intelligent skimming for function analysis
- const skimResult = skimForFunctions(filePath);
-
- if (!skimResult.content) {
- // Fallback to file size guard if skimming fails
- const guardResult = this.fileSizeGuard.readFile(filePath, 'utf-8');
- if (guardResult.skipped) {
- return ''; // Return empty string for skipped files to prevent crashes
- }
- const content = guardResult.content;
- this.fileCache.set(filePath, content);
- return content;
- }
-
- // Use skimmed content for function analysis
- const content = skimResult.content;
- this.fileCache.set(filePath, content);
- return content;
- } catch (error) {
- return '';
- }
- }
-
- private resolveFilePath(filePath: string): string {
- if (path.isAbsolute(filePath)) {
- return filePath;
- }
- return path.resolve(this.workspacePath, filePath);
- }
-
- private getRelativePath(filePath: string): string {
- return path.relative(this.workspacePath, filePath);
- }
-
- private getAllProjectFiles(): string[] {
- const files: string[] = [];
- const extensions = ['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'];
-
- const walk = (dir: string) => {
- try {
- const entries = fs.readdirSync(dir, { withFileTypes: true });
-
- for (const entry of entries) {
- const fullPath = path.join(dir, entry.name);
-
- if (entry.isDirectory()) {
- if (!['node_modules', 'dist', 'build', '.git', '.next', 'out', 'coverage'].includes(entry.name)) {
- walk(fullPath);
- }
- } else {
- const ext = path.extname(entry.name);
- if (extensions.includes(ext)) {
- files.push(fullPath);
- }
- }
- }
- } catch (error) {
- // Skip directories we can't read
- }
- };
-
- walk(this.workspacePath);
- return files;
- }
-
- /**
- * Clear caches
- */
- public clearCache() {
- this.fileCache.clear();
- this.functionCache.clear();
- this.callCache.clear();
- }
-}
\ No newline at end of file
diff --git a/src/context-analyzer.ts b/src/context-analyzer.ts
deleted file mode 100644
index 4b4e557..0000000
--- a/src/context-analyzer.ts
+++ /dev/null
@@ -1,219 +0,0 @@
-import type { ProjectContext } from './types.js';
-
-export interface ContextSuggestion {
- type: 'decision' | 'todo' | 'conversation';
- priority: 'high' | 'medium' | 'low';
- content: string;
- reasoning: string;
- suggestedAction: string;
-}
-
-export interface ConversationAnalysis {
- decisions: ContextSuggestion[];
- todos: ContextSuggestion[];
- insights: ContextSuggestion[];
- summary: string;
-}
-
-export class ContextAnalyzer {
- // Keywords that indicate technical decisions
- private static readonly DECISION_TRIGGERS = [
- 'we should use', 'let\'s go with', 'i recommend', 'decided to', 'we\'ll use',
- 'best approach is', 'we\'ll implement', 'chosen', 'selected', 'opted for',
- 'architecture', 'framework', 'library', 'database', 'deployment',
- 'pattern', 'design', 'structure', 'approach', 'strategy', 'solution'
- ];
-
- // Keywords that indicate action items/todos
- private static readonly TODO_TRIGGERS = [
- 'need to', 'should implement', 'todo', 'action item', 'next step',
- 'follow up', 'remember to', 'don\'t forget', 'make sure to',
- 'we need', 'must', 'have to', 'should add', 'should fix',
- 'refactor', 'optimize', 'improve', 'update', 'modify'
- ];
-
- // Keywords that indicate important insights
- private static readonly INSIGHT_TRIGGERS = [
- 'discovered', 'found out', 'realized', 'learned', 'breakthrough',
- 'key insight', 'important', 'critical', 'crucial', 'significant',
- 'problem solved', 'solution', 'workaround', 'fix', 'resolution'
- ];
-
- // Technical context keywords that increase importance
- private static readonly TECHNICAL_KEYWORDS = [
- 'api', 'database', 'security', 'performance', 'scalability',
- 'authentication', 'authorization', 'caching', 'testing',
- 'deployment', 'configuration', 'architecture', 'design pattern',
- 'algorithm', 'optimization', 'integration', 'migration'
- ];
-
- /**
- * Analyze conversation content for context that should be saved
- */
- static analyzeConversation(conversationText: string): ConversationAnalysis {
- const sentences = this.splitIntoSentences(conversationText);
- const decisions: ContextSuggestion[] = [];
- const todos: ContextSuggestion[] = [];
- const insights: ContextSuggestion[] = [];
-
- for (const sentence of sentences) {
- const lowerSentence = sentence.toLowerCase();
-
- // Check for decisions
- const decisionMatch = this.DECISION_TRIGGERS.find(trigger =>
- lowerSentence.includes(trigger.toLowerCase())
- );
-
- if (decisionMatch) {
- const priority = this.calculatePriority(sentence);
- decisions.push({
- type: 'decision',
- priority,
- content: sentence.trim(),
- reasoning: `Contains decision trigger: "${decisionMatch}"`,
- suggestedAction: `save_decision with type based on content context`
- });
- }
-
- // Check for todos
- const todoMatch = this.TODO_TRIGGERS.find(trigger =>
- lowerSentence.includes(trigger.toLowerCase())
- );
-
- if (todoMatch) {
- const priority = this.calculatePriority(sentence);
- todos.push({
- type: 'todo',
- priority,
- content: sentence.trim(),
- reasoning: `Contains todo trigger: "${todoMatch}"`,
- suggestedAction: `todo_create with extracted task`
- });
- }
-
- // Check for insights
- const insightMatch = this.INSIGHT_TRIGGERS.find(trigger =>
- lowerSentence.includes(trigger.toLowerCase())
- );
-
- if (insightMatch) {
- const priority = this.calculatePriority(sentence);
- insights.push({
- type: 'conversation',
- priority,
- content: sentence.trim(),
- reasoning: `Contains insight trigger: "${insightMatch}"`,
- suggestedAction: `save_conversation as key insight`
- });
- }
- }
-
- // Create summary
- const totalSuggestions = decisions.length + todos.length + insights.length;
- const summary = totalSuggestions > 0
- ? `Found ${totalSuggestions} context items to save: ${decisions.length} decisions, ${todos.length} todos, ${insights.length} insights`
- : 'No significant context detected for saving';
-
- return {
- decisions,
- todos,
- insights,
- summary
- };
- }
-
- /**
- * Calculate priority based on technical keywords and sentence structure
- */
- private static calculatePriority(sentence: string): 'high' | 'medium' | 'low' {
- const lowerSentence = sentence.toLowerCase();
-
- // High priority indicators
- const hasMultipleTechnicalKeywords = this.TECHNICAL_KEYWORDS
- .filter(keyword => lowerSentence.includes(keyword.toLowerCase())).length >= 2;
-
- const hasHighPriorityWords = ['critical', 'important', 'must', 'crucial', 'essential']
- .some(word => lowerSentence.includes(word));
-
- if (hasMultipleTechnicalKeywords || hasHighPriorityWords) {
- return 'high';
- }
-
- // Medium priority indicators
- const hasTechnicalKeywords = this.TECHNICAL_KEYWORDS
- .some(keyword => lowerSentence.includes(keyword.toLowerCase()));
-
- const hasMediumPriorityWords = ['should', 'recommend', 'suggest', 'consider']
- .some(word => lowerSentence.includes(word));
-
- if (hasTechnicalKeywords || hasMediumPriorityWords) {
- return 'medium';
- }
-
- return 'low';
- }
-
- /**
- * Split text into meaningful sentences
- */
- private static splitIntoSentences(text: string): string[] {
- // Simple sentence splitting - could be enhanced with NLP
- return text
- .split(/[.!?]+/)
- .map(s => s.trim())
- .filter(s => s.length > 10); // Filter out very short fragments
- }
-
- /**
- * Extract specific decision information from text
- */
- static extractDecision(text: string): { type: string; description: string; reasoning?: string } | null {
- const lowerText = text.toLowerCase();
-
- // Determine decision type based on keywords
- let type = 'other';
- if (lowerText.includes('architecture') || lowerText.includes('design')) type = 'architecture';
- else if (lowerText.includes('library') || lowerText.includes('framework') || lowerText.includes('package')) type = 'library';
- else if (lowerText.includes('pattern') || lowerText.includes('approach') || lowerText.includes('method')) type = 'pattern';
- else if (lowerText.includes('config') || lowerText.includes('setting') || lowerText.includes('environment')) type = 'configuration';
-
- // Extract reasoning if present
- const reasoningKeywords = ['because', 'since', 'due to', 'as', 'reason', 'why'];
- let reasoning: string | undefined;
-
- for (const keyword of reasoningKeywords) {
- const keywordIndex = lowerText.indexOf(keyword);
- if (keywordIndex !== -1) {
- reasoning = text.substring(keywordIndex).trim();
- break;
- }
- }
-
- return {
- type,
- description: text.trim(),
- reasoning
- };
- }
-
- /**
- * Extract todo information from text
- */
- static extractTodo(text: string): { title: string; description: string; priority: string } | null {
- const lowerText = text.toLowerCase();
-
- // Determine priority
- let priority = 'medium';
- if (lowerText.includes('urgent') || lowerText.includes('critical') || lowerText.includes('asap')) priority = 'high';
- else if (lowerText.includes('later') || lowerText.includes('eventually') || lowerText.includes('nice to have')) priority = 'low';
-
- // Extract title (first meaningful part)
- const title = text.split(/[.!?]/)[0].trim().substring(0, 100);
-
- return {
- title,
- description: text.trim(),
- priority
- };
- }
-}
\ No newline at end of file
diff --git a/src/context-detector.ts b/src/context-detector.ts
deleted file mode 100644
index bba6911..0000000
--- a/src/context-detector.ts
+++ /dev/null
@@ -1,196 +0,0 @@
-// Automatic context detection from conversations
-
-export interface DetectedContext {
- type: 'tech' | 'decision' | 'architecture' | 'library';
- content: string;
- confidence: number;
- reasoning?: string;
-}
-
-export class ContextDetector {
- /**
- * Detect context from a message
- */
- detect(message: string): DetectedContext[] {
- const detected: DetectedContext[] = [];
-
- // Detect technologies
- detected.push(...this.detectTechnologies(message));
-
- // Detect decisions
- detected.push(...this.detectDecisions(message));
-
- // Detect architecture patterns
- detected.push(...this.detectArchitecture(message));
-
- // Filter by confidence threshold
- return detected.filter(d => d.confidence >= 0.7);
- }
-
- private detectTechnologies(message: string): DetectedContext[] {
- const detected: DetectedContext[] = [];
-
- // Technology mention patterns
- const techPatterns = [
- // "using X for Y"
- {
- regex: /(?:using|use|with)\s+([A-Z]\w+(?:\s+[A-Z]\w+)?)\s+for\s+(\w+)/gi,
- confidence: 0.9
- },
- // "add/integrate X"
- {
- regex: /(?:add|integrate|integrating|install|setup|set up)\s+([A-Z]\w+(?:\s+\d+)?)/gi,
- confidence: 0.85
- },
- // "X database/framework/library"
- {
- regex: /([A-Z]\w+(?:\s+\d+)?)\s+(?:database|framework|library|ORM|auth)/gi,
- confidence: 0.8
- }
- ];
-
- for (const pattern of techPatterns) {
- const matches = Array.from(message.matchAll(pattern.regex));
- for (const match of matches) {
- const tech = match[1].trim();
-
- // Filter out common words
- if (this.isValidTech(tech)) {
- detected.push({
- type: 'tech',
- content: tech,
- confidence: pattern.confidence,
- reasoning: `Detected from pattern: ${match[0]}`
- });
- }
- }
- }
-
- return detected;
- }
-
- private detectDecisions(message: string): DetectedContext[] {
- const detected: DetectedContext[] = [];
-
- // Decision patterns
- const decisionPatterns = [
- // "decided to..."
- {
- regex: /(?:decided|decide|decision)\s+(?:to\s+)?(.+?)(?:\s+because\s+(.+?))?(?:\.|$)/gi,
- confidence: 0.95
- },
- // "going to use..."
- {
- regex: /(?:going to|gonna|will)\s+use\s+(.+?)(?:\s+for\s+(.+?))?(?:\s+because\s+(.+?))?(?:\.|$)/gi,
- confidence: 0.9
- },
- // "chose X over Y"
- {
- regex: /(?:chose|choose|chosen)\s+(.+?)\s+over\s+(.+?)(?:\s+because\s+(.+?))?(?:\.|$)/gi,
- confidence: 0.95
- }
- ];
-
- for (const pattern of decisionPatterns) {
- const matches = Array.from(message.matchAll(pattern.regex));
- for (const match of matches) {
- const decision = match[1].trim();
- const reasoning = match[2] || match[3];
-
- detected.push({
- type: 'decision',
- content: decision,
- confidence: pattern.confidence,
- reasoning: reasoning ? reasoning.trim() : undefined
- });
- }
- }
-
- return detected;
- }
-
- private detectArchitecture(message: string): DetectedContext[] {
- const detected: DetectedContext[] = [];
-
- // Architecture patterns
- const archPatterns = [
- // "Next.js 14 with TypeScript"
- {
- regex: /(Next\.js\s+\d+|React|Vue|Angular|Svelte)\s+with\s+(.+?)(?:\s+and\s+(.+?))?(?:\.|$)/gi,
- confidence: 0.9
- },
- // "using [stack]"
- {
- regex: /using\s+([A-Z]\w+(?:\s+\d+)?)\s*\+\s*([A-Z]\w+)/gi,
- confidence: 0.85
- }
- ];
-
- for (const pattern of archPatterns) {
- const matches = Array.from(message.matchAll(pattern.regex));
- for (const match of matches) {
- detected.push({
- type: 'architecture',
- content: match[0].trim(),
- confidence: pattern.confidence
- });
- }
- }
-
- return detected;
- }
-
- /**
- * Check if a string is a valid technology name
- */
- private isValidTech(tech: string): boolean {
- // Filter out common words that aren't tech
- const commonWords = [
- 'the', 'and', 'for', 'with', 'this', 'that',
- 'have', 'from', 'they', 'will', 'would',
- 'there', 'their', 'what', 'when', 'where'
- ];
-
- const lower = tech.toLowerCase();
-
- // Must start with capital or be known tech
- if (!tech[0] || tech[0] !== tech[0].toUpperCase()) {
- return false;
- }
-
- // Not a common word
- if (commonWords.includes(lower)) {
- return false;
- }
-
- // Has reasonable length
- if (tech.length < 2 || tech.length > 30) {
- return false;
- }
-
- return true;
- }
-
- /**
- * Merge duplicate detections
- */
- mergeDuplicates(detections: DetectedContext[]): DetectedContext[] {
- const seen = new Map();
-
- for (const detection of detections) {
- const key = `${detection.type}:${detection.content.toLowerCase()}`;
-
- if (!seen.has(key)) {
- seen.set(key, detection);
- } else {
- // Keep the one with higher confidence
- const existing = seen.get(key)!;
- if (detection.confidence > existing.confidence) {
- seen.set(key, detection);
- }
- }
- }
-
- return Array.from(seen.values());
- }
-}
\ No newline at end of file
diff --git a/src/context-layers.ts b/src/context-layers.ts
new file mode 100644
index 0000000..15323e6
--- /dev/null
+++ b/src/context-layers.ts
@@ -0,0 +1,109 @@
+๏ปฟ/**
+ * Context Layers - Intentional, not pattern-based
+ * What AI needs to onboard quickly
+ */
+
+export interface ProjectIdentity {
+ id: string;
+ name: string;
+ path: string;
+ purpose: string;
+ tech: string[];
+ architecture: string;
+ created_at: number;
+ updated_at: number;
+}
+
+export interface ActiveWork {
+ id: string;
+ project_id: string;
+ task: string;
+ context: string;
+ files: string[];
+ branch?: string;
+ timestamp: number;
+ status: 'active' | 'paused' | 'completed';
+}
+
+export interface Constraint {
+ id: string;
+ project_id: string;
+ key: string;
+ value: string;
+ reasoning: string;
+ timestamp: number;
+}
+
+export interface Problem {
+ id: string;
+ project_id: string;
+ description: string;
+ context?: string;
+ status: 'open' | 'investigating' | 'resolved';
+ resolution?: string;
+ timestamp: number;
+}
+
+export interface Goal {
+ id: string;
+ project_id: string;
+ description: string;
+ target_date?: string;
+ status: 'planned' | 'in-progress' | 'blocked' | 'completed';
+ timestamp: number;
+}
+
+export interface Decision {
+ id: string;
+ project_id: string;
+ description: string;
+ reasoning: string;
+ alternatives?: string[];
+ timestamp: number;
+}
+
+export interface Note {
+ id: string;
+ project_id: string;
+ content: string;
+ tags: string[];
+ timestamp: number;
+}
+
+export interface Caveat {
+ id: string;
+ project_id: string;
+ description: string;
+ category: 'mistake' | 'shortcut' | 'unverified' | 'assumption' | 'workaround';
+ severity: 'low' | 'medium' | 'high' | 'critical';
+ attempted?: string;
+ error?: string;
+ recovery?: string;
+ verified: boolean;
+ action_required?: string;
+ affects_production: boolean;
+ timestamp: number;
+ resolved: boolean;
+ resolution?: string;
+ resolved_at?: number;
+}
+
+export interface RecallResult {
+ project: ProjectIdentity;
+ active_work: ActiveWork[];
+ constraints: Constraint[];
+ problems: Problem[];
+ goals: Goal[];
+ recent_decisions: Decision[];
+ notes: Note[];
+ caveats: Caveat[];
+}
+
+export type RememberType = 'active_work' | 'constraint' | 'problem' | 'goal' | 'decision' | 'note' | 'caveat';
+
+export interface RememberInput {
+ type: RememberType;
+ content: string;
+ metadata?: Record;
+}
+
diff --git a/src/core-tools.ts b/src/core-tools.ts
new file mode 100644
index 0000000..25aca3c
--- /dev/null
+++ b/src/core-tools.ts
@@ -0,0 +1,300 @@
+๏ปฟ/**
+ * Core Tool Definitions - The 9 Essential Tools
+ * Set of core tools: 8 project tools + 1 documentation tool (Notion)
+ */
+
+export const CORE_TOOLS = [
+ // ========== PROJECT MANAGEMENT ==========
+ {
+ name: 'set_project',
+ description: 'Initialize a project for context tracking. Detects project type, tech stack, and architecture. Call this once when starting work on a new project. This is the foundation - run this first.',
+ inputSchema: {
+ type: 'object',
+ properties: {
+ path: {
+ type: 'string',
+ description: 'Absolute path to project directory'
+ },
+ purpose: {
+ type: 'string',
+ description: 'Optional: What this project does (if not in README)'
+ }
+ },
+ required: ['path']
+ }
+ },
+
+ // ========== MEMORY ==========
+ {
+ name: 'remember',
+ description: `Store important context about the project. Use when the user shares something that matters for future sessions: active work, architectural constraints, problems encountered, goals, decisions, notes, or **caveats** (AI mistakes/tech debt).
+
+** NEW: CAVEAT TYPE - Track AI Mistakes & Tech Debt**
+
+When you (the AI) realize something went wrong, ALWAYS call remember with type='caveat':
+- You tried something that didn't work
+- You took a shortcut instead of proper solution
+- You made changes but didn't verify them
+- You made assumptions that might be wrong
+- You used a workaround instead of fixing root cause
+
+**Caveat Categories:**
+- **mistake**: Tried X, got error Y, did workaround Z (not ideal)
+- **shortcut**: Skipped proper testing/verification to save time
+- **unverified**: Made changes but didn't build/test/verify
+- **assumption**: Assumed X works without checking
+- **workaround**: Fixed symptom but not root cause
+
+**Caveat Metadata (REQUIRED):**
+- **severity**: 'low' | 'medium' | 'high' | 'critical'
+- **attempted**: What you tried to do
+- **error**: What went wrong (if applicable)
+- **recovery**: What you did instead
+- **verified**: true/false - Did you test the recovery?
+- **action_required**: What user needs to do (e.g., "Restart MCP server")
+- **affects_production**: true/false - Will users hit this issue?
+
+**Example - Git Tool MCP Issue:**
+\`\`\`typescript
+remember(
+ type: 'caveat',
+ content: 'Git tool updated with 6 new actions but MCP validation fails',
+ metadata: {
+ category: 'unverified',
+ severity: 'medium',
+ attempted: 'Test new git actions via MCP',
+ error: 'Schema validation error - actions not in allowed values',
+ recovery: 'Created manual test script instead',
+ verified: true, // Test script worked
+ action_required: 'User must restart MCP server for new actions to work',
+ affects_production: true // MCP users affected until restart
+ }
+)
+\`\`\`
+
+**When to Use Caveats:**
+ "Actually, I realize..." CAVEAT
+ "Let me try a different approach..." CAVEAT (first approach failed)
+ "Hmm, that didn't work..." CAVEAT
+ "Wait, on second thought..." CAVEAT (assumption was wrong)
+ Made changes but didn't run build/test CAVEAT (unverified)
+ Used workaround instead of proper fix CAVEAT (shortcut)
+
+**This makes Context Sync the FIRST tool to track AI mistakes as technical context!**
+
+**Command Language:** Users may say "cs remember" (git-style command). When they do, analyze the conversation and save relevant context.
+
+**AI-Driven Checkpoint Mode:**
+When the user says "cs remember" or "remember this conversation" WITHOUT specifying type/content, YOU (the AI) should:
+
+1. **Analyze the Recent Conversation** (last 5-10 messages)
+2. **Extract Context Categories:**
+ - Active Work: What task is currently being worked on? Files being edited? Branch name?
+ - Constraints: Any architectural rules, technology choices, or limitations mentioned?
+ - Problems: Blockers, bugs, errors, or issues discussed?
+ - Goals: Targets with deadlines? Features to build?
+ - Decisions: Important choices made with reasoning?
+ - Notes: Other important information worth remembering?
+ - **Caveats**: AI mistakes, workarounds, unverified changes?
+
+3. **Call remember() Multiple Times** - Once per item extracted, with structured data:
+ - Type: Choose the appropriate category
+ - Content: Clear, specific description (1-2 sentences)
+ - Metadata: Include file paths, code snippets, links, or other relevant context
+
+**Example Checkpoint Analysis:**
+If conversation discusses "building the TypeScript server with 8 core tools, tested successfully via MCP":
+- Call remember(type="active_work", content="Building Context Sync server with 8 core tools (set_project, remember, recall, etc.)", metadata={files: ["src/server.ts", "src/core-tools.ts"]})
+- Call remember(type="decision", content="Reduced from 50+ tools to 8 core tools, moved rest to internal utilities for simplicity", metadata={reasoning: "Users overwhelmed by too many tools"})
+- Call remember(type="active_work", content="Testing implementation via MCP protocol in Cursor", metadata={status: "All 8 tools tested successfully"})
+
+**When to Use Checkpoint Mode:**
+- User says "cs remember" with no arguments
+- User says "remember this conversation" or "save this session"
+- User asks to checkpoint progress before switching tasks
+- End of significant work session
+
+**When to Use Direct Mode:**
+- User specifies what to remember: "cs remember: using TypeScript for the server"
+- Clear type indicated: "cs constraint: must support SQLite"
+- Single piece of information to save
+
+This approach leverages YOUR (the AI's) conversation understanding while keeping the server as simple storage.`,
+ inputSchema: {
+ type: 'object',
+ properties: {
+ type: {
+ type: 'string',
+ enum: ['active_work', 'constraint', 'problem', 'goal', 'decision', 'note', 'caveat'],
+ description: 'What kind of context? active_work=current task, constraint=architectural rule, problem=blocker, goal=target, decision=choice made, note=important info, caveat=AI mistake/tech debt'
+ },
+ content: {
+ type: 'string',
+ description: 'What to remember (natural language, be specific)'
+ },
+ metadata: {
+ type: 'object',
+ description: 'Optional: Related files, code snippets, links, etc. For caveats: MUST include severity, category, attempted, recovery, verified, action_required, affects_production'
+ }
+ },
+ required: ['type', 'content']
+ }
+ },
+
+ {
+ name: 'recall',
+ description: 'Get context about the current project. Returns project identity, active work, constraints, problems, and recent decisions. Call this at the start of a conversation to understand what the user is working on. Essential for "good morning" handoffs.',
+ inputSchema: {
+ type: 'object',
+ properties: {
+ query: {
+ type: 'string',
+ description: 'Optional: Specific aspect to recall (e.g., "what were we working on?", "what constraints do we have?")'
+ },
+ limit: {
+ type: 'number',
+ description: 'Optional: How many recent items to return per category (default: 10)'
+ }
+ }
+ }
+ },
+
+ // ========== FILE OPERATIONS ==========
+ {
+ name: 'read_file',
+ description: 'Read a file from the current workspace. Use this to understand code context.',
+ inputSchema: {
+ type: 'object',
+ properties: {
+ path: {
+ type: 'string',
+ description: 'Relative path from workspace root'
+ }
+ },
+ required: ['path']
+ }
+ },
+
+ {
+ name: 'search',
+ description: 'Search the workspace. Can search by filename pattern or file contents. Use this to discover relevant files or find specific code.',
+ inputSchema: {
+ type: 'object',
+ properties: {
+ query: {
+ type: 'string',
+ description: 'What to search for (filename pattern or text content)'
+ },
+ type: {
+ type: 'string',
+ enum: ['files', 'content'],
+ description: 'Search type: "files" for filename patterns, "content" for text within files'
+ },
+ options: {
+ type: 'object',
+ properties: {
+ regex: { type: 'boolean', description: 'Use regex for content search' },
+ caseSensitive: { type: 'boolean', description: 'Case-sensitive search' },
+ filePattern: { type: 'string', description: 'Filter by file pattern (for content search)' },
+ maxResults: { type: 'number', description: 'Max results to return' }
+ }
+ }
+ },
+ required: ['query', 'type']
+ }
+ },
+
+ {
+ name: 'structure',
+ description: 'Get the file/folder structure of current workspace. Use this to understand project layout.',
+ inputSchema: {
+ type: 'object',
+ properties: {
+ depth: {
+ type: 'number',
+ description: 'Optional: How deep to traverse (default: 3)'
+ }
+ }
+ }
+ },
+
+ // ========== GIT OPERATIONS ==========
+ {
+ name: 'git',
+ description: `Git repository operations with intelligence. Provides status, context, risk analysis, and code ownership insights.
+
+**Actions:**
+- **status**: Current branch, staged/unstaged changes, commit readiness
+- **context**: Suggested commit messages, recent commits, branch info
+- **hotspots**: Risk analysis - files with high change frequency (indicates complexity/bugs)
+- **coupling**: Files that change together (reveals hidden dependencies)
+- **blame**: Code ownership - who wrote what (find the expert to ask)
+- **analysis**: Comprehensive overview combining all insights
+
+**Use Cases:**
+- Before committing: \`git action=status\` + \`git action=context\`
+- Understanding risk: \`git action=hotspots\` - find dangerous files
+- Refactoring: \`git action=coupling\` - find tightly coupled files
+- Need help?: \`git action=blame path=file.ts\` - find the expert
+- Big picture: \`git action=analysis\` - complete health check`,
+ inputSchema: {
+ type: 'object',
+ properties: {
+ action: {
+ type: 'string',
+ enum: ['status', 'context', 'hotspots', 'coupling', 'blame', 'analysis'],
+ description: 'Action: status=changes, context=commits, hotspots=risk, coupling=dependencies, blame=ownership, analysis=overview'
+ },
+ path: {
+ type: 'string',
+ description: 'File path (required for action=blame)'
+ },
+ staged: {
+ type: 'boolean',
+ description: 'For context: show staged changes (default: false)'
+ },
+ files: {
+ type: 'array',
+ items: { type: 'string' },
+ description: 'For context: specific files to analyze'
+ },
+ limit: {
+ type: 'number',
+ description: 'For hotspots: max results (default: 10)'
+ },
+ minCoupling: {
+ type: 'number',
+ description: 'For coupling: minimum co-changes (default: 3)'
+ }
+ },
+ required: ['action']
+ }
+ },
+
+ // ========== DOCUMENTATION ==========
+ {
+ name: 'notion',
+ description: 'Access your Notion workspace documentation. Use notion.search to find pages, notion.read to view content. Essential for pulling in external documentation context.',
+ inputSchema: {
+ type: 'object',
+ properties: {
+ action: {
+ type: 'string',
+ enum: ['search', 'read'],
+ description: 'Action to perform: "search" to find pages, "read" to view page content'
+ },
+ query: {
+ type: 'string',
+ description: 'Search query (required for action=search)'
+ },
+ pageId: {
+ type: 'string',
+ description: 'Notion page ID or URL (required for action=read)'
+ }
+ },
+ required: ['action']
+ }
+ }
+] as const;
+
+
diff --git a/src/database-migrator.ts b/src/database-migrator.ts
deleted file mode 100644
index 5137bc1..0000000
--- a/src/database-migrator.ts
+++ /dev/null
@@ -1,244 +0,0 @@
-// Database migration and cleanup tools
-import Database from 'better-sqlite3';
-import * as path from 'path';
-import * as os from 'os';
-import { PathNormalizer } from './path-normalizer.js';
-
-export interface MigrationResult {
- success: boolean;
- duplicatesFound: number;
- duplicatesRemoved: number;
- projectsMerged: number;
- errors: string[];
- details: string[];
-}
-
-export class DatabaseMigrator {
- private db: Database.Database;
-
- constructor(dbPath?: string) {
- const defaultPath = path.join(os.homedir(), '.context-sync', 'data.db');
- const actualPath = dbPath || defaultPath;
- this.db = new Database(actualPath);
- }
-
- /**
- * Migrate and merge duplicate projects by normalized path
- */
- async migrateDuplicateProjects(): Promise {
- const result: MigrationResult = {
- success: false,
- duplicatesFound: 0,
- duplicatesRemoved: 0,
- projectsMerged: 0,
- errors: [],
- details: []
- };
-
- try {
- // Begin transaction for safety
- this.db.exec('BEGIN TRANSACTION');
-
- // Find all projects grouped by normalized path
- const projects = this.db.prepare(`
- SELECT id, name, path, created_at, updated_at, architecture, tech_stack
- FROM projects
- WHERE path IS NOT NULL
- ORDER BY path, created_at ASC
- `).all() as any[];
-
- // Group by normalized path
- const pathGroups: Map = new Map();
-
- for (const project of projects) {
- const normalizedPath = PathNormalizer.normalize(project.path);
- if (!pathGroups.has(normalizedPath)) {
- pathGroups.set(normalizedPath, []);
- }
- pathGroups.get(normalizedPath)!.push(project);
- }
-
- // Process each group and merge duplicates
- for (const [normalizedPath, projectGroup] of pathGroups.entries()) {
- if (projectGroup.length <= 1) continue; // No duplicates
-
- result.duplicatesFound += projectGroup.length - 1;
- result.details.push(`Found ${projectGroup.length} duplicates for path: ${normalizedPath}`);
-
- // Sort by updated_at DESC to keep the most recently updated project
- projectGroup.sort((a, b) => b.updated_at - a.updated_at);
- const keepProject = projectGroup[0];
- const removeProjects = projectGroup.slice(1);
-
- // Update the keeper with best metadata
- const bestName = this.chooseBestProjectName(projectGroup, normalizedPath);
- const mergedTechStack = this.mergeTechStacks(projectGroup);
- const bestArchitecture = this.chooseBestArchitecture(projectGroup);
-
- // Update the project we're keeping
- this.db.prepare(`
- UPDATE projects
- SET name = ?, architecture = ?, tech_stack = ?, updated_at = ?
- WHERE id = ?
- `).run(
- bestName,
- bestArchitecture,
- mergedTechStack ? JSON.stringify(mergedTechStack) : null,
- Date.now(),
- keepProject.id
- );
-
- // Migrate conversations and decisions to the keeper
- for (const removeProject of removeProjects) {
- // Update conversations
- const conversationCount = this.db.prepare(`
- UPDATE conversations SET project_id = ? WHERE project_id = ?
- `).run(keepProject.id, removeProject.id).changes;
-
- // Update decisions
- const decisionCount = this.db.prepare(`
- UPDATE decisions SET project_id = ? WHERE project_id = ?
- `).run(keepProject.id, removeProject.id).changes;
-
- // Update todos
- const todoCount = this.db.prepare(`
- UPDATE todos SET project_id = ? WHERE project_id = ?
- `).run(keepProject.id, removeProject.id).changes;
-
- result.details.push(
- `Merged project ${removeProject.name} (${removeProject.id.substring(0, 8)}): ` +
- `${conversationCount} conversations, ${decisionCount} decisions, ${todoCount} todos`
- );
-
- // Delete the duplicate project
- this.db.prepare('DELETE FROM projects WHERE id = ?').run(removeProject.id);
- result.duplicatesRemoved++;
- }
-
- result.projectsMerged++;
- result.details.push(`Kept project: ${keepProject.name} (${keepProject.id.substring(0, 8)})`);
- }
-
- // Commit transaction
- this.db.exec('COMMIT');
- result.success = true;
-
- result.details.push(`\nโ
Migration completed successfully!`);
- result.details.push(`๐ Summary: ${result.duplicatesRemoved} duplicates removed, ${result.projectsMerged} projects merged`);
-
- } catch (error) {
- // Rollback on error
- this.db.exec('ROLLBACK');
- result.errors.push(`Migration failed: ${error instanceof Error ? error.message : String(error)}`);
- result.success = false;
- }
-
- return result;
- }
-
- /**
- * Choose the best project name from duplicates (prefer folder name)
- */
- private chooseBestProjectName(projects: any[], normalizedPath: string): string {
- const folderName = path.basename(normalizedPath);
-
- // Prefer the folder name if any project has it
- const folderNameProject = projects.find(p => p.name === folderName);
- if (folderNameProject) return folderName;
-
- // Otherwise prefer names that don't start with @ (package names)
- const nonPackageNames = projects.filter(p => !p.name.startsWith('@'));
- if (nonPackageNames.length > 0) {
- return nonPackageNames[0].name;
- }
-
- // Fallback to most recent project name
- return projects[0].name;
- }
-
- /**
- * Merge tech stacks from duplicate projects
- */
- private mergeTechStacks(projects: any[]): string[] | null {
- const allTechStacks: string[] = [];
-
- for (const project of projects) {
- if (project.tech_stack) {
- try {
- const techStack = JSON.parse(project.tech_stack);
- if (Array.isArray(techStack)) {
- allTechStacks.push(...techStack);
- }
- } catch {
- // Ignore invalid JSON
- }
- }
- }
-
- // Return unique tech stack items
- const uniqueTechStack = [...new Set(allTechStacks)];
- return uniqueTechStack.length > 0 ? uniqueTechStack : null;
- }
-
- /**
- * Choose the best architecture from duplicates
- */
- private chooseBestArchitecture(projects: any[]): string | null {
- // Find the most specific architecture
- const architectures = projects
- .map(p => p.architecture)
- .filter(arch => arch && arch !== 'Not specified');
-
- return architectures[0] || null;
- }
-
- /**
- * Get migration statistics without running the migration
- */
- async getMigrationStats(): Promise<{
- totalProjects: number;
- projectsWithPaths: number;
- duplicateGroups: number;
- totalDuplicates: number;
- duplicateDetails: Array<{ path: string; count: number; names: string[] }>;
- }> {
- const projects = this.db.prepare(`
- SELECT id, name, path
- FROM projects
- WHERE path IS NOT NULL
- `).all() as any[];
-
- const pathGroups: Map = new Map();
-
- for (const project of projects) {
- const normalizedPath = PathNormalizer.normalize(project.path);
- if (!pathGroups.has(normalizedPath)) {
- pathGroups.set(normalizedPath, []);
- }
- pathGroups.get(normalizedPath)!.push(project);
- }
-
- const duplicateGroups = Array.from(pathGroups.entries())
- .filter(([_, group]) => group.length > 1);
-
- const totalDuplicates = duplicateGroups.reduce((sum, [_, group]) => sum + (group.length - 1), 0);
-
- const totalCount = this.db.prepare('SELECT COUNT(*) as count FROM projects').get() as { count: number };
-
- return {
- totalProjects: totalCount.count,
- projectsWithPaths: projects.length,
- duplicateGroups: duplicateGroups.length,
- totalDuplicates,
- duplicateDetails: duplicateGroups.map(([path, group]) => ({
- path,
- count: group.length,
- names: group.map(p => p.name)
- }))
- };
- }
-
- close(): void {
- this.db.close();
- }
-}
\ No newline at end of file
diff --git a/src/dependency-analyzer.ts b/src/dependency-analyzer.ts
deleted file mode 100644
index f88891c..0000000
--- a/src/dependency-analyzer.ts
+++ /dev/null
@@ -1,638 +0,0 @@
-import * as fs from 'fs';
-import * as path from 'path';
-import * as chokidar from 'chokidar';
-import { FileSizeGuard } from './file-size-guard.js';
-import { skimForDependencies } from './file-skimmer.js';
-
-// Types for dependency analysis
-export interface ImportInfo {
- source: string; // The module being imported (e.g., './utils' or 'react')
- importedNames: string[]; // Named imports (e.g., ['useState', 'useEffect'])
- defaultImport?: string; // Default import name
- namespaceImport?: string; // Namespace import (e.g., 'import * as React')
- isExternal: boolean; // Whether it's an npm package vs local file
- line: number; // Line number in file
- rawStatement: string; // Original import statement
-}
-
-export interface ExportInfo {
- exportedNames: string[]; // Named exports
- hasDefaultExport: boolean;
- line: number;
- rawStatement: string;
-}
-
-export interface DependencyGraph {
- filePath: string;
- imports: ImportInfo[];
- exports: ExportInfo[];
- importers: string[]; // Files that import this file
- dependencies: string[]; // Files this file imports
- circularDeps: CircularDependency[];
-}
-
-export interface CircularDependency {
- cycle: string[]; // Array of file paths forming the cycle
- description: string;
-}
-
-export interface DependencyTree {
- file: string;
- depth: number;
- imports: DependencyTree[];
- isExternal: boolean;
- isCyclic?: boolean;
-}
-
-export class DependencyAnalyzer {
- private workspacePath: string;
- private fileCache: Map;
- private dependencyCache: Map;
- private fileWatcher: chokidar.FSWatcher | null = null;
- private fileSizeGuard: FileSizeGuard;
- // Performance: File index for O(1) importer lookups instead of O(n) scans
- private fileIndex: Map> | null = null;
- // Performance: Debounce file watcher to prevent thrashing
- private invalidateDebounceTimer: NodeJS.Timeout | null = null;
- private pendingInvalidations: Set = new Set();
- private readonly DEBOUNCE_MS = 300;
-
- constructor(workspacePath: string) {
- this.workspacePath = workspacePath;
- this.fileCache = new Map();
- this.dependencyCache = new Map();
- this.fileSizeGuard = new FileSizeGuard({
- maxFileSize: 5 * 1024 * 1024, // 5MB per file (suitable for large TypeScript files)
- maxTotalSize: 50 * 1024 * 1024, // 50MB total (prevent excessive memory use)
- skipLargeFiles: true, // Skip rather than error on large files
- });
- this.setupFileWatcher();
- }
-
- /**
- * Set up file watcher for cache invalidation
- */
- private setupFileWatcher(): void {
- const watchPatterns = [
- path.join(this.workspacePath, '**/*.{ts,tsx,js,jsx,mjs,cjs}'),
- ];
-
- this.fileWatcher = chokidar.watch(watchPatterns, {
- ignored: [
- '**/node_modules/**',
- '**/.git/**',
- '**/dist/**',
- '**/build/**',
- '**/.next/**',
- '**/out/**',
- '**/coverage/**'
- ],
- ignoreInitial: true,
- persistent: true,
- awaitWriteFinish: {
- stabilityThreshold: 100,
- pollInterval: 50
- }
- });
-
- this.fileWatcher
- .on('change', (filePath) => {
- this.invalidateCache(filePath);
- })
- .on('add', (filePath) => {
- this.invalidateCache(filePath);
- })
- .on('unlink', (filePath) => {
- this.invalidateCache(filePath);
- })
- .on('error', () => {
- // Silently handle file watcher errors
- });
- }
-
- /**
- * Invalidate caches for a specific file (debounced)
- */
- private invalidateCache(filePath: string): void {
- // Add to pending invalidations
- this.pendingInvalidations.add(filePath);
-
- // Clear existing timer
- if (this.invalidateDebounceTimer) {
- clearTimeout(this.invalidateDebounceTimer);
- }
-
- // Set new timer to batch invalidations
- this.invalidateDebounceTimer = setTimeout(() => {
- this.flushInvalidations();
- }, this.DEBOUNCE_MS);
- }
-
- /**
- * Flush all pending cache invalidations
- */
- private flushInvalidations(): void {
- if (this.pendingInvalidations.size === 0) {
- return;
- }
-
- const filesToInvalidate = Array.from(this.pendingInvalidations);
- this.pendingInvalidations.clear();
- this.invalidateDebounceTimer = null;
-
- console.error(`๐ Flushing ${filesToInvalidate.length} cache invalidations...`);
-
- // Invalidate file index once (not per file)
- this.fileIndex = null;
-
- // Process each file
- for (const filePath of filesToInvalidate) {
- // Remove file from cache
- this.fileCache.delete(filePath);
-
- // Remove dependency graph from cache
- this.dependencyCache.delete(filePath);
-
- // Also invalidate any dependent files (files that import this file)
- for (const [cachedFile, graph] of this.dependencyCache.entries()) {
- if (graph.dependencies.includes(filePath) || graph.importers.includes(filePath)) {
- this.dependencyCache.delete(cachedFile);
- }
- }
- }
-
- const relPaths = filesToInvalidate.map(f => path.relative(this.workspacePath, f)).join(', ');
- console.error(`โ
Cache invalidated for: ${relPaths.length > 100 ? relPaths.slice(0, 97) + '...' : relPaths}`);
- }
-
- /**
- * Main method: Analyze all dependencies for a file
- */
- public analyzeDependencies(filePath: string): DependencyGraph {
- const absolutePath = this.resolveFilePath(filePath);
-
- // Check cache first
- if (this.dependencyCache.has(absolutePath)) {
- return this.dependencyCache.get(absolutePath)!;
- }
-
- const imports = this.getImports(absolutePath);
- const exports = this.getExports(absolutePath);
- const importers = this.findImporters(absolutePath);
- const dependencies = imports.map(imp => this.resolveImportPath(absolutePath, imp.source)).filter(Boolean) as string[];
- const circularDeps = this.detectCircularDependencies(absolutePath);
-
- const graph: DependencyGraph = {
- filePath: absolutePath,
- imports,
- exports,
- importers,
- dependencies,
- circularDeps
- };
-
- this.dependencyCache.set(absolutePath, graph);
- return graph;
- }
-
- /**
- * Get all imports from a file
- */
- public getImports(filePath: string): ImportInfo[] {
- const content = this.readFile(filePath);
- const imports: ImportInfo[] = [];
-
- // Regex patterns for different import styles
- const patterns = [
- // ES6 imports: import { x, y } from 'module'
- /import\s+{([^}]+)}\s+from\s+['"]([^'"]+)['"]/g,
- // Default import: import React from 'react'
- /import\s+(\w+)\s+from\s+['"]([^'"]+)['"]/g,
- // Namespace import: import * as name from 'module'
- /import\s+\*\s+as\s+(\w+)\s+from\s+['"]([^'"]+)['"]/g,
- // Side-effect import: import 'module'
- /import\s+['"]([^'"]+)['"]/g,
- // require(): const x = require('module')
- /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g,
- ];
-
- const lines = content.split('\n');
-
- lines.forEach((line, lineNumber) => {
- // ES6 named imports
- const namedMatch = /import\s+{([^}]+)}\s+from\s+['"]([^'"]+)['"]/.exec(line);
- if (namedMatch) {
- const importedNames = namedMatch[1].split(',').map(s => s.trim().split(/\s+as\s+/)[0]);
- const source = namedMatch[2];
- imports.push({
- source,
- importedNames,
- isExternal: this.isExternalModule(source),
- line: lineNumber + 1,
- rawStatement: line.trim()
- });
- return;
- }
-
- // Default import
- const defaultMatch = /import\s+(\w+)\s+from\s+['"]([^'"]+)['"]/.exec(line);
- if (defaultMatch && !line.includes('*')) {
- imports.push({
- source: defaultMatch[2],
- importedNames: [],
- defaultImport: defaultMatch[1],
- isExternal: this.isExternalModule(defaultMatch[2]),
- line: lineNumber + 1,
- rawStatement: line.trim()
- });
- return;
- }
-
- // Namespace import
- const namespaceMatch = /import\s+\*\s+as\s+(\w+)\s+from\s+['"]([^'"]+)['"]/.exec(line);
- if (namespaceMatch) {
- imports.push({
- source: namespaceMatch[2],
- importedNames: [],
- namespaceImport: namespaceMatch[1],
- isExternal: this.isExternalModule(namespaceMatch[2]),
- line: lineNumber + 1,
- rawStatement: line.trim()
- });
- return;
- }
-
- // Side-effect import
- const sideEffectMatch = /^import\s+['"]([^'"]+)['"]/.exec(line.trim());
- if (sideEffectMatch) {
- imports.push({
- source: sideEffectMatch[1],
- importedNames: [],
- isExternal: this.isExternalModule(sideEffectMatch[1]),
- line: lineNumber + 1,
- rawStatement: line.trim()
- });
- return;
- }
-
- // require()
- const requireMatch = /require\s*\(\s*['"]([^'"]+)['"]\s*\)/.exec(line);
- if (requireMatch) {
- imports.push({
- source: requireMatch[1],
- importedNames: [],
- isExternal: this.isExternalModule(requireMatch[1]),
- line: lineNumber + 1,
- rawStatement: line.trim()
- });
- }
- });
-
- return imports;
- }
-
- /**
- * Get all exports from a file
- */
- public getExports(filePath: string): ExportInfo[] {
- const content = this.readFile(filePath);
- const exports: ExportInfo[] = [];
- const lines = content.split('\n');
-
- lines.forEach((line, lineNumber) => {
- // Named exports: export { x, y }
- const namedMatch = /export\s+{([^}]+)}/.exec(line);
- if (namedMatch) {
- const exportedNames = namedMatch[1].split(',').map(s => s.trim().split(/\s+as\s+/)[0]);
- exports.push({
- exportedNames,
- hasDefaultExport: false,
- line: lineNumber + 1,
- rawStatement: line.trim()
- });
- return;
- }
-
- // Export declaration: export const x = ...
- const declMatch = /export\s+(const|let|var|function|class|interface|type|enum)\s+(\w+)/.exec(line);
- if (declMatch) {
- exports.push({
- exportedNames: [declMatch[2]],
- hasDefaultExport: false,
- line: lineNumber + 1,
- rawStatement: line.trim()
- });
- return;
- }
-
- // Default export
- if (line.includes('export default')) {
- exports.push({
- exportedNames: [],
- hasDefaultExport: true,
- line: lineNumber + 1,
- rawStatement: line.trim()
- });
- }
- });
-
- return exports;
- }
-
- /**
- * Build file index for fast importer lookups
- * Maps file path -> Set of files that import it
- */
- private buildFileIndex(maxFiles: number = 1000): Map> {
- const index = new Map>();
- const allFiles = this.getAllProjectFiles(maxFiles);
-
- console.error(`๐ Building file index for ${allFiles.length} files...`);
-
- for (const file of allFiles) {
- const imports = this.getImports(file);
- for (const imp of imports) {
- if (!imp.isExternal) {
- const resolvedPath = this.resolveImportPath(file, imp.source);
- if (resolvedPath) {
- if (!index.has(resolvedPath)) {
- index.set(resolvedPath, new Set());
- }
- index.get(resolvedPath)!.add(file);
- }
- }
- }
- }
-
- console.error(`โ
File index built with ${index.size} entries`);
- return index;
- }
-
- /**
- * Find all files that import the given file
- * Performance: O(1) with file index vs O(n) without
- */
- public findImporters(filePath: string, maxFiles: number = 1000): string[] {
- const absolutePath = this.resolveFilePath(filePath);
-
- // Build index on first use
- if (!this.fileIndex) {
- this.fileIndex = this.buildFileIndex(maxFiles);
- }
-
- // O(1) lookup!
- const importers = this.fileIndex.get(absolutePath);
- return importers ? Array.from(importers) : [];
- }
-
- /**
- * Detect circular dependencies
- */
- public detectCircularDependencies(filePath: string): CircularDependency[] {
- const absolutePath = this.resolveFilePath(filePath);
- const visited = new Set();
- const recursionStack = new Set();
- const cycles: CircularDependency[] = [];
-
- const dfs = (currentFile: string, path: string[]) => {
- if (recursionStack.has(currentFile)) {
- // Found a cycle
- const cycleStart = path.indexOf(currentFile);
- const cycle = path.slice(cycleStart).concat([currentFile]);
- cycles.push({
- cycle,
- description: `Circular dependency: ${cycle.join(' โ ')}`
- });
- return;
- }
-
- if (visited.has(currentFile)) {
- return;
- }
-
- visited.add(currentFile);
- recursionStack.add(currentFile);
-
- const imports = this.getImports(currentFile);
- for (const imp of imports) {
- if (!imp.isExternal) {
- const resolvedPath = this.resolveImportPath(currentFile, imp.source);
- if (resolvedPath) {
- dfs(resolvedPath, [...path, currentFile]);
- }
- }
- }
-
- recursionStack.delete(currentFile);
- };
-
- dfs(absolutePath, []);
- return cycles;
- }
-
- /**
- * Get dependency tree with depth
- */
- public getDependencyTree(filePath: string, maxDepth: number = 3): DependencyTree {
- const absolutePath = this.resolveFilePath(filePath);
- const visited = new Set();
-
- const buildTree = (file: string, depth: number): DependencyTree => {
- const imports = this.getImports(file);
- const tree: DependencyTree = {
- file: this.getRelativePath(file),
- depth,
- imports: [],
- isExternal: false,
- isCyclic: visited.has(file)
- };
-
- if (depth >= maxDepth || visited.has(file)) {
- return tree;
- }
-
- visited.add(file);
-
- for (const imp of imports) {
- if (imp.isExternal) {
- tree.imports.push({
- file: imp.source,
- depth: depth + 1,
- imports: [],
- isExternal: true
- });
- } else {
- const resolvedPath = this.resolveImportPath(file, imp.source);
- if (resolvedPath) {
- tree.imports.push(buildTree(resolvedPath, depth + 1));
- }
- }
- }
-
- return tree;
- };
-
- return buildTree(absolutePath, 0);
- }
-
- // Helper methods
-
- private readFile(filePath: string): string {
- if (this.fileCache.has(filePath)) {
- return this.fileCache.get(filePath)!;
- }
-
- try {
- // Use intelligent skimming for dependency analysis
- const skimResult = skimForDependencies(filePath);
-
- if (!skimResult.content) {
- // Fallback to file size guard if skimming fails
- const guardResult = this.fileSizeGuard.readFile(filePath, 'utf-8');
- if (guardResult.skipped) {
- if (guardResult.reason) {
- console.error(`โ ๏ธ ${guardResult.reason}, skipping: ${path.relative(this.workspacePath, filePath)}`);
- }
- return '';
- }
- const content = guardResult.content;
- this.fileCache.set(filePath, content);
- return content;
- }
-
- // Successfully skimmed or read the file
- // File skimming is now silent to reduce log noise
-
- const content = skimResult.content;
- this.fileCache.set(filePath, content);
- return content;
- } catch (error) {
- return '';
- }
- }
-
- private resolveFilePath(filePath: string): string {
- if (path.isAbsolute(filePath)) {
- return filePath;
- }
- return path.resolve(this.workspacePath, filePath);
- }
-
- private getRelativePath(filePath: string): string {
- return path.relative(this.workspacePath, filePath);
- }
-
- private isExternalModule(source: string): boolean {
- // External if it doesn't start with . or /
- return !source.startsWith('.') && !source.startsWith('/');
- }
-
- private resolveImportPath(fromFile: string, importSource: string): string | null {
- if (this.isExternalModule(importSource)) {
- return null; // Don't resolve external modules
- }
-
- const dir = path.dirname(fromFile);
- const extensions = ['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'];
-
- // Try to resolve with extensions
- for (const ext of extensions) {
- const withExt = path.resolve(dir, importSource + ext);
- if (fs.existsSync(withExt)) {
- return withExt;
- }
- }
-
- // Try index files
- for (const ext of extensions) {
- const indexFile = path.resolve(dir, importSource, 'index' + ext);
- if (fs.existsSync(indexFile)) {
- return indexFile;
- }
- }
-
- // Try as-is
- const asIs = path.resolve(dir, importSource);
- if (fs.existsSync(asIs)) {
- return asIs;
- }
-
- return null;
- }
-
- private getAllProjectFiles(maxFiles: number = 1000): string[] {
- const files: string[] = [];
- const extensions = ['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'];
- let fileCount = 0;
-
- const walk = (dir: string) => {
- // Stop if we've hit the file limit
- if (fileCount >= maxFiles) {
- return;
- }
-
- try {
- const entries = fs.readdirSync(dir, { withFileTypes: true });
-
- for (const entry of entries) {
- // Check limit again in the loop
- if (fileCount >= maxFiles) {
- console.warn(`โ ๏ธ File limit reached (${maxFiles} files). Stopping scan to prevent hangs.`);
- return;
- }
-
- const fullPath = path.join(dir, entry.name);
-
- // Skip node_modules, dist, build, etc.
- if (entry.isDirectory()) {
- if (!['node_modules', 'dist', 'build', '.git', '.next', 'out', 'coverage'].includes(entry.name)) {
- walk(fullPath);
- }
- } else {
- const ext = path.extname(entry.name);
- if (extensions.includes(ext)) {
- files.push(fullPath);
- fileCount++;
- }
- }
- }
- } catch (error) {
- // Skip directories we can't read
- }
- };
-
- walk(this.workspacePath);
-
- if (fileCount >= maxFiles) {
- console.warn(`๐ Scanned ${maxFiles} files (limit reached). Use smaller projects or increase limit for complete analysis.`);
- }
-
- return files;
- }
-
- /**
- * Clear caches (useful for testing or when files change)
- */
- public clearCache() {
- this.fileCache.clear();
- this.dependencyCache.clear();
- }
-
- /**
- * Dispose resources (cleanup file watcher and pending timers)
- */
- public dispose(): void {
- // Clear debounce timer and flush pending invalidations
- if (this.invalidateDebounceTimer) {
- clearTimeout(this.invalidateDebounceTimer);
- this.invalidateDebounceTimer = null;
- }
- this.flushInvalidations();
-
- // Close file watcher
- if (this.fileWatcher) {
- this.fileWatcher.close();
- this.fileWatcher = null;
- }
- }
-}
diff --git a/src/file-searcher.ts b/src/file-searcher.ts
deleted file mode 100644
index f68aa05..0000000
--- a/src/file-searcher.ts
+++ /dev/null
@@ -1,768 +0,0 @@
-// File and Content Search Operations
-
-import * as fs from 'fs';
-import * as fsp from 'fs/promises';
-import * as path from 'path';
-import { WorkspaceDetector } from './workspace-detector.js';
-import { FileSizeGuard } from './file-size-guard.js';
-import { FileSkimmer } from './file-skimmer.js';
-
-export interface FileMatch {
- path: string;
- name: string;
- size: number;
- language: string;
-}
-
-export interface ContentMatch {
- path: string;
- line: number;
- content: string;
- match: string;
- context: {
- before: string[];
- after: string[];
- };
-}
-
-export interface SearchOptions {
- maxResults?: number;
- ignoreCase?: boolean;
- filePattern?: string;
-}
-
-export interface ContentSearchOptions extends SearchOptions {
- regex?: boolean;
- caseSensitive?: boolean;
- contextLines?: number;
-}
-
-export class FileSearcher {
- // Regex pattern cache for better performance
- private patternCache: Map = new Map();
- private fileSizeGuard: FileSizeGuard;
- private fileSkimmer: FileSkimmer;
-
- constructor(private workspaceDetector: WorkspaceDetector) {
- this.fileSizeGuard = new FileSizeGuard();
- this.fileSkimmer = new FileSkimmer({
- maxFileSize: 2 * 1024 * 1024, // Start skimming at 2MB for search operations
- headerSize: 64 * 1024, // 64KB header
- footerSize: 32 * 1024, // 32KB footer
- });
- }
-
- /**
- * Search for files by name or pattern (async version with parallel processing)
- */
- async searchFilesAsync(pattern: string, options: SearchOptions = {}): Promise {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return [];
- }
-
- const {
- maxResults = 50,
- ignoreCase = true,
- filePattern
- } = options;
-
- const results: FileMatch[] = [];
- const searchPattern = ignoreCase ? pattern.toLowerCase() : pattern;
-
- await this.searchRecursiveAsync(
- workspace,
- searchPattern,
- results,
- maxResults,
- ignoreCase,
- filePattern
- );
-
- return results;
- }
-
- /**
- * Search for files by name or pattern (sync version - deprecated, use searchFilesAsync)
- */
- searchFiles(pattern: string, options: SearchOptions = {}): FileMatch[] {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return [];
- }
-
- const {
- maxResults = 50,
- ignoreCase = true,
- filePattern
- } = options;
-
- const results: FileMatch[] = [];
- const searchPattern = ignoreCase ? pattern.toLowerCase() : pattern;
-
- this.searchRecursive(
- workspace,
- searchPattern,
- results,
- maxResults,
- ignoreCase,
- filePattern
- );
-
- return results;
- }
-
- /**
- * Search file contents for text or regex (async version with parallel processing)
- */
- async searchContentAsync(
- query: string,
- options: ContentSearchOptions = {}
- ): Promise {
- // Reset file size guard for new search operation
- this.fileSizeGuard.reset();
-
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return [];
- }
-
- const {
- maxResults = 100,
- regex = false,
- caseSensitive = false,
- contextLines = 2,
- filePattern
- } = options;
-
- const results: ContentMatch[] = [];
- const searchRegex = this.createSearchRegex(query, regex, caseSensitive);
-
- await this.searchContentRecursiveAsync(
- workspace,
- query,
- searchRegex,
- results,
- maxResults,
- contextLines,
- filePattern
- );
-
- return results;
- }
-
- /**
- * Search file contents for text or regex (sync version - deprecated, use searchContentAsync)
- */
- searchContent(
- query: string,
- options: ContentSearchOptions = {}
- ): ContentMatch[] {
- // Reset file size guard for new search operation
- this.fileSizeGuard.reset();
-
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return [];
- }
-
- const {
- maxResults = 100,
- regex = false,
- caseSensitive = false,
- contextLines = 2,
- filePattern
- } = options;
-
- const results: ContentMatch[] = [];
- const searchRegex = this.createSearchRegex(query, regex, caseSensitive);
-
- this.searchContentRecursive(
- workspace,
- query,
- searchRegex,
- results,
- maxResults,
- contextLines,
- filePattern
- );
-
- return results;
- }
-
- /**
- * Find symbol definitions (functions, classes, etc.)
- */
- findSymbol(symbol: string, type?: 'function' | 'class' | 'variable' | 'all'): ContentMatch[] {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return [];
- }
-
- const patterns = this.getSymbolPatterns(symbol, type || 'all');
- const results: ContentMatch[] = [];
-
- for (const pattern of patterns) {
- const matches = this.searchContent(pattern, {
- regex: true,
- caseSensitive: true,
- maxResults: 20
- });
- results.push(...matches);
- }
-
- return results;
- }
-
- /**
- * Get unique file extensions in workspace
- */
- getFileExtensions(): Map {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return new Map();
- }
-
- const extensions = new Map();
- this.countExtensions(workspace, extensions);
- return extensions;
- }
-
- /**
- * Get file statistics
- */
- getFileStats(): {
- totalFiles: number;
- totalSize: number;
- byExtension: Map;
- } {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return { totalFiles: 0, totalSize: 0, byExtension: new Map() };
- }
-
- const stats = {
- totalFiles: 0,
- totalSize: 0,
- byExtension: new Map()
- };
-
- this.calculateStats(workspace, stats);
- return stats;
- }
-
- // ========== PRIVATE HELPER METHODS ==========
-
- /**
- * Async recursive search with parallel directory processing
- */
- private async searchRecursiveAsync(
- dirPath: string,
- pattern: string,
- results: FileMatch[],
- maxResults: number,
- ignoreCase: boolean,
- filePattern?: string
- ): Promise {
- if (results.length >= maxResults) return;
-
- try {
- const entries = await fsp.readdir(dirPath, { withFileTypes: true });
-
- // Separate directories and files
- const directories: string[] = [];
- const files: Array<{ name: string; fullPath: string }> = [];
-
- for (const entry of entries) {
- if (this.shouldIgnore(entry.name)) continue;
- const fullPath = path.join(dirPath, entry.name);
-
- if (entry.isDirectory()) {
- directories.push(fullPath);
- } else {
- files.push({ name: entry.name, fullPath });
- }
- }
-
- // Process files in parallel batches
- const BATCH_SIZE = 10;
- for (let i = 0; i < files.length && results.length < maxResults; i += BATCH_SIZE) {
- const batch = files.slice(i, i + BATCH_SIZE);
- const filePromises = batch.map(async ({ name, fullPath }) => {
- try {
- const displayName = ignoreCase ? name.toLowerCase() : name;
-
- // Check file pattern if specified
- if (filePattern && !this.matchesPattern(name, filePattern)) {
- return null;
- }
-
- // Check if name matches search pattern
- if (displayName.includes(pattern)) {
- const stats = await fsp.stat(fullPath);
- const relativePath = path.relative(
- this.workspaceDetector.getCurrentWorkspace()!,
- fullPath
- );
-
- return {
- path: relativePath,
- name: name,
- size: stats.size,
- language: this.detectLanguage(name)
- };
- }
- return null;
- } catch {
- return null;
- }
- });
-
- const batchResults = await Promise.all(filePromises);
- for (const result of batchResults) {
- if (result && results.length < maxResults) {
- results.push(result);
- }
- }
- }
-
- // Process directories recursively in parallel (batches of 3 to avoid overwhelming)
- const DIR_BATCH_SIZE = 3;
- for (let i = 0; i < directories.length && results.length < maxResults; i += DIR_BATCH_SIZE) {
- const batch = directories.slice(i, i + DIR_BATCH_SIZE);
- await Promise.all(
- batch.map(dir => this.searchRecursiveAsync(dir, pattern, results, maxResults, ignoreCase, filePattern))
- );
- }
- } catch (error) {
- // Ignore errors (permission denied, etc.)
- }
- }
-
- private searchRecursive(
- dirPath: string,
- pattern: string,
- results: FileMatch[],
- maxResults: number,
- ignoreCase: boolean,
- filePattern?: string
- ): void {
- if (results.length >= maxResults) return;
-
- try {
- const entries = fs.readdirSync(dirPath, { withFileTypes: true });
-
- for (const entry of entries) {
- if (results.length >= maxResults) break;
- if (this.shouldIgnore(entry.name)) continue;
-
- const fullPath = path.join(dirPath, entry.name);
-
- if (entry.isDirectory()) {
- this.searchRecursive(fullPath, pattern, results, maxResults, ignoreCase, filePattern);
- } else {
- const name = ignoreCase ? entry.name.toLowerCase() : entry.name;
-
- // Check file pattern if specified
- if (filePattern && !this.matchesPattern(entry.name, filePattern)) {
- continue;
- }
-
- // Check if name matches search pattern
- if (name.includes(pattern)) {
- const stats = fs.statSync(fullPath);
- const relativePath = path.relative(
- this.workspaceDetector.getCurrentWorkspace()!,
- fullPath
- );
-
- results.push({
- path: relativePath,
- name: entry.name,
- size: stats.size,
- language: this.detectLanguage(entry.name)
- });
- }
- }
- }
- } catch (error) {
- // Ignore errors (permission denied, etc.)
- }
- }
-
- /**
- * Async recursive content search with parallel file processing
- */
- private async searchContentRecursiveAsync(
- dirPath: string,
- originalQuery: string,
- searchRegex: RegExp,
- results: ContentMatch[],
- maxResults: number,
- contextLines: number,
- filePattern?: string
- ): Promise {
- if (results.length >= maxResults) return;
-
- try {
- const entries = await fsp.readdir(dirPath, { withFileTypes: true });
-
- // Separate directories and files
- const directories: string[] = [];
- const textFiles: Array<{ name: string; fullPath: string }> = [];
-
- for (const entry of entries) {
- if (this.shouldIgnore(entry.name)) continue;
- const fullPath = path.join(dirPath, entry.name);
-
- if (entry.isDirectory()) {
- directories.push(fullPath);
- } else if (this.isTextFile(entry.name)) {
- // Check file pattern
- if (filePattern && !this.matchesPattern(entry.name, filePattern)) {
- continue;
- }
- textFiles.push({ name: entry.name, fullPath });
- }
- }
-
- // Process files in parallel batches
- const BATCH_SIZE = 10;
- for (let i = 0; i < textFiles.length && results.length < maxResults; i += BATCH_SIZE) {
- const batch = textFiles.slice(i, i + BATCH_SIZE);
- const filePromises = batch.map(async ({ name, fullPath }) => {
- try {
- // Try intelligent skimming first for large files
- const skimResult = this.fileSkimmer.readFile(fullPath, [originalQuery]);
-
- let content = skimResult.content;
-
- // If file was too large even for skimming, use fallback
- if (!content) {
- const guardResult = this.fileSizeGuard.readFile(fullPath, 'utf8');
- if (guardResult.skipped) {
- return []; // Skip files that are still too large
- }
- content = guardResult.content;
- }
-
- const lines = content.split('\n');
- const relativePath = path.relative(
- this.workspaceDetector.getCurrentWorkspace()!,
- fullPath
- );
-
- const fileMatches: ContentMatch[] = [];
- for (let i = 0; i < lines.length && fileMatches.length + results.length < maxResults; i++) {
- const line = lines[i];
- const match = line.match(searchRegex);
-
- if (match) {
- fileMatches.push({
- path: relativePath,
- line: i + 1,
- content: line.trim(),
- match: match[0],
- context: {
- before: this.getContext(lines, i, -contextLines),
- after: this.getContext(lines, i, contextLines)
- }
- });
- }
- }
- return fileMatches;
- } catch {
- return [];
- }
- });
-
- const batchResults = await Promise.all(filePromises);
- for (const fileMatches of batchResults) {
- for (const match of fileMatches) {
- if (results.length < maxResults) {
- results.push(match);
- }
- }
- }
- }
-
- // Process directories recursively in parallel (batches of 3)
- const DIR_BATCH_SIZE = 3;
- for (let i = 0; i < directories.length && results.length < maxResults; i += DIR_BATCH_SIZE) {
- const batch = directories.slice(i, i + DIR_BATCH_SIZE);
- await Promise.all(
- batch.map(dir => this.searchContentRecursiveAsync(dir, originalQuery, searchRegex, results, maxResults, contextLines, filePattern))
- );
- }
- } catch (error) {
- // Ignore errors
- }
- }
-
- private searchContentRecursive(
- dirPath: string,
- originalQuery: string,
- searchRegex: RegExp,
- results: ContentMatch[],
- maxResults: number,
- contextLines: number,
- filePattern?: string
- ): void {
- if (results.length >= maxResults) return;
-
- try {
- const entries = fs.readdirSync(dirPath, { withFileTypes: true });
-
- for (const entry of entries) {
- if (results.length >= maxResults) break;
- if (this.shouldIgnore(entry.name)) continue;
-
- const fullPath = path.join(dirPath, entry.name);
-
- if (entry.isDirectory()) {
- this.searchContentRecursive(
- fullPath,
- originalQuery,
- searchRegex,
- results,
- maxResults,
- contextLines,
- filePattern
- );
- } else {
- // Check file pattern
- if (filePattern && !this.matchesPattern(entry.name, filePattern)) {
- continue;
- }
-
- // Only search text files
- if (!this.isTextFile(entry.name)) {
- continue;
- }
-
- try {
- // Try intelligent skimming first for large files
- const skimResult = this.fileSkimmer.readFile(fullPath, [originalQuery]);
-
- let content = skimResult.content;
- let isSkimmed = skimResult.skimmed;
-
- // If file was too large even for skimming, use fallback
- if (!content) {
- const guardResult = this.fileSizeGuard.readFile(fullPath, 'utf8');
- if (guardResult.skipped) {
- continue; // Skip files that are still too large
- }
- content = guardResult.content;
- isSkimmed = false;
- }
-
- const lines = content.split('\n');
- const relativePath = path.relative(
- this.workspaceDetector.getCurrentWorkspace()!,
- fullPath
- );
-
- for (let i = 0; i < lines.length; i++) {
- if (results.length >= maxResults) break;
-
- const line = lines[i];
- const match = line.match(searchRegex);
-
- if (match) {
- results.push({
- path: relativePath,
- line: i + 1,
- content: line.trim(),
- match: match[0],
- context: {
- before: this.getContext(lines, i, -contextLines),
- after: this.getContext(lines, i, contextLines)
- }
- });
- }
- }
- } catch (error) {
- // Ignore files that can't be read as text
- }
- }
- }
- } catch (error) {
- // Ignore errors
- }
- }
-
- private createSearchRegex(query: string, regex: boolean, caseSensitive: boolean): RegExp {
- if (regex) {
- return new RegExp(query, caseSensitive ? 'g' : 'gi');
- } else {
- // Escape special regex characters
- const escaped = query.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
- return new RegExp(escaped, caseSensitive ? 'g' : 'gi');
- }
- }
-
- private getSymbolPatterns(symbol: string, type: string): string[] {
- const patterns: string[] = [];
-
- if (type === 'function' || type === 'all') {
- // Function declarations
- patterns.push(`function\\s+${symbol}\\s*\\(`);
- patterns.push(`const\\s+${symbol}\\s*=\\s*\\(`);
- patterns.push(`${symbol}\\s*:\\s*\\([^)]*\\)\\s*=>`);
- patterns.push(`async\\s+function\\s+${symbol}\\s*\\(`);
- }
-
- if (type === 'class' || type === 'all') {
- // Class declarations
- patterns.push(`class\\s+${symbol}\\s*[{<]`);
- patterns.push(`interface\\s+${symbol}\\s*[{<]`);
- patterns.push(`type\\s+${symbol}\\s*=`);
- }
-
- if (type === 'variable' || type === 'all') {
- // Variable declarations
- patterns.push(`const\\s+${symbol}\\s*[=:]`);
- patterns.push(`let\\s+${symbol}\\s*[=:]`);
- patterns.push(`var\\s+${symbol}\\s*[=:]`);
- }
-
- return patterns;
- }
-
- private getContext(lines: string[], index: number, offset: number): string[] {
- const context: string[] = [];
- const start = Math.max(0, index + (offset < 0 ? offset : 1));
- const end = Math.min(lines.length, index + (offset < 0 ? 0 : offset + 1));
-
- for (let i = start; i < end; i++) {
- if (i !== index) {
- context.push(lines[i].trim());
- }
- }
-
- return context;
- }
-
- private matchesPattern(filename: string, pattern: string): boolean {
- // Simple glob pattern matching with cached regex for performance
- if (pattern.includes('*')) {
- let regex = this.patternCache.get(pattern);
- if (!regex) {
- regex = new RegExp('^' + pattern.replace(/\*/g, '.*') + '$');
- this.patternCache.set(pattern, regex);
- }
- return regex.test(filename);
- }
- return filename.includes(pattern);
- }
-
- private isTextFile(filename: string): boolean {
- const textExtensions = [
- '.ts', '.tsx', '.js', '.jsx', '.json', '.md', '.txt',
- '.css', '.scss', '.html', '.xml', '.yaml', '.yml',
- '.py', '.rs', '.go', '.java', '.c', '.cpp', '.h',
- '.rb', '.php', '.swift', '.kt', '.sql', '.sh'
- ];
-
- const ext = path.extname(filename).toLowerCase();
- return textExtensions.includes(ext);
- }
-
- private detectLanguage(filename: string): string {
- const ext = path.extname(filename).toLowerCase();
-
- const langMap: Record = {
- '.ts': 'TypeScript',
- '.tsx': 'TypeScript React',
- '.js': 'JavaScript',
- '.jsx': 'JavaScript React',
- '.py': 'Python',
- '.rs': 'Rust',
- '.go': 'Go',
- '.java': 'Java',
- '.json': 'JSON',
- '.md': 'Markdown'
- };
-
- return langMap[ext] || 'Unknown';
- }
-
- private shouldIgnore(name: string): boolean {
- const ignorePatterns = [
- 'node_modules',
- '.git',
- '.next',
- 'dist',
- 'build',
- '.turbo',
- 'coverage',
- '.cache'
- ];
-
- return ignorePatterns.some(pattern => name === pattern || name.startsWith('.'));
- }
-
- private countExtensions(dirPath: string, extensions: Map): void {
- try {
- const entries = fs.readdirSync(dirPath, { withFileTypes: true });
-
- for (const entry of entries) {
- if (this.shouldIgnore(entry.name)) continue;
-
- const fullPath = path.join(dirPath, entry.name);
-
- if (entry.isDirectory()) {
- this.countExtensions(fullPath, extensions);
- } else {
- const ext = path.extname(entry.name);
- if (ext) {
- extensions.set(ext, (extensions.get(ext) || 0) + 1);
- }
- }
- }
- } catch (error) {
- // Ignore errors
- }
- }
-
- private calculateStats(
- dirPath: string,
- stats: {
- totalFiles: number;
- totalSize: number;
- byExtension: Map;
- }
- ): void {
- try {
- const entries = fs.readdirSync(dirPath, { withFileTypes: true });
-
- for (const entry of entries) {
- if (this.shouldIgnore(entry.name)) continue;
-
- const fullPath = path.join(dirPath, entry.name);
-
- if (entry.isDirectory()) {
- this.calculateStats(fullPath, stats);
- } else {
- try {
- const fileStats = fs.statSync(fullPath);
- const ext = path.extname(entry.name) || 'no-extension';
-
- stats.totalFiles++;
- stats.totalSize += fileStats.size;
-
- const extStats = stats.byExtension.get(ext) || { count: 0, size: 0 };
- extStats.count++;
- extStats.size += fileStats.size;
- stats.byExtension.set(ext, extStats);
- } catch (error) {
- // Ignore stat errors
- }
- }
- }
- } catch (error) {
- // Ignore errors
- }
- }
-}
\ No newline at end of file
diff --git a/src/file-size-guard.ts b/src/file-size-guard.ts
deleted file mode 100644
index f07a191..0000000
--- a/src/file-size-guard.ts
+++ /dev/null
@@ -1,161 +0,0 @@
-import * as fs from 'fs';
-import * as path from 'path';
-
-export interface FileSizeConfig {
- maxFileSize: number; // Maximum size for a single file (bytes)
- maxTotalSize: number; // Maximum total size for all files in an operation (bytes)
- skipLargeFiles: boolean; // Whether to skip large files (true) or throw error (false)
-}
-
-export interface SafeReadResult {
- content: string;
- skipped: boolean;
- reason?: string;
- size: number;
-}
-
-export class FileSizeGuard {
- private static readonly DEFAULT_CONFIG: FileSizeConfig = {
- maxFileSize: 10 * 1024 * 1024, // 10MB per file
- maxTotalSize: 100 * 1024 * 1024, // 100MB total
- skipLargeFiles: true, // Skip rather than error
- };
-
- private totalBytesRead = 0;
- private config: FileSizeConfig;
-
- constructor(config?: Partial) {
- this.config = { ...FileSizeGuard.DEFAULT_CONFIG, ...config };
- }
-
- /**
- * Safely read a file with size limits
- */
- readFile(filePath: string, encoding: BufferEncoding = 'utf8'): SafeReadResult {
- try {
- const stats = fs.statSync(filePath);
- const fileSize = stats.size;
-
- // Check individual file size limit
- if (fileSize > this.config.maxFileSize) {
- if (this.config.skipLargeFiles) {
- return {
- content: '',
- skipped: true,
- reason: `File too large: ${this.formatSize(fileSize)} > ${this.formatSize(this.config.maxFileSize)}`,
- size: fileSize,
- };
- } else {
- throw new Error(`File too large: ${filePath} (${this.formatSize(fileSize)} > ${this.formatSize(this.config.maxFileSize)})`);
- }
- }
-
- // Check total size limit
- if (this.totalBytesRead + fileSize > this.config.maxTotalSize) {
- if (this.config.skipLargeFiles) {
- return {
- content: '',
- skipped: true,
- reason: `Total size limit would be exceeded: ${this.formatSize(this.totalBytesRead + fileSize)} > ${this.formatSize(this.config.maxTotalSize)}`,
- size: fileSize,
- };
- } else {
- throw new Error(`Total size limit exceeded: ${this.formatSize(this.totalBytesRead + fileSize)} > ${this.formatSize(this.config.maxTotalSize)}`);
- }
- }
-
- // Read the file
- const content = fs.readFileSync(filePath, encoding);
- this.totalBytesRead += fileSize;
-
- return {
- content,
- skipped: false,
- size: fileSize,
- };
- } catch (error) {
- if (error instanceof Error) {
- return {
- content: '',
- skipped: true,
- reason: error.message,
- size: 0,
- };
- }
- throw error;
- }
- }
-
- /**
- * Reset the total bytes counter
- */
- reset(): void {
- this.totalBytesRead = 0;
- }
-
- /**
- * Get current statistics
- */
- getStats(): { totalBytesRead: number; maxTotalSize: number; remainingBytes: number } {
- return {
- totalBytesRead: this.totalBytesRead,
- maxTotalSize: this.config.maxTotalSize,
- remainingBytes: this.config.maxTotalSize - this.totalBytesRead,
- };
- }
-
- /**
- * Format bytes as human-readable string
- */
- private formatSize(bytes: number): string {
- const units = ['B', 'KB', 'MB', 'GB'];
- let size = bytes;
- let unitIndex = 0;
-
- while (size >= 1024 && unitIndex < units.length - 1) {
- size /= 1024;
- unitIndex++;
- }
-
- return `${size.toFixed(1)}${units[unitIndex]}`;
- }
-
- /**
- * Check if a file would exceed limits without reading
- */
- wouldExceedLimits(filePath: string): { exceeds: boolean; reason?: string } {
- try {
- const stats = fs.statSync(filePath);
- const fileSize = stats.size;
-
- if (fileSize > this.config.maxFileSize) {
- return {
- exceeds: true,
- reason: `File too large: ${this.formatSize(fileSize)} > ${this.formatSize(this.config.maxFileSize)}`,
- };
- }
-
- if (this.totalBytesRead + fileSize > this.config.maxTotalSize) {
- return {
- exceeds: true,
- reason: `Total size limit would be exceeded: ${this.formatSize(this.totalBytesRead + fileSize)} > ${this.formatSize(this.config.maxTotalSize)}`,
- };
- }
-
- return { exceeds: false };
- } catch (error) {
- return {
- exceeds: true,
- reason: error instanceof Error ? error.message : 'Unknown error',
- };
- }
- }
-}
-
-/**
- * Convenience function for one-off safe file reads
- */
-export function safeReadFile(filePath: string, config?: Partial): SafeReadResult {
- const guard = new FileSizeGuard(config);
- return guard.readFile(filePath);
-}
\ No newline at end of file
diff --git a/src/file-skimmer.ts b/src/file-skimmer.ts
deleted file mode 100644
index af33ac0..0000000
--- a/src/file-skimmer.ts
+++ /dev/null
@@ -1,324 +0,0 @@
-import * as fs from 'fs';
-import * as path from 'path';
-
-export interface FileSkimmingConfig {
- maxFileSize: number; // When to start skimming (bytes)
- skimChunkSize: number; // Size of each chunk to read (bytes)
- headerSize: number; // Always read from start (bytes)
- footerSize: number; // Always read from end (bytes)
- searchPatterns?: string[]; // Patterns to look for while skimming
- maxChunks: number; // Maximum chunks to read when skimming
- encoding: BufferEncoding; // File encoding
-}
-
-export interface SkimmedContent {
- content: string;
- skimmed: boolean;
- originalSize: number;
- actualSize: number;
- chunks: SkimChunk[];
- patterns?: PatternMatch[];
-}
-
-export interface SkimChunk {
- start: number; // Byte offset in original file
- end: number; // Byte offset end
- type: 'header' | 'footer' | 'middle' | 'pattern';
- content: string;
-}
-
-export interface PatternMatch {
- pattern: string;
- line: number;
- context: string;
- byteOffset: number;
-}
-
-export class FileSkimmer {
- private static readonly DEFAULT_CONFIG: FileSkimmingConfig = {
- maxFileSize: 1 * 1024 * 1024, // Start skimming at 1MB
- skimChunkSize: 64 * 1024, // 64KB chunks
- headerSize: 32 * 1024, // Read first 32KB
- footerSize: 16 * 1024, // Read last 16KB
- maxChunks: 10, // Max 10 chunks (640KB total)
- encoding: 'utf8',
- };
-
- private config: FileSkimmingConfig;
-
- constructor(config?: Partial) {
- this.config = { ...FileSkimmer.DEFAULT_CONFIG, ...config };
- }
-
- /**
- * Smart file reading with skimming for large files
- */
- readFile(filePath: string, searchPatterns?: string[]): SkimmedContent {
- try {
- const stats = fs.statSync(filePath);
- const fileSize = stats.size;
-
- // For small files, read normally
- if (fileSize <= this.config.maxFileSize) {
- const content = fs.readFileSync(filePath, this.config.encoding);
- return {
- content,
- skimmed: false,
- originalSize: fileSize,
- actualSize: content.length,
- chunks: [{
- start: 0,
- end: fileSize,
- type: 'header',
- content,
- }],
- };
- }
-
- // For large files, use intelligent skimming
- return this.skimFile(filePath, fileSize, searchPatterns || this.config.searchPatterns);
-
- } catch (error) {
- return {
- content: '',
- skimmed: false,
- originalSize: 0,
- actualSize: 0,
- chunks: [],
- };
- }
- }
-
- /**
- * Skim a large file intelligently
- */
- private skimFile(filePath: string, fileSize: number, searchPatterns?: string[]): SkimmedContent {
- const fd = fs.openSync(filePath, 'r');
- const chunks: SkimChunk[] = [];
- let totalContent = '';
-
- try {
- // 1. Always read header (beginning of file)
- const headerSize = Math.min(this.config.headerSize, fileSize);
- const headerBuffer = Buffer.alloc(headerSize);
- fs.readSync(fd, headerBuffer, 0, headerSize, 0);
- const headerContent = headerBuffer.toString(this.config.encoding);
-
- chunks.push({
- start: 0,
- end: headerSize,
- type: 'header',
- content: headerContent,
- });
- totalContent += headerContent;
-
- // 2. Always read footer (end of file)
- const footerSize = Math.min(this.config.footerSize, fileSize - headerSize);
- if (footerSize > 0) {
- const footerBuffer = Buffer.alloc(footerSize);
- const footerStart = fileSize - footerSize;
- fs.readSync(fd, footerBuffer, 0, footerSize, footerStart);
- const footerContent = footerBuffer.toString(this.config.encoding);
-
- chunks.push({
- start: footerStart,
- end: fileSize,
- type: 'footer',
- content: footerContent,
- });
- totalContent += '\n\n[... file content skipped ...]\n\n' + footerContent;
- }
-
- // 3. Pattern-based skimming if patterns provided
- let patterns: PatternMatch[] = [];
- if (searchPatterns && searchPatterns.length > 0) {
- const patternResult = this.searchPatterns(filePath, fileSize, searchPatterns, fd);
- patterns = patternResult.matches;
-
- // Add pattern chunks to content
- for (const match of patterns) {
- totalContent += `\n\n[... pattern match at line ${match.line} ...]\n${match.context}`;
- }
- }
-
- // 4. Strategic middle sampling (for files without specific patterns)
- if (!searchPatterns || searchPatterns.length === 0) {
- const middleChunks = this.sampleMiddleSections(fd, fileSize, headerSize, footerSize);
- chunks.push(...middleChunks);
-
- for (const chunk of middleChunks) {
- totalContent += '\n\n[... skipped content ...]\n' + chunk.content;
- }
- }
-
- return {
- content: totalContent,
- skimmed: true,
- originalSize: fileSize,
- actualSize: totalContent.length,
- chunks,
- patterns: patterns.length > 0 ? patterns : undefined,
- };
-
- } finally {
- fs.closeSync(fd);
- }
- }
-
- /**
- * Search for specific patterns in large files
- */
- private searchPatterns(
- filePath: string,
- fileSize: number,
- patterns: string[],
- fd: number
- ): { matches: PatternMatch[]; chunks: SkimChunk[] } {
- const matches: PatternMatch[] = [];
- const chunks: SkimChunk[] = [];
- const chunkSize = this.config.skimChunkSize;
- let currentOffset = 0;
- let processedChunks = 0;
-
- while (currentOffset < fileSize && processedChunks < this.config.maxChunks) {
- const readSize = Math.min(chunkSize, fileSize - currentOffset);
- const buffer = Buffer.alloc(readSize);
- fs.readSync(fd, buffer, 0, readSize, currentOffset);
- const content = buffer.toString(this.config.encoding);
-
- // Check for patterns in this chunk
- const lines = content.split('\n');
- for (let i = 0; i < lines.length; i++) {
- const line = lines[i];
- for (const pattern of patterns) {
- if (line.toLowerCase().includes(pattern.toLowerCase())) {
- // Found a match! Include context
- const contextStart = Math.max(0, i - 2);
- const contextEnd = Math.min(lines.length, i + 3);
- const context = lines.slice(contextStart, contextEnd).join('\n');
-
- matches.push({
- pattern,
- line: Math.floor(currentOffset / 80) + i, // Rough line estimate
- context,
- byteOffset: currentOffset,
- });
-
- chunks.push({
- start: currentOffset,
- end: currentOffset + readSize,
- type: 'pattern',
- content: context,
- });
- }
- }
- }
-
- currentOffset += chunkSize;
- processedChunks++;
- }
-
- return { matches, chunks };
- }
-
- /**
- * Sample middle sections of large files strategically
- */
- private sampleMiddleSections(
- fd: number,
- fileSize: number,
- headerSize: number,
- footerSize: number
- ): SkimChunk[] {
- const chunks: SkimChunk[] = [];
- const availableMiddle = fileSize - headerSize - footerSize;
-
- if (availableMiddle <= 0) return chunks;
-
- // Sample 3-5 strategic points in the middle
- const samplePoints = Math.min(5, this.config.maxChunks - 2); // -2 for header/footer
- const interval = Math.floor(availableMiddle / (samplePoints + 1));
-
- for (let i = 1; i <= samplePoints; i++) {
- const sampleStart = headerSize + (interval * i);
- const sampleSize = Math.min(this.config.skimChunkSize, availableMiddle - (interval * i));
-
- if (sampleSize > 0) {
- const buffer = Buffer.alloc(sampleSize);
- fs.readSync(fd, buffer, 0, sampleSize, sampleStart);
- const content = buffer.toString(this.config.encoding);
-
- chunks.push({
- start: sampleStart,
- end: sampleStart + sampleSize,
- type: 'middle',
- content,
- });
- }
- }
-
- return chunks;
- }
-
- /**
- * Get skimming statistics
- */
- getSkimmingInfo(result: SkimmedContent): string {
- if (!result.skimmed) {
- return 'File read completely (no skimming needed)';
- }
-
- const compressionRatio = ((result.originalSize - result.actualSize) / result.originalSize * 100).toFixed(1);
- const chunksInfo = result.chunks.map(c => c.type).join(', ');
-
- let info = `Skimmed large file: ${this.formatSize(result.originalSize)} โ ${this.formatSize(result.actualSize)} (${compressionRatio}% reduction)\n`;
- info += `Chunks read: ${chunksInfo}`;
-
- if (result.patterns && result.patterns.length > 0) {
- info += `\nPattern matches: ${result.patterns.length}`;
- }
-
- return info;
- }
-
- private formatSize(bytes: number): string {
- const units = ['B', 'KB', 'MB', 'GB'];
- let size = bytes;
- let unitIndex = 0;
-
- while (size >= 1024 && unitIndex < units.length - 1) {
- size /= 1024;
- unitIndex++;
- }
-
- return `${size.toFixed(1)}${units[unitIndex]}`;
- }
-}
-
-/**
- * Convenience functions for different use cases
- */
-export function skimForDependencies(filePath: string): SkimmedContent {
- const skimmer = new FileSkimmer({
- maxFileSize: 512 * 1024, // 512KB threshold for dependency files
- searchPatterns: ['import', 'require', 'export', 'from'],
- headerSize: 64 * 1024, // Dependencies usually at top
- footerSize: 8 * 1024, // Small footer
- });
-
- return skimmer.readFile(filePath, ['import', 'require', 'export', 'from']);
-}
-
-export function skimForFunctions(filePath: string): SkimmedContent {
- const skimmer = new FileSkimmer({
- maxFileSize: 1 * 1024 * 1024, // 1MB threshold
- searchPatterns: ['function', 'class', 'const ', 'let ', 'var ', '=>'],
- });
-
- return skimmer.readFile(filePath, ['function', 'class', 'const ', 'let ', 'var ', '=>']);
-}
-
-export function skimForContent(filePath: string, searchTerms: string[]): SkimmedContent {
- const skimmer = new FileSkimmer();
- return skimmer.readFile(filePath, searchTerms);
-}
\ No newline at end of file
diff --git a/src/file-writer.ts b/src/file-writer.ts
deleted file mode 100644
index 153b450..0000000
--- a/src/file-writer.ts
+++ /dev/null
@@ -1,573 +0,0 @@
-// File Writing Operations with Safety Controls
-
-import * as fs from 'fs';
-import { promises as fsAsync } from 'fs';
-import * as path from 'path';
-import { WorkspaceDetector } from './workspace-detector.js';
-import type { Storage } from './storage.js';
-
-export interface FileChange {
- type: 'replace' | 'insert' | 'delete';
- line?: number;
- oldText?: string;
- newText: string;
-}
-
-export interface FileWriteResult {
- success: boolean;
- path: string;
- message: string;
- preview?: string;
- requiresApproval: boolean;
-}
-
-export interface FileBackup {
- path: string;
- content: string;
- timestamp: Date;
-}
-
-export class FileWriter {
- private undoStack: Map = new Map();
- private readonly MAX_UNDO_LEVELS = 10;
- private readonly MAX_FILE_SIZE = 1 * 1024 * 1024; // 1MB
-
- constructor(
- private workspaceDetector: WorkspaceDetector,
- private storage: Storage
- ) {}
-
- /**
- * Create a new file
- */
- async createFile(
- relativePath: string,
- content: string,
- overwrite: boolean = false
- ): Promise {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
-
- if (!workspace) {
- return {
- success: false,
- path: relativePath,
- message: 'No workspace set. Use set_workspace first.',
- requiresApproval: false
- };
- }
-
- // Validate path
- const validation = this.validatePath(relativePath);
- if (!validation.valid) {
- return {
- success: false,
- path: relativePath,
- message: validation.error!,
- requiresApproval: false
- };
- }
-
- const fullPath = path.join(workspace, relativePath);
-
- // Check if file exists
- let fileExists = false;
- try {
- await fsAsync.access(fullPath);
- fileExists = true;
- } catch {
- fileExists = false;
- }
-
- if (fileExists && !overwrite) {
- return {
- success: false,
- path: relativePath,
- message: `File already exists: ${relativePath}. Use overwrite flag to replace.`,
- requiresApproval: false
- };
- }
-
- // Validate content size
- const size = Buffer.byteLength(content);
- if (size > this.MAX_FILE_SIZE) {
- return {
- success: false,
- path: relativePath,
- message: `File too large: ${(size / 1024).toFixed(1)}KB. Max: ${(this.MAX_FILE_SIZE / 1024).toFixed(1)}KB`,
- requiresApproval: false
- };
- }
-
- // Generate preview
- const preview = this.generateCreatePreview(relativePath, content);
-
- return {
- success: true,
- path: relativePath,
- message: 'Ready to create file. Preview shown above.',
- preview,
- requiresApproval: true
- };
- }
-
- /**
- * Actually write the file after approval
- */
- async applyCreateFile(relativePath: string, content: string): Promise {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return {
- success: false,
- path: relativePath,
- message: 'No workspace set',
- requiresApproval: false
- };
- }
-
- const fullPath = path.join(workspace, relativePath);
-
- try {
- // Create directory if doesn't exist
- const dir = path.dirname(fullPath);
- try {
- await fsAsync.access(dir);
- } catch {
- await fsAsync.mkdir(dir, { recursive: true });
- }
-
- // Write file
- await fsAsync.writeFile(fullPath, content, 'utf8');
-
- // Log decision
- const project = this.storage.getCurrentProject();
- if (project) {
- this.storage.addDecision({
- projectId: project.id,
- type: 'other',
- description: `Created file: ${relativePath}`,
- reasoning: 'Generated by Claude with user approval'
- });
- }
-
- return {
- success: true,
- path: relativePath,
- message: `โ
Created ${relativePath}`,
- requiresApproval: false
- };
- } catch (error) {
- return {
- success: false,
- path: relativePath,
- message: `Error creating file: ${error instanceof Error ? error.message : 'Unknown error'}`,
- requiresApproval: false
- };
- }
- }
-
- /**
- * Modify an existing file
- */
- async modifyFile(
- relativePath: string,
- changes: FileChange[]
- ): Promise {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
-
- if (!workspace) {
- return {
- success: false,
- path: relativePath,
- message: 'No workspace set. Use set_workspace first.',
- requiresApproval: false
- };
- }
-
- const fullPath = path.join(workspace, relativePath);
-
- // Check if file exists
- try {
- await fsAsync.access(fullPath);
- } catch {
- return {
- success: false,
- path: relativePath,
- message: `File not found: ${relativePath}`,
- requiresApproval: false
- };
- }
-
- try {
- // Read current content
- const originalContent = await fsAsync.readFile(fullPath, 'utf8');
-
- // Create backup
- this.createBackup(relativePath, originalContent);
-
- // Apply changes
- const newContent = this.applyChanges(originalContent, changes);
-
- // Generate diff preview
- const preview = this.generateDiffPreview(relativePath, originalContent, newContent);
-
- return {
- success: true,
- path: relativePath,
- message: 'Ready to modify file. Preview shown above.',
- preview,
- requiresApproval: true
- };
- } catch (error) {
- return {
- success: false,
- path: relativePath,
- message: `Error reading file: ${error instanceof Error ? error.message : 'Unknown error'}`,
- requiresApproval: false
- };
- }
- }
-
- /**
- * Actually apply the modification after approval
- */
- async applyModifyFile(
- relativePath: string,
- changes: FileChange[]
- ): Promise {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return {
- success: false,
- path: relativePath,
- message: 'No workspace set',
- requiresApproval: false
- };
- }
-
- const fullPath = path.join(workspace, relativePath);
-
- try {
- const originalContent = await fsAsync.readFile(fullPath, 'utf8');
- const newContent = this.applyChanges(originalContent, changes);
-
- // Write modified file
- await fsAsync.writeFile(fullPath, newContent, 'utf8');
-
- // Log decision
- const project = this.storage.getCurrentProject();
- if (project) {
- this.storage.addDecision({
- projectId: project.id,
- type: 'other',
- description: `Modified file: ${relativePath}`,
- reasoning: 'Changes applied by Claude with user approval'
- });
- }
-
- return {
- success: true,
- path: relativePath,
- message: `โ
Modified ${relativePath}`,
- requiresApproval: false
- };
- } catch (error) {
- return {
- success: false,
- path: relativePath,
- message: `Error modifying file: ${error instanceof Error ? error.message : 'Unknown error'}`,
- requiresApproval: false
- };
- }
- }
-
- /**
- * Undo the last change to a file
- */
- async undoChange(relativePath: string, steps: number = 1): Promise {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return {
- success: false,
- path: relativePath,
- message: 'No workspace set',
- requiresApproval: false
- };
- }
-
- const backups = this.undoStack.get(relativePath);
- if (!backups || backups.length === 0) {
- return {
- success: false,
- path: relativePath,
- message: 'No undo history available for this file',
- requiresApproval: false
- };
- }
-
- if (steps > backups.length) {
- steps = backups.length;
- }
-
- try {
- // Get the backup to restore
- const backup = backups[backups.length - steps];
- const fullPath = path.join(workspace, relativePath);
-
- // Restore the backup
- await fsAsync.writeFile(fullPath, backup.content, 'utf8');
-
- // Remove undone items from stack
- backups.splice(-steps);
- this.undoStack.set(relativePath, backups);
-
- return {
- success: true,
- path: relativePath,
- message: `โ
Reverted ${relativePath} (undid ${steps} change${steps > 1 ? 's' : ''})`,
- requiresApproval: false
- };
- } catch (error) {
- return {
- success: false,
- path: relativePath,
- message: `Error undoing change: ${error instanceof Error ? error.message : 'Unknown error'}`,
- requiresApproval: false
- };
- }
- }
-
- /**
- * Delete a file
- */
- async deleteFile(relativePath: string): Promise {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
-
- if (!workspace) {
- return {
- success: false,
- path: relativePath,
- message: 'No workspace set',
- requiresApproval: false
- };
- }
-
- const fullPath = path.join(workspace, relativePath);
-
- try {
- await fsAsync.access(fullPath);
- } catch {
- return {
- success: false,
- path: relativePath,
- message: `File not found: ${relativePath}`,
- requiresApproval: false
- };
- }
-
- // Create backup before deletion
- const content = await fsAsync.readFile(fullPath, 'utf8');
- this.createBackup(relativePath, content);
-
- const preview = `โ ๏ธ WARNING: This will DELETE the file!\n\nFile: ${relativePath}\nSize: ${(Buffer.byteLength(content) / 1024).toFixed(1)}KB\n\nThis action can be undone with undo_file_change.`;
-
- return {
- success: true,
- path: relativePath,
- message: 'Ready to delete file. Confirm deletion.',
- preview,
- requiresApproval: true
- };
- }
-
- /**
- * Actually delete the file after approval
- */
- async applyDeleteFile(relativePath: string): Promise {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (!workspace) {
- return {
- success: false,
- path: relativePath,
- message: 'No workspace set',
- requiresApproval: false
- };
- }
-
- const fullPath = path.join(workspace, relativePath);
-
- try {
- fs.unlinkSync(fullPath);
-
- return {
- success: true,
- path: relativePath,
- message: `โ
Deleted ${relativePath}`,
- requiresApproval: false
- };
- } catch (error) {
- return {
- success: false,
- path: relativePath,
- message: `Error deleting file: ${error instanceof Error ? error.message : 'Unknown error'}`,
- requiresApproval: false
- };
- }
- }
-
- // ========== PRIVATE HELPER METHODS ==========
-
- private validatePath(relativePath: string): { valid: boolean; error?: string } {
- // Check for path traversal
- if (relativePath.includes('..')) {
- return { valid: false, error: 'Path traversal not allowed (..)' };
- }
-
- // Check for absolute paths
- if (path.isAbsolute(relativePath)) {
- return { valid: false, error: 'Must use relative paths, not absolute' };
- }
-
- // Check for forbidden directories
- const forbidden = ['node_modules', '.git', 'dist', 'build', '.next', '.cache'];
- for (const dir of forbidden) {
- if (relativePath.startsWith(dir + '/') || relativePath === dir) {
- return { valid: false, error: `Cannot modify ${dir} directory` };
- }
- }
-
- // Check for system files (unless specifically allowed)
- if (path.basename(relativePath).startsWith('.') && !this.isAllowedHiddenFile(relativePath)) {
- return { valid: false, error: 'Cannot modify hidden files (unless configuration)' };
- }
-
- return { valid: true };
- }
-
- private isAllowedHiddenFile(relativePath: string): boolean {
- const allowed = [
- '.env.example',
- '.gitignore',
- '.eslintrc',
- '.prettierrc',
- '.editorconfig'
- ];
- return allowed.some(file => relativePath.endsWith(file));
- }
-
- private createBackup(relativePath: string, content: string): void {
- const backups = this.undoStack.get(relativePath) || [];
-
- backups.push({
- path: relativePath,
- content,
- timestamp: new Date()
- });
-
- // Keep only last MAX_UNDO_LEVELS backups
- if (backups.length > this.MAX_UNDO_LEVELS) {
- backups.shift();
- }
-
- this.undoStack.set(relativePath, backups);
- }
-
- private applyChanges(content: string, changes: FileChange[]): string {
- let result = content;
-
- for (const change of changes) {
- switch (change.type) {
- case 'replace':
- if (change.oldText) {
- result = result.replace(change.oldText, change.newText);
- }
- break;
-
- case 'insert':
- if (typeof change.line === 'number') {
- const lines = result.split('\n');
- lines.splice(change.line, 0, change.newText);
- result = lines.join('\n');
- }
- break;
-
- case 'delete':
- if (typeof change.line === 'number') {
- const lines = result.split('\n');
- lines.splice(change.line, 1);
- result = lines.join('\n');
- }
- break;
- }
- }
-
- return result;
- }
-
- private generateCreatePreview(relativePath: string, content: string): string {
- const lines = content.split('\n');
- const size = Buffer.byteLength(content);
-
- let preview = '๐ Preview: Create New File\n';
- preview += 'โ'.repeat(60) + '\n';
- preview += `File: ${relativePath}\n`;
- preview += `Size: ${(size / 1024).toFixed(1)}KB\n`;
- preview += `Lines: ${lines.length}\n`;
- preview += 'โ'.repeat(60) + '\n';
-
- // Show first 20 lines
- const previewLines = lines.slice(0, 20);
- preview += previewLines.join('\n');
-
- if (lines.length > 20) {
- preview += `\n\n... (${lines.length - 20} more lines)`;
- }
-
- preview += '\n' + 'โ'.repeat(60);
-
- return preview;
- }
-
- private generateDiffPreview(
- relativePath: string,
- oldContent: string,
- newContent: string
- ): string {
- const oldLines = oldContent.split('\n');
- const newLines = newContent.split('\n');
-
- let preview = '๐ Changes Preview\n';
- preview += 'โ'.repeat(60) + '\n';
- preview += `File: ${relativePath}\n`;
- preview += 'โ'.repeat(60) + '\n';
-
- // Simple line-by-line diff
- const maxLines = Math.max(oldLines.length, newLines.length);
- let changeCount = 0;
-
- for (let i = 0; i < maxLines && changeCount < 20; i++) {
- const oldLine = oldLines[i];
- const newLine = newLines[i];
-
- if (oldLine !== newLine) {
- if (oldLine !== undefined) {
- preview += `- ${oldLine}\n`;
- }
- if (newLine !== undefined) {
- preview += `+ ${newLine}\n`;
- }
- changeCount++;
- } else if (changeCount > 0 && changeCount < 20) {
- // Show context line
- preview += ` ${oldLine}\n`;
- }
- }
-
- if (changeCount >= 20) {
- preview += '\n... (more changes below)';
- }
-
- preview += '\n' + 'โ'.repeat(60);
-
- return preview;
- }
-}
\ No newline at end of file
diff --git a/src/git-context-engine.ts b/src/git-context-engine.ts
new file mode 100644
index 0000000..02b5113
--- /dev/null
+++ b/src/git-context-engine.ts
@@ -0,0 +1,322 @@
+๏ปฟ/**
+ * Git Context Engine
+ *
+ * Layer 1: Fast git context retrieval
+ * Layer 2: Smart commit message generation
+ * Layer 3: File complexity integration for context-aware commits
+ *
+ * Features:
+ * - Automatic commit message generation
+ * - Conventional commits format support
+ * - Diff analysis for message context
+ * - File complexity awareness
+ * - Smart suggestions based on changes
+ */
+
+import simpleGit, { SimpleGit, DiffResult } from 'simple-git';
+import * as path from 'path';
+import { ReadFileEngine } from './read-file-engine.js';
+
+interface GitContext {
+ branch: string;
+ lastCommit: {
+ hash: string;
+ message: string;
+ author: string;
+ date: Date;
+ } | null;
+ uncommittedFiles: string[];
+ stagedFiles: string[];
+ ahead: number;
+ behind: number;
+ suggestedCommitMessage?: string;
+ changeAnalysis?: {
+ filesChanged: number;
+ insertions: number;
+ deletions: number;
+ categories: {
+ [key: string]: number;
+ };
+ primaryCategory?: string;
+ scope?: string;
+ };
+}
+
+export class GitContextEngine {
+ private git: SimpleGit;
+ private workspacePath: string;
+ private readFileEngine: ReadFileEngine;
+
+ constructor(workspacePath: string) {
+ this.workspacePath = workspacePath;
+ this.git = simpleGit(workspacePath);
+ this.readFileEngine = new ReadFileEngine(workspacePath);
+ }
+
+ /**
+ * Layer 1 + 2 + 3: Get enriched git context with smart commit message
+ */
+ async getContext(options: {
+ generateCommitMessage?: boolean;
+ analyzeChanges?: boolean;
+ } = {}): Promise {
+ const {
+ generateCommitMessage = true,
+ analyzeChanges = true
+ } = options;
+
+ // Layer 1: Get basic git context
+ const status = await this.git.status();
+ const logs = await this.git.log({ maxCount: 1 });
+
+ const context: GitContext = {
+ branch: status.current || 'unknown',
+ lastCommit: null,
+ uncommittedFiles: [...status.modified, ...status.not_added],
+ stagedFiles: status.staged,
+ ahead: status.ahead,
+ behind: status.behind
+ };
+
+ // Get last commit info
+ if (logs.latest) {
+ context.lastCommit = {
+ hash: logs.latest.hash.substring(0, 7),
+ message: logs.latest.message,
+ author: logs.latest.author_name,
+ date: new Date(logs.latest.date)
+ };
+ }
+
+ // Layer 2: Analyze changes if requested
+ if (analyzeChanges && context.stagedFiles.length > 0) {
+ context.changeAnalysis = await this.analyzeChanges(context.stagedFiles);
+ }
+
+ // Layer 3: Generate smart commit message
+ if (generateCommitMessage && context.stagedFiles.length > 0 && context.changeAnalysis) {
+ context.suggestedCommitMessage = await this.generateCommitMessage(
+ context.stagedFiles,
+ context.changeAnalysis
+ );
+ }
+
+ return context;
+ }
+
+ /**
+ * Layer 2: Analyze changes with diff
+ */
+ private async analyzeChanges(stagedFiles: string[]): Promise {
+ const categories: { [key: string]: number } = {};
+ let insertions = 0;
+ let deletions = 0;
+
+ // Get diff summary for staged changes
+ try {
+ const diffSummary = await this.git.diffSummary(['--cached']);
+ insertions = diffSummary.insertions;
+ deletions = diffSummary.deletions;
+ } catch (err) {
+ // If diff fails, use defaults
+ }
+
+ // Categorize files
+ for (const file of stagedFiles) {
+ const category = this.categorizeFile(file);
+ categories[category] = (categories[category] || 0) + 1;
+ }
+
+ // Determine primary category
+ const primaryCategory = Object.entries(categories)
+ .sort(([, a], [, b]) => b - a)[0]?.[0] || 'chore';
+
+ // Determine scope from file paths
+ const scope = this.determineScope(stagedFiles);
+
+ return {
+ filesChanged: stagedFiles.length,
+ insertions,
+ deletions,
+ categories,
+ primaryCategory,
+ scope
+ };
+ }
+
+ /**
+ * Layer 3: Generate smart commit message using file complexity context
+ */
+ private async generateCommitMessage(
+ stagedFiles: string[],
+ analysis: GitContext['changeAnalysis']
+ ): Promise {
+ if (!analysis) return '';
+
+ const { primaryCategory, scope, filesChanged } = analysis;
+
+ // Analyze first few staged files for context
+ const fileContexts: Array<{
+ name: string;
+ complexity?: string;
+ isNew: boolean;
+ }> = [];
+
+ for (const file of stagedFiles.slice(0, 3)) {
+ const isNew = await this.isNewFile(file);
+ const ctx: { name: string; complexity?: string; isNew: boolean } = {
+ name: path.basename(file),
+ isNew
+ };
+
+ // Get complexity for code files
+ if (this.isCodeFile(file) && !isNew) {
+ try {
+ const fullPath = path.join(this.workspacePath, file);
+ const fileCtx = await this.readFileEngine.read(fullPath);
+ ctx.complexity = fileCtx.complexity.level;
+ } catch (err) {
+ // Skip complexity if can't read
+ }
+ }
+
+ fileContexts.push(ctx);
+ }
+
+ // Generate message using conventional commits format
+ let message = this.getCommitType(primaryCategory || 'chore');
+
+ if (scope) {
+ message += `(${scope})`;
+ }
+
+ message += ': ';
+
+ // Generate description based on files and analysis
+ const descriptions: string[] = [];
+
+ // Handle new files
+ const newFiles = fileContexts.filter(f => f.isNew);
+ if (newFiles.length > 0) {
+ descriptions.push(`add ${newFiles.map(f => f.name).join(', ')}`);
+ }
+
+ // Handle existing files
+ const modifiedFiles = fileContexts.filter(f => !f.isNew);
+ if (modifiedFiles.length > 0) {
+ const complexFiles = modifiedFiles.filter(f =>
+ f.complexity === 'high' || f.complexity === 'very-high'
+ );
+
+ if (complexFiles.length > 0) {
+ descriptions.push(`refactor ${complexFiles.map(f => f.name).join(', ')}`);
+ } else if (primaryCategory === 'feature') {
+ descriptions.push(`enhance ${modifiedFiles.map(f => f.name).join(', ')}`);
+ } else if (primaryCategory === 'bugfix') {
+ descriptions.push(`fix issues in ${modifiedFiles.map(f => f.name).join(', ')}`);
+ } else {
+ descriptions.push(`update ${modifiedFiles.map(f => f.name).join(', ')}`);
+ }
+ }
+
+ if (descriptions.length === 0) {
+ // Fallback generic message
+ descriptions.push(`update ${filesChanged} file${filesChanged > 1 ? 's' : ''}`);
+ }
+
+ message += descriptions.join(' and ');
+
+ // Add line count context if significant
+ if (analysis.insertions > 100 || analysis.deletions > 100) {
+ message += `\n\n+${analysis.insertions} -${analysis.deletions} lines changed`;
+ }
+
+ return message;
+ }
+
+ /**
+ * Categorize file by path and name
+ */
+ private categorizeFile(filePath: string): string {
+ const lower = filePath.toLowerCase();
+
+ if (lower.includes('test') || lower.includes('spec')) return 'test';
+ if (lower.endsWith('.md') || lower.includes('doc')) return 'docs';
+ if (lower.includes('config') || lower.endsWith('.json') || lower.endsWith('.yaml')) return 'config';
+ if (lower.includes('fix') || lower.includes('bug')) return 'bugfix';
+ if (lower.includes('refactor')) return 'refactor';
+ if (this.isCodeFile(filePath)) return 'feature';
+
+ return 'chore';
+ }
+
+ /**
+ * Determine scope from file paths
+ */
+ private determineScope(files: string[]): string | undefined {
+ // Extract common directory or module name
+ const dirs = files.map(f => {
+ const parts = f.split(/[/\\]/);
+ return parts.length > 1 ? parts[0] : null;
+ }).filter(Boolean);
+
+ if (dirs.length === 0) return undefined;
+
+ // Find most common directory
+ const dirCounts: { [key: string]: number } = {};
+ for (const dir of dirs) {
+ if (dir) dirCounts[dir] = (dirCounts[dir] || 0) + 1;
+ }
+
+ const mostCommon = Object.entries(dirCounts)
+ .sort(([, a], [, b]) => b - a)[0];
+
+ if (mostCommon && mostCommon[1] >= files.length * 0.5) {
+ return mostCommon[0];
+ }
+
+ return undefined;
+ }
+
+ /**
+ * Get conventional commit type
+ */
+ private getCommitType(category: string): string {
+ const typeMap: { [key: string]: string } = {
+ 'feature': 'feat',
+ 'bugfix': 'fix',
+ 'docs': 'docs',
+ 'test': 'test',
+ 'refactor': 'refactor',
+ 'config': 'chore',
+ 'other': 'chore'
+ };
+
+ return typeMap[category] || 'chore';
+ }
+
+ /**
+ * Check if file is new (untracked)
+ */
+ private async isNewFile(filePath: string): Promise {
+ try {
+ const status = await this.git.status();
+ return status.not_added.includes(filePath);
+ } catch (err) {
+ return false;
+ }
+ }
+
+ /**
+ * Check if code file
+ */
+ private isCodeFile(filePath: string): boolean {
+ const extensions = [
+ '.ts', '.js', '.tsx', '.jsx',
+ '.py', '.go', '.rs', '.java',
+ '.cpp', '.c', '.h'
+ ];
+ return extensions.some(ext => filePath.endsWith(ext));
+ }
+}
+
diff --git a/src/git-hook-manager.ts b/src/git-hook-manager.ts
new file mode 100644
index 0000000..bffb65e
--- /dev/null
+++ b/src/git-hook-manager.ts
@@ -0,0 +1,424 @@
+๏ปฟ/**
+ * Git Hook Manager
+ * Automatically installs git hooks to capture development events
+ * Auto-remembers: commits, pushes, merges, branch switches
+ */
+
+import * as fs from 'fs';
+import * as path from 'path';
+import { execSync } from 'child_process';
+
+export interface GitHookConfig {
+ projectPath: string;
+ dbPath: string;
+ enabled: boolean;
+}
+
+export class GitHookManager {
+ private projectPath: string;
+ private hooksDir: string;
+ private dbPath: string;
+
+ constructor(projectPath: string, dbPath: string) {
+ this.projectPath = projectPath;
+ this.hooksDir = path.join(projectPath, '.git', 'hooks');
+ this.dbPath = dbPath;
+ }
+
+ /**
+ * Check if project is a git repository
+ */
+ isGitRepo(): boolean {
+ try {
+ return fs.existsSync(path.join(this.projectPath, '.git'));
+ } catch {
+ return false;
+ }
+ }
+
+ /**
+ * Install all Context Sync git hooks
+ */
+ installHooks(): { success: boolean; installed: string[]; errors: string[] } {
+ if (!this.isGitRepo()) {
+ return {
+ success: false,
+ installed: [],
+ errors: ['Not a git repository']
+ };
+ }
+
+ const installed: string[] = [];
+ const errors: string[] = [];
+
+ // Ensure hooks directory exists
+ if (!fs.existsSync(this.hooksDir)) {
+ fs.mkdirSync(this.hooksDir, { recursive: true });
+ }
+
+ // Install each hook
+ const hooks = [
+ { name: 'post-commit', generator: this.generatePostCommitHook.bind(this) },
+ { name: 'pre-push', generator: this.generatePrePushHook.bind(this) },
+ { name: 'post-merge', generator: this.generatePostMergeHook.bind(this) },
+ { name: 'post-checkout', generator: this.generatePostCheckoutHook.bind(this) }
+ ];
+
+ for (const hook of hooks) {
+ try {
+ const hookPath = path.join(this.hooksDir, hook.name);
+ const hookContent = hook.generator();
+
+ // Backup existing hook if it exists
+ if (fs.existsSync(hookPath)) {
+ const backupPath = `${hookPath}.backup-${Date.now()}`;
+ fs.copyFileSync(hookPath, backupPath);
+ }
+
+ // Write hook
+ fs.writeFileSync(hookPath, hookContent, { mode: 0o755 });
+ installed.push(hook.name);
+ } catch (error: any) {
+ errors.push(`Failed to install ${hook.name}: ${error.message}`);
+ }
+ }
+
+ return {
+ success: errors.length === 0,
+ installed,
+ errors
+ };
+ }
+
+ /**
+ * Uninstall Context Sync hooks (restores backups if available)
+ */
+ uninstallHooks(): { success: boolean; removed: string[] } {
+ const removed: string[] = [];
+ const hooks = ['post-commit', 'pre-push', 'post-merge', 'post-checkout'];
+
+ for (const hookName of hooks) {
+ const hookPath = path.join(this.hooksDir, hookName);
+
+ if (fs.existsSync(hookPath)) {
+ // Check if it's a Context Sync hook
+ const content = fs.readFileSync(hookPath, 'utf8');
+ if (content.includes('# Context Sync Auto-Hook')) {
+ fs.unlinkSync(hookPath);
+ removed.push(hookName);
+
+ // Restore backup if available
+ const backups = fs.readdirSync(this.hooksDir)
+ .filter(f => f.startsWith(`${hookName}.backup-`))
+ .sort()
+ .reverse();
+
+ if (backups.length > 0) {
+ const latestBackup = path.join(this.hooksDir, backups[0]);
+ fs.copyFileSync(latestBackup, hookPath);
+ }
+ }
+ }
+ }
+
+ return { success: true, removed };
+ }
+
+ /**
+ * Check which hooks are installed
+ */
+ getInstalledHooks(): string[] {
+ const installed: string[] = [];
+ const hooks = ['post-commit', 'pre-push', 'post-merge', 'post-checkout'];
+
+ for (const hookName of hooks) {
+ const hookPath = path.join(this.hooksDir, hookName);
+ if (fs.existsSync(hookPath)) {
+ const content = fs.readFileSync(hookPath, 'utf8');
+ if (content.includes('# Context Sync Auto-Hook')) {
+ installed.push(hookName);
+ }
+ }
+ }
+
+ return installed;
+ }
+
+ /**
+ * Generate post-commit hook (captures commits)
+ */
+ private generatePostCommitHook(): string {
+ const isWindows = process.platform === 'win32';
+ const nodeCmd = isWindows ? 'node.exe' : 'node';
+
+ // Escape database path for shell
+ const escapedDbPath = this.dbPath.replace(/\\/g, '/');
+
+ return `#!/bin/sh
+# Context Sync Auto-Hook: post-commit
+# Automatically captures commit information
+
+${nodeCmd} -e "
+const fs = require('fs');
+const { execSync } = require('child_process');
+const Database = require('better-sqlite3');
+
+try {
+ // Get commit info
+ const commitHash = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
+ const commitMsg = execSync('git log -1 --pretty=%B', { encoding: 'utf8' }).trim();
+ const branch = execSync('git branch --show-current', { encoding: 'utf8' }).trim();
+ const filesChanged = execSync('git diff-tree --no-commit-id --name-only -r HEAD', { encoding: 'utf8' })
+ .trim().split('\\\\n').filter(f => f);
+
+ // Connect to Context Sync database
+ const db = new Database('${escapedDbPath}');
+
+ // Get current project
+ const project = db.prepare('SELECT id FROM projects ORDER BY updated_at DESC LIMIT 1').get();
+ if (!project) {
+ console.log('Context Sync: No project found');
+ process.exit(0);
+ }
+
+ // Store commit as decision
+ const crypto = require('crypto');
+ const id = crypto.randomUUID();
+
+ const sql = 'INSERT INTO decisions (id, project_id, type, description, reasoning, timestamp) VALUES (?, ?, ?, ?, ?, ?)';
+ db.prepare(sql).run(
+ id,
+ project.id,
+ 'commit',
+ 'Committed: ' + commitMsg.split('\\\\n')[0],
+ JSON.stringify({
+ commit: commitHash,
+ branch: branch,
+ files: filesChanged,
+ fullMessage: commitMsg,
+ event: 'commit'
+ }),
+ Date.now()
+ );
+
+ db.close();
+ console.log(' Context Sync: Captured commit ' + commitHash.substring(0, 7));
+} catch (error) {
+ // Fail silently to not interrupt git workflow
+ console.error('Context Sync hook error:', error.message);
+}
+"
+`;
+ }
+
+ /**
+ * Generate pre-push hook (captures pushes to production)
+ */
+ private generatePrePushHook(): string {
+ const isWindows = process.platform === 'win32';
+ const nodeCmd = isWindows ? 'node.exe' : 'node';
+ const escapedDbPath = this.dbPath.replace(/\\/g, '/');
+
+ return `#!/bin/sh
+# Context Sync Auto-Hook: pre-push
+# Automatically captures push events (production tracking)
+
+${nodeCmd} -e "
+const fs = require('fs');
+const { execSync } = require('child_process');
+const Database = require('better-sqlite3');
+
+try {
+ const branch = execSync('git branch --show-current', { encoding: 'utf8' }).trim();
+ const remote = process.argv[2] || 'origin';
+
+ // Count commits about to be pushed
+ const ahead = execSync('git rev-list @{u}..HEAD --count 2>/dev/null || echo 0', { encoding: 'utf8' }).trim();
+
+ // Connect to Context Sync database
+ const db = new Database('${escapedDbPath}');
+
+ // Get current project
+ const project = db.prepare('SELECT id FROM projects ORDER BY updated_at DESC LIMIT 1').get();
+ if (!project) {
+ console.log('Context Sync: No project found');
+ process.exit(0);
+ }
+
+ // Store push as decision
+ const crypto = require('crypto');
+ const id = crypto.randomUUID();
+
+ const isProduction = branch === 'main' || branch === 'master' || branch === 'production';
+
+ const sql = 'INSERT INTO decisions (id, project_id, type, description, reasoning, timestamp) VALUES (?, ?, ?, ?, ?, ?)';
+ db.prepare(sql).run(
+ id,
+ project.id,
+ 'push',
+ 'Pushing ' + ahead + ' commit(s) to ' + remote + '/' + branch + (isProduction ? ' (PRODUCTION)' : ''),
+ JSON.stringify({
+ remote: remote,
+ branch: branch,
+ commits: parseInt(ahead),
+ production: isProduction,
+ event: 'push'
+ }),
+ Date.now()
+ );
+
+ db.close();
+ console.log(' Context Sync: Tracked push to ' + remote + '/' + branch);
+} catch (error) {
+ console.error('Context Sync hook error:', error.message);
+}
+"
+`;
+ }
+
+ /**
+ * Generate post-merge hook (captures merges)
+ */
+ private generatePostMergeHook(): string {
+ const isWindows = process.platform === 'win32';
+ const nodeCmd = isWindows ? 'node.exe' : 'node';
+ const escapedDbPath = this.dbPath.replace(/\\/g, '/');
+
+ return `#!/bin/sh
+# Context Sync Auto-Hook: post-merge
+# Automatically captures merge events (feature completions)
+
+${nodeCmd} -e "
+const fs = require('fs');
+const { execSync } = require('child_process');
+const Database = require('better-sqlite3');
+
+try {
+ const branch = execSync('git branch --show-current', { encoding: 'utf8' }).trim();
+ const mergeHead = process.env.GIT_REFLOG_ACTION || 'merge';
+
+ // Extract source branch from merge message
+ const lastCommit = execSync('git log -1 --pretty=%B', { encoding: 'utf8' }).trim();
+ const mergeMatch = lastCommit.match(/Merge (?:branch|pull request) ['\\\\\"](.*)['\\\\\"]/);
+ const fromBranch = mergeMatch ? mergeMatch[1] : 'unknown';
+
+ // Connect to Context Sync database
+ const db = new Database('${escapedDbPath}');
+
+ // Get current project
+ const project = db.prepare('SELECT id FROM projects ORDER BY updated_at DESC LIMIT 1').get();
+ if (!project) {
+ console.log('Context Sync: No project found');
+ process.exit(0);
+ }
+
+ // Store merge as decision
+ const crypto = require('crypto');
+ const id = crypto.randomUUID();
+
+ const sql = 'INSERT INTO decisions (id, project_id, type, description, reasoning, timestamp) VALUES (?, ?, ?, ?, ?, ?)';
+ db.prepare(sql).run(
+ id,
+ project.id,
+ 'merge',
+ 'Merged ' + fromBranch + ' into ' + branch,
+ JSON.stringify({
+ from: fromBranch,
+ to: branch,
+ mergeCommit: lastCommit,
+ event: 'merge'
+ }),
+ Date.now()
+ );
+
+ db.close();
+ console.log(' Context Sync: Captured merge ' + fromBranch + ' ' + branch);
+} catch (error) {
+ console.error('Context Sync hook error:', error.message);
+}
+"
+`;
+ }
+
+ /**
+ * Generate post-checkout hook (captures branch switches)
+ */
+ private generatePostCheckoutHook(): string {
+ const isWindows = process.platform === 'win32';
+ const nodeCmd = isWindows ? 'node.exe' : 'node';
+ const escapedDbPath = this.dbPath.replace(/\\/g, '/');
+
+ return `#!/bin/sh
+# Context Sync Auto-Hook: post-checkout
+# Automatically captures branch switches (context switching)
+
+# Args: previous-head new-head branch-checkout-flag
+PREV_HEAD=\$1
+NEW_HEAD=\$2
+BRANCH_CHECKOUT=\$3
+
+# Only track branch checkouts (not file checkouts)
+if [ "\$BRANCH_CHECKOUT" != "1" ]; then
+ exit 0
+fi
+
+${nodeCmd} -e "
+const fs = require('fs');
+const { execSync } = require('child_process');
+const Database = require('better-sqlite3');
+
+try {
+ const newBranch = execSync('git branch --show-current', { encoding: 'utf8' }).trim();
+ const prevHead = process.argv[2];
+
+ // Get previous branch name
+ let prevBranch = 'unknown';
+ try {
+ prevBranch = execSync('git name-rev --name-only ' + prevHead, { encoding: 'utf8' }).trim();
+ } catch {}
+
+ // Skip if same branch (happens on initial checkout)
+ if (prevBranch === newBranch) {
+ process.exit(0);
+ }
+
+ // Connect to Context Sync database
+ const db = new Database('${escapedDbPath}');
+
+ // Get current project
+ const project = db.prepare('SELECT id FROM projects ORDER BY updated_at DESC LIMIT 1').get();
+ if (!project) {
+ console.log('Context Sync: No project found');
+ process.exit(0);
+ }
+
+ // Store branch switch as active_work
+ const crypto = require('crypto');
+ const id = crypto.randomUUID();
+
+ const sql = 'INSERT INTO active_work (id, project_id, task, context, branch, timestamp, status) VALUES (?, ?, ?, ?, ?, ?, ?)';
+ db.prepare(sql).run(
+ id,
+ project.id,
+ 'Switched to branch: ' + newBranch,
+ JSON.stringify({
+ from: prevBranch,
+ to: newBranch,
+ event: 'checkout'
+ }),
+ newBranch,
+ Date.now(),
+ 'active'
+ );
+
+ db.close();
+ console.log(' Context Sync: Tracked branch switch ' + prevBranch + ' ' + newBranch);
+} catch (error) {
+ console.error('Context Sync hook error:', error.message);
+}
+" "\$PREV_HEAD" "\$NEW_HEAD"
+`;
+ }
+}
+
+
diff --git a/src/git-integration.ts b/src/git-integration.ts
index 7e73fe0..73b94c0 100644
--- a/src/git-integration.ts
+++ b/src/git-integration.ts
@@ -1,4 +1,4 @@
-// Git Integration for Version Control Operations
+๏ปฟ// Git Integration for Version Control Operations
import * as fs from 'fs';
import * as path from 'path';
@@ -250,6 +250,250 @@ export class GitIntegration {
}
}
+ /**
+ * Get hotspots - files with high change frequency (risk analysis)
+ */
+ getHotspots(limit: number = 10): Array<{ file: string; changes: number; lastChanged: string; risk: string }> | null {
+ if (!this.isGitRepo()) {
+ return null;
+ }
+
+ try {
+ // Get file change frequency from git log
+ const output = this.exec('git log --format=format: --name-only --since="6 months ago"');
+ const files = output.split('\n').filter(f => f.trim() && !f.startsWith('commit'));
+
+ // Count changes per file
+ const changeCount = new Map();
+ for (const file of files) {
+ changeCount.set(file, (changeCount.get(file) || 0) + 1);
+ }
+
+ // Sort by change frequency
+ const sorted = Array.from(changeCount.entries())
+ .sort((a, b) => b[1] - a[1])
+ .slice(0, limit);
+
+ // Get last changed date for each file
+ const hotspots = sorted.map(([file, changes]) => {
+ try {
+ const lastChanged = this.exec(`git log -1 --format="%ar" -- "${file}"`).trim();
+
+ // Risk calculation based on change frequency
+ let risk = 'low';
+ if (changes > 50) risk = 'critical';
+ else if (changes > 30) risk = 'high';
+ else if (changes > 15) risk = 'medium';
+
+ return { file, changes, lastChanged, risk };
+ } catch {
+ return { file, changes, lastChanged: 'unknown', risk: 'low' };
+ }
+ });
+
+ return hotspots;
+ } catch (error) {
+ console.error('Error getting hotspots:', error);
+ return null;
+ }
+ }
+
+ /**
+ * Get file coupling - files that frequently change together (hidden dependencies)
+ */
+ getFileCoupling(minCoupling: number = 3): Array<{ fileA: string; fileB: string; timesChanged: number; coupling: string }> | null {
+ if (!this.isGitRepo()) {
+ return null;
+ }
+
+ try {
+ // Get commits with changed files
+ const output = this.exec('git log --format="COMMIT:%H" --name-only --since="6 months ago"');
+ const lines = output.split('\n');
+
+ // Parse commits and their files
+ const commits: string[][] = [];
+ let currentCommit: string[] = [];
+
+ for (const line of lines) {
+ if (line.startsWith('COMMIT:')) {
+ if (currentCommit.length > 0) {
+ commits.push([...currentCommit]);
+ }
+ currentCommit = [];
+ } else if (line.trim()) {
+ currentCommit.push(line.trim());
+ }
+ }
+ if (currentCommit.length > 0) {
+ commits.push(currentCommit);
+ }
+
+ // Count co-changes
+ const couplingMap = new Map();
+
+ for (const files of commits) {
+ if (files.length < 2) continue;
+
+ // For each pair of files in the commit
+ for (let i = 0; i < files.length; i++) {
+ for (let j = i + 1; j < files.length; j++) {
+ const pair = [files[i], files[j]].sort().join('|||');
+ couplingMap.set(pair, (couplingMap.get(pair) || 0) + 1);
+ }
+ }
+ }
+
+ // Filter and format results
+ const couplings = Array.from(couplingMap.entries())
+ .filter(([_, count]) => count >= minCoupling)
+ .map(([pair, count]) => {
+ const [fileA, fileB] = pair.split('|||');
+
+ // Coupling strength
+ let coupling = 'weak';
+ if (count > 15) coupling = 'strong';
+ else if (count > 8) coupling = 'medium';
+
+ return { fileA, fileB, timesChanged: count, coupling };
+ })
+ .sort((a, b) => b.timesChanged - a.timesChanged)
+ .slice(0, 20);
+
+ return couplings;
+ } catch (error) {
+ console.error('Error getting file coupling:', error);
+ return null;
+ }
+ }
+
+ /**
+ * Get blame/ownership info for a file
+ */
+ getBlame(filepath: string): Array<{ author: string; lines: number; percentage: number; lastEdit: string }> | null {
+ if (!this.isGitRepo()) {
+ return null;
+ }
+
+ try {
+ // Get blame with author info
+ const output = this.exec(`git blame --line-porcelain "${filepath}"`);
+ const lines = output.split('\n');
+
+ // Parse blame output
+ const authorLines = new Map();
+ const authorDates = new Map();
+ let currentAuthor = '';
+ let totalLines = 0;
+
+ for (const line of lines) {
+ if (line.startsWith('author ')) {
+ currentAuthor = line.substring(7);
+ authorLines.set(currentAuthor, (authorLines.get(currentAuthor) || 0) + 1);
+ totalLines++;
+ } else if (line.startsWith('author-time ')) {
+ const timestamp = parseInt(line.substring(12)) * 1000;
+ const existing = authorDates.get(currentAuthor);
+ if (!existing || timestamp > new Date(existing).getTime()) {
+ authorDates.set(currentAuthor, new Date(timestamp).toISOString());
+ }
+ }
+ }
+
+ // Calculate percentages and format
+ const ownership = Array.from(authorLines.entries())
+ .map(([author, lines]) => {
+ const percentage = Math.round((lines / totalLines) * 100);
+ const lastEditISO = authorDates.get(author) || '';
+ const lastEdit = lastEditISO ? this.formatRelativeTime(lastEditISO) : 'unknown';
+
+ return { author, lines, percentage, lastEdit };
+ })
+ .sort((a, b) => b.lines - a.lines);
+
+ return ownership;
+ } catch (error) {
+ console.error('Error getting blame:', error);
+ return null;
+ }
+ }
+
+ /**
+ * Get comprehensive git analysis (combines hotspots, coupling, and more)
+ */
+ getAnalysis(): {
+ hotspots: Array<{ file: string; changes: number; lastChanged: string; risk: string }>;
+ coupling: Array<{ fileA: string; fileB: string; timesChanged: number; coupling: string }>;
+ contributors: Array<{ name: string; commits: number; lastCommit: string }>;
+ branchHealth: { current: string; behind: number; ahead: number; stale: boolean };
+ } | null {
+ if (!this.isGitRepo()) {
+ return null;
+ }
+
+ try {
+ const hotspots = this.getHotspots(10) || [];
+ const coupling = this.getFileCoupling(3) || [];
+
+ // Get contributor activity
+ const contributorOutput = this.exec('git shortlog -sn --since="6 months ago"');
+ const contributors = contributorOutput
+ .split('\n')
+ .filter(line => line.trim())
+ .map(line => {
+ const match = line.trim().match(/(\d+)\s+(.+)/);
+ if (!match) return null;
+
+ const commits = parseInt(match[1]);
+ const name = match[2];
+
+ // Get last commit date for this author
+ const lastCommitOutput = this.exec(`git log -1 --author="${name}" --format="%ar"`);
+ const lastCommit = lastCommitOutput.trim();
+
+ return { name, commits, lastCommit };
+ })
+ .filter(c => c !== null) as Array<{ name: string; commits: number; lastCommit: string }>;
+
+ // Branch health
+ const status = this.getStatus();
+ const branchHealth = {
+ current: status?.branch || 'unknown',
+ behind: status?.behind || 0,
+ ahead: status?.ahead || 0,
+ stale: (status?.behind || 0) > 10
+ };
+
+ return { hotspots, coupling, contributors, branchHealth };
+ } catch (error) {
+ console.error('Error getting git analysis:', error);
+ return null;
+ }
+ }
+
+ /**
+ * Format relative time from ISO string
+ */
+ private formatRelativeTime(isoString: string): string {
+ const date = new Date(isoString);
+ const now = new Date();
+ const diff = now.getTime() - date.getTime();
+
+ const seconds = Math.floor(diff / 1000);
+ const minutes = Math.floor(seconds / 60);
+ const hours = Math.floor(minutes / 60);
+ const days = Math.floor(hours / 24);
+ const weeks = Math.floor(days / 7);
+ const months = Math.floor(days / 30);
+
+ if (months > 0) return `${months} month${months > 1 ? 's' : ''} ago`;
+ if (weeks > 0) return `${weeks} week${weeks > 1 ? 's' : ''} ago`;
+ if (days > 0) return `${days} day${days > 1 ? 's' : ''} ago`;
+ if (hours > 0) return `${hours} hour${hours > 1 ? 's' : ''} ago`;
+ if (minutes > 0) return `${minutes} minute${minutes > 1 ? 's' : ''} ago`;
+ return 'just now';
+ }
+
// ========== PRIVATE HELPER METHODS ==========
private exec(command: string): string {
@@ -382,4 +626,4 @@ export class GitIntegration {
return `update ${files.length} files`;
}
-}
\ No newline at end of file
+}
diff --git a/src/git-status-engine.ts b/src/git-status-engine.ts
new file mode 100644
index 0000000..1d267fa
--- /dev/null
+++ b/src/git-status-engine.ts
@@ -0,0 +1,385 @@
+๏ปฟ/**
+ * Git Status Engine
+ *
+ * Layer 1: Fast git status with categorization
+ * Layer 2: Impact analysis and change classification
+ * Layer 3: File complexity context for changed files
+ *
+ * Features:
+ * - Change categorization (features/bugs/refactors/docs)
+ * - Impact scoring (high/medium/low)
+ * - Complexity context for changed files
+ * - Commit readiness assessment
+ * - Smart warnings and suggestions
+ */
+
+import simpleGit, { SimpleGit, StatusResult } from 'simple-git';
+import * as path from 'path';
+import { ReadFileEngine } from './read-file-engine.js';
+
+interface FileChange {
+ path: string;
+ status: 'staged' | 'modified' | 'untracked' | 'deleted' | 'renamed';
+ category?: 'feature' | 'bugfix' | 'refactor' | 'docs' | 'config' | 'test' | 'other';
+ impact?: 'high' | 'medium' | 'low';
+ complexity?: string;
+ linesOfCode?: number;
+}
+
+interface GitStatusResult {
+ branch: string;
+ ahead: number;
+ behind: number;
+ clean: boolean;
+ changes: {
+ staged: FileChange[];
+ modified: FileChange[];
+ untracked: FileChange[];
+ deleted: FileChange[];
+ renamed: FileChange[];
+ };
+ summary: {
+ totalChanges: number;
+ highImpact: number;
+ complexity: {
+ high: number;
+ medium: number;
+ low: number;
+ };
+ categories: {
+ [key: string]: number;
+ };
+ };
+ commitReadiness: {
+ ready: boolean;
+ warnings: string[];
+ suggestions: string[];
+ };
+}
+
+export class GitStatusEngine {
+ private git: SimpleGit;
+ private workspacePath: string;
+ private readFileEngine: ReadFileEngine;
+
+ constructor(workspacePath: string) {
+ this.workspacePath = workspacePath;
+ this.git = simpleGit(workspacePath);
+ this.readFileEngine = new ReadFileEngine(workspacePath);
+ }
+
+ /**
+ * Layer 1 + 2 + 3: Get enriched git status
+ */
+ async getStatus(options: {
+ analyzeImpact?: boolean;
+ enrichContext?: boolean;
+ } = {}): Promise {
+ const {
+ analyzeImpact = true,
+ enrichContext = true
+ } = options;
+
+ // Layer 1: Get basic git status
+ const status = await this.git.status();
+
+ const result: GitStatusResult = {
+ branch: status.current || 'unknown',
+ ahead: status.ahead,
+ behind: status.behind,
+ clean: status.isClean(),
+ changes: {
+ staged: [],
+ modified: [],
+ untracked: [],
+ deleted: [],
+ renamed: []
+ },
+ summary: {
+ totalChanges: 0,
+ highImpact: 0,
+ complexity: { high: 0, medium: 0, low: 0 },
+ categories: {}
+ },
+ commitReadiness: {
+ ready: false,
+ warnings: [],
+ suggestions: []
+ }
+ };
+
+ // Process all changes
+ await this.processChanges(status, result, analyzeImpact, enrichContext);
+
+ // Calculate summary
+ this.calculateSummary(result);
+
+ // Assess commit readiness
+ this.assessCommitReadiness(result);
+
+ return result;
+ }
+
+ /**
+ * Layer 2: Process and categorize changes
+ */
+ private async processChanges(
+ status: StatusResult,
+ result: GitStatusResult,
+ analyzeImpact: boolean,
+ enrichContext: boolean
+ ): Promise {
+ // Staged files
+ for (const file of status.staged) {
+ const change = await this.analyzeChange(file, 'staged', analyzeImpact, enrichContext);
+ result.changes.staged.push(change);
+ }
+
+ // Modified files
+ for (const file of status.modified) {
+ const change = await this.analyzeChange(file, 'modified', analyzeImpact, enrichContext);
+ result.changes.modified.push(change);
+ }
+
+ // Untracked files
+ for (const file of status.not_added) {
+ const change = await this.analyzeChange(file, 'untracked', analyzeImpact, enrichContext);
+ result.changes.untracked.push(change);
+ }
+
+ // Deleted files
+ for (const file of status.deleted) {
+ const change: FileChange = {
+ path: file,
+ status: 'deleted',
+ category: this.categorizeFile(file),
+ impact: 'medium'
+ };
+ result.changes.deleted.push(change);
+ }
+
+ // Renamed files
+ for (const file of status.renamed) {
+ const change = await this.analyzeChange(file.to || file.from, 'renamed', analyzeImpact, enrichContext);
+ result.changes.renamed.push(change);
+ }
+ }
+
+ /**
+ * Layer 2 + 3: Analyze individual change
+ */
+ private async analyzeChange(
+ filePath: string,
+ status: FileChange['status'],
+ analyzeImpact: boolean,
+ enrichContext: boolean
+ ): Promise {
+ const change: FileChange = {
+ path: filePath,
+ status
+ };
+
+ // Layer 2: Categorize and assess impact
+ if (analyzeImpact) {
+ change.category = this.categorizeFile(filePath);
+ change.impact = this.assessImpact(filePath, change.category);
+ }
+
+ // Layer 3: Enrich with file complexity context
+ if (enrichContext && status !== 'deleted' && this.isCodeFile(filePath)) {
+ try {
+ const fullPath = path.join(this.workspacePath, filePath);
+ const fileCtx = await this.readFileEngine.read(fullPath);
+ change.complexity = fileCtx.complexity.level;
+ change.linesOfCode = fileCtx.metadata.linesOfCode;
+ } catch (err) {
+ // Skip files that can't be analyzed
+ }
+ }
+
+ return change;
+ }
+
+ /**
+ * Layer 2: Categorize file by path and name
+ */
+ private categorizeFile(filePath: string): FileChange['category'] {
+ const lower = filePath.toLowerCase();
+
+ // Tests
+ if (lower.includes('test') || lower.includes('spec') || lower.includes('__tests__')) {
+ return 'test';
+ }
+
+ // Docs
+ if (lower.endsWith('.md') || lower.includes('doc') || lower.includes('readme')) {
+ return 'docs';
+ }
+
+ // Config
+ if (
+ lower.includes('config') ||
+ lower.endsWith('.json') ||
+ lower.endsWith('.yaml') ||
+ lower.endsWith('.yml') ||
+ lower.endsWith('.toml') ||
+ lower.endsWith('.env')
+ ) {
+ return 'config';
+ }
+
+ // Try to infer from common patterns
+ if (lower.includes('fix') || lower.includes('bug')) {
+ return 'bugfix';
+ }
+
+ if (lower.includes('refactor') || lower.includes('cleanup')) {
+ return 'refactor';
+ }
+
+ // Default to feature for code files
+ if (this.isCodeFile(filePath)) {
+ return 'feature';
+ }
+
+ return 'other';
+ }
+
+ /**
+ * Layer 2: Assess impact of change
+ */
+ private assessImpact(filePath: string, category?: FileChange['category']): FileChange['impact'] {
+ const lower = filePath.toLowerCase();
+
+ // High impact files
+ if (
+ lower.includes('index') ||
+ lower.includes('main') ||
+ lower.includes('server') ||
+ lower.includes('app') ||
+ lower.includes('core') ||
+ lower.includes('engine') ||
+ lower.includes('database') ||
+ lower.includes('migration')
+ ) {
+ return 'high';
+ }
+
+ // Low impact files
+ if (
+ category === 'docs' ||
+ category === 'test' ||
+ lower.includes('util') ||
+ lower.includes('helper') ||
+ lower.includes('type')
+ ) {
+ return 'low';
+ }
+
+ // Medium by default
+ return 'medium';
+ }
+
+ /**
+ * Calculate summary statistics
+ */
+ private calculateSummary(result: GitStatusResult): void {
+ const allChanges = [
+ ...result.changes.staged,
+ ...result.changes.modified,
+ ...result.changes.untracked,
+ ...result.changes.deleted,
+ ...result.changes.renamed
+ ];
+
+ result.summary.totalChanges = allChanges.length;
+
+ for (const change of allChanges) {
+ // Count high impact
+ if (change.impact === 'high') {
+ result.summary.highImpact++;
+ }
+
+ // Count complexity
+ if (change.complexity) {
+ if (change.complexity === 'low') result.summary.complexity.low++;
+ else if (change.complexity === 'medium') result.summary.complexity.medium++;
+ else if (change.complexity === 'high' || change.complexity === 'very-high') {
+ result.summary.complexity.high++;
+ }
+ }
+
+ // Count categories
+ if (change.category) {
+ result.summary.categories[change.category] =
+ (result.summary.categories[change.category] || 0) + 1;
+ }
+ }
+ }
+
+ /**
+ * Layer 3: Assess commit readiness
+ */
+ private assessCommitReadiness(result: GitStatusResult): void {
+ const warnings: string[] = [];
+ const suggestions: string[] = [];
+
+ // Check if there are staged changes
+ if (result.changes.staged.length === 0) {
+ warnings.push('No files staged for commit');
+ if (result.changes.modified.length > 0 || result.changes.untracked.length > 0) {
+ suggestions.push('Stage files with git add before committing');
+ }
+ }
+
+ // Check for high impact changes
+ const stagedHighImpact = result.changes.staged.filter(c => c.impact === 'high').length;
+ if (stagedHighImpact > 0) {
+ warnings.push(`${stagedHighImpact} high-impact file(s) staged - review carefully`);
+ suggestions.push('Consider splitting high-impact changes into separate commits');
+ }
+
+ // Check for very complex files
+ const complexFiles = result.changes.staged.filter(c =>
+ c.complexity === 'high' || c.complexity === 'very-high'
+ ).length;
+ if (complexFiles > 0) {
+ suggestions.push(`${complexFiles} complex file(s) changed - ensure adequate testing`);
+ }
+
+ // Check for mixed categories
+ const categories = new Set(result.changes.staged.map(c => c.category));
+ if (categories.size > 2) {
+ suggestions.push('Multiple change types staged - consider separate commits for clarity');
+ }
+
+ // Check if working tree is clean except staged
+ const hasUnstagedChanges =
+ result.changes.modified.length > 0 ||
+ result.changes.untracked.length > 0;
+ if (hasUnstagedChanges) {
+ suggestions.push('Unstaged changes present - consider stashing or committing separately');
+ }
+
+ // Ready if staged files exist and no critical warnings
+ result.commitReadiness.ready =
+ result.changes.staged.length > 0 &&
+ warnings.filter(w => w.includes('review carefully')).length === 0;
+
+ result.commitReadiness.warnings = warnings;
+ result.commitReadiness.suggestions = suggestions;
+ }
+
+ /**
+ * Helper: Check if code file
+ */
+ private isCodeFile(filePath: string): boolean {
+ const extensions = [
+ '.ts', '.js', '.tsx', '.jsx',
+ '.py', '.go', '.rs', '.java',
+ '.cpp', '.c', '.h'
+ ];
+ return extensions.some(ext => filePath.endsWith(ext));
+ }
+}
+
diff --git a/src/index.ts b/src/index.ts
index 30ecba7..98b583f 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -4,37 +4,33 @@ import { ContextSyncServer } from './server.js';
async function main() {
// Support custom database path for development/testing
- // Priority: CLI argument > Environment variable > Default
let storagePath: string | undefined;
// Check for --db-path argument
const dbPathIndex = process.argv.indexOf('--db-path');
if (dbPathIndex !== -1 && process.argv[dbPathIndex + 1]) {
storagePath = process.argv[dbPathIndex + 1];
- console.error(`Using custom database: ${storagePath}`);
+ console.error(`Context Sync - Using custom database: ${storagePath}`);
}
// Check for environment variable
else if (process.env.CONTEXT_SYNC_DB_PATH) {
storagePath = process.env.CONTEXT_SYNC_DB_PATH;
- console.error(`Using database from env: ${storagePath}`);
- }
- // Check for --dev flag (uses dev database)
- else if (process.argv.includes('--dev')) {
- const os = require('os');
- const path = require('path');
- storagePath = path.join(os.homedir(), '.context-sync', 'dev-data.db');
- console.error(`Using development database: ${storagePath}`);
+ console.error(`Context Sync - Using database from env: ${storagePath}`);
+ } else {
+ console.error('Context Sync - 9 Essential Tools');
}
const server = new ContextSyncServer(storagePath);
-
+
// Handle graceful shutdown
process.on('SIGINT', () => {
+ console.error('\nShutting down Context Sync...');
server.close();
process.exit(0);
});
process.on('SIGTERM', () => {
+ console.error('\nShutting down Context Sync...');
server.close();
process.exit(0);
});
@@ -43,6 +39,7 @@ async function main() {
}
main().catch((error) => {
- console.error('Fatal error:', error);
+ console.error('Failed to start Context Sync:', error);
process.exit(1);
-});
\ No newline at end of file
+});
+
diff --git a/src/migration-prompter.ts b/src/migration-prompter.ts
deleted file mode 100644
index 5c7b8e6..0000000
--- a/src/migration-prompter.ts
+++ /dev/null
@@ -1,169 +0,0 @@
-import { DatabaseMigrator } from './database-migrator.js';
-import type { ProjectContext } from './types.js';
-
-export interface MigrationPromptResult {
- shouldPrompt: boolean;
- message: string;
- duplicateCount: number;
- hasPromptedThisSession?: boolean;
-}
-
-export class MigrationPrompter {
- private static sessionPrompts: Set = new Set();
- private static readonly VERSION_REQUIRING_MIGRATION = '1.0.0';
-
- /**
- * Check if we should prompt the user for database migration
- */
- static async shouldPromptForMigration(
- currentVersion: string,
- dbPath?: string
- ): Promise {
- // Only prompt for v1.0.0+
- if (!this.isVersionRequiringMigration(currentVersion)) {
- return {
- shouldPrompt: false,
- message: '',
- duplicateCount: 0
- };
- }
-
- // Check if already prompted this session
- const sessionKey = `migration_${dbPath || 'default'}`;
- if (this.sessionPrompts.has(sessionKey)) {
- return {
- shouldPrompt: false,
- message: '',
- duplicateCount: 0,
- hasPromptedThisSession: true
- };
- }
-
- try {
- // Check for duplicates
- const migrator = new DatabaseMigrator(dbPath);
- const stats = await migrator.getMigrationStats();
- migrator.close();
-
- if (stats.duplicateGroups === 0) {
- return {
- shouldPrompt: false,
- message: '',
- duplicateCount: 0
- };
- }
-
- // Mark as prompted for this session
- this.sessionPrompts.add(sessionKey);
-
- const message = this.buildMigrationPromptMessage(stats);
-
- return {
- shouldPrompt: true,
- message,
- duplicateCount: stats.totalDuplicates,
- hasPromptedThisSession: false
- };
-
- } catch (error) {
- console.warn('Failed to check migration status:', error);
- return {
- shouldPrompt: false,
- message: '',
- duplicateCount: 0
- };
- }
- }
-
- /**
- * Check if current version requires migration prompting
- */
- private static isVersionRequiringMigration(version: string): boolean {
- try {
- const current = this.parseVersion(version);
- const required = this.parseVersion(this.VERSION_REQUIRING_MIGRATION);
-
- return current.major >= required.major &&
- (current.major > required.major || current.minor >= required.minor);
- } catch {
- return false; // If we can't parse version, don't prompt
- }
- }
-
- /**
- * Parse semantic version string
- */
- private static parseVersion(version: string): { major: number; minor: number; patch: number } {
- const match = version.match(/^(\d+)\.(\d+)\.(\d+)/);
- if (!match) {
- throw new Error(`Invalid version format: ${version}`);
- }
-
- return {
- major: parseInt(match[1], 10),
- minor: parseInt(match[2], 10),
- patch: parseInt(match[3], 10)
- };
- }
-
- /**
- * Build user-friendly migration prompt message
- */
- private static buildMigrationPromptMessage(stats: any): string {
- let message = `๐ง **Context Sync v1.0.0+ Database Optimization Available**\n\n`;
-
- message += `Your database has **${stats.totalDuplicates} duplicate projects** that can be cleaned up for better performance.\n\n`;
-
- message += `โจ **Benefits of running migration:**\n`;
- message += `โข ๐ **Faster performance** - Optimized database operations\n`;
- message += `โข ๐งน **Cleaner project list** - Remove duplicate entries\n`;
- message += `โข ๐ฏ **Better AI integration** - Improved context accuracy\n`;
- message += `โข ๐พ **Preserved data** - All conversations, decisions, and todos kept safe\n\n`;
-
- message += `๐ **What will be cleaned:**\n`;
- stats.duplicateDetails.slice(0, 3).forEach((group: any, i: number) => {
- message += `โข ${group.path} (${group.count} duplicates)\n`;
- });
-
- if (stats.duplicateDetails.length > 3) {
- message += `โข ... and ${stats.duplicateDetails.length - 3} more duplicate groups\n`;
- }
-
- message += `\n๐ก๏ธ **Safe & Reversible:**\n`;
- message += `โข Preview changes first: \`migrate_database dryRun:true\`\n`;
- message += `โข Full backup recommended before migration\n`;
- message += `โข All your project data will be preserved and merged\n\n`;
-
- message += `๐ **Ready to optimize?**\n`;
- message += `1. Preview: \`get_migration_stats\`\n`;
- message += `2. Test run: \`migrate_database dryRun:true\`\n`;
- message += `3. Apply: \`migrate_database\`\n\n`;
-
- message += `*This message shows once per session. Migration is optional but recommended for optimal performance.*`;
-
- return message;
- }
-
- /**
- * Create a lightweight migration suggestion message
- */
- static createLightweightPrompt(duplicateCount: number): string {
- return `๐ก **Performance Tip:** Your database has ${duplicateCount} duplicate projects. ` +
- `Run \`get_migration_stats\` to see cleanup options for better performance.`;
- }
-
- /**
- * Reset session prompts (useful for testing)
- */
- static resetSessionPrompts(): void {
- this.sessionPrompts.clear();
- }
-
- /**
- * Check if a specific migration has been prompted this session
- */
- static hasPromptedThisSession(dbPath?: string): boolean {
- const sessionKey = `migration_${dbPath || 'default'}`;
- return this.sessionPrompts.has(sessionKey);
- }
-}
\ No newline at end of file
diff --git a/src/notion-handlers.ts b/src/notion-handlers.ts
index b68035d..e3b0033 100644
--- a/src/notion-handlers.ts
+++ b/src/notion-handlers.ts
@@ -1,4 +1,4 @@
-/**
+๏ปฟ/**
* Notion tool handlers for Context Sync MCP Server
*/
@@ -11,7 +11,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: 'โ Notion is not configured. Run `context-sync setup` to configure Notion integration.',
+ text: ' Notion is not configured. Run `context-sync setup` to configure Notion integration.',
}],
isError: true,
};
@@ -20,7 +20,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
try {
const results = await notionIntegration.searchPages(args.query);
- let response = `๐ **Notion Search Results for "${args.query}"**\n\n`;
+ let response = ` **Notion Search Results for "${args.query}"**\n\n`;
response += `Found ${results.pages.length} page(s)\n\n`;
results.pages.forEach((page, i) => {
@@ -31,10 +31,10 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
});
if (results.pages.length === 0) {
- response += `๐ก **Tips:**\n`;
- response += `โข Make sure pages are shared with your Notion integration\n`;
- response += `โข Try a different search query\n`;
- response += `โข Check your Notion workspace permissions\n`;
+ response += ` **Tips:**\n`;
+ response += ` Make sure pages are shared with your Notion integration\n`;
+ response += ` Try a different search query\n`;
+ response += ` Check your Notion workspace permissions\n`;
}
return {
@@ -44,7 +44,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: `โ **Failed to search Notion**\n\nError: ${error.message}`,
+ text: ` **Failed to search Notion**\n\nError: ${error.message}`,
}],
isError: true,
};
@@ -56,7 +56,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: 'โ Notion is not configured. Run `context-sync setup` to configure Notion integration.',
+ text: ' Notion is not configured. Run `context-sync setup` to configure Notion integration.',
}],
isError: true,
};
@@ -65,8 +65,8 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
try {
const page = await notionIntegration.readPage(args.pageId);
- let response = `๐ **${page.title}**\n\n`;
- response += `๐ ${page.url}\n\n`;
+ let response = ` **${page.title}**\n\n`;
+ response += ` ${page.url}\n\n`;
response += `---\n\n`;
response += page.content;
@@ -77,7 +77,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: `โ **Failed to read page**\n\nError: ${error.message}\n\nMake sure:\nโข The page ID is correct\nโข The page is shared with your integration`,
+ text: ` **Failed to read page**\n\nError: ${error.message}\n\nMake sure:\n The page ID is correct\n The page is shared with your integration`,
}],
isError: true,
};
@@ -89,7 +89,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: 'โ Notion is not configured. Run `context-sync setup` to configure Notion integration.',
+ text: ' Notion is not configured. Run `context-sync setup` to configure Notion integration.',
}],
isError: true,
};
@@ -98,14 +98,14 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
try {
const page = await notionIntegration.createPage(args.title, args.content, args.parentPageId);
- let response = `โ
**Page Created Successfully!**\n\n`;
- response += `๐ **${page.title}**\n`;
- response += `๐ ${page.url}\n`;
- response += `๐ Page ID: ${page.id}\n\n`;
- response += `๐ก You can now:\n`;
- response += `โข Open the page in Notion\n`;
- response += `โข Read it with: notion_read_page pageId:"${page.id}"\n`;
- response += `โข Update it with: notion_update_page\n`;
+ let response = ` **Page Created Successfully!**\n\n`;
+ response += ` **${page.title}**\n`;
+ response += ` ${page.url}\n`;
+ response += ` Page ID: ${page.id}\n\n`;
+ response += ` You can now:\n`;
+ response += ` Open the page in Notion\n`;
+ response += ` Read it with: notion_read_page pageId:"${page.id}"\n`;
+ response += ` Update it with: notion_update_page\n`;
return {
content: [{ type: 'text', text: response }],
@@ -114,7 +114,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: `โ **Failed to create page**\n\nError: ${error.message}\n\nMake sure:\nโข A default parent page is configured, or provide parentPageId\nโข The parent page is shared with your integration`,
+ text: ` **Failed to create page**\n\nError: ${error.message}\n\nMake sure:\n A default parent page is configured, or provide parentPageId\n The parent page is shared with your integration`,
}],
isError: true,
};
@@ -126,7 +126,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: 'โ Notion is not configured. Run `context-sync setup` to configure Notion integration.',
+ text: ' Notion is not configured. Run `context-sync setup` to configure Notion integration.',
}],
isError: true,
};
@@ -135,10 +135,10 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
try {
await notionIntegration.updatePage(args.pageId, args.content);
- let response = `โ
**Page Updated Successfully!**\n\n`;
- response += `๐ Page ID: ${args.pageId}\n`;
- response += `โจ Content has been replaced with new content\n\n`;
- response += `๐ก Read the updated page with: notion_read_page pageId:"${args.pageId}"\n`;
+ let response = ` **Page Updated Successfully!**\n\n`;
+ response += ` Page ID: ${args.pageId}\n`;
+ response += ` Content has been replaced with new content\n\n`;
+ response += ` Read the updated page with: notion_read_page pageId:"${args.pageId}"\n`;
return {
content: [{ type: 'text', text: response }],
@@ -147,7 +147,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: `โ **Failed to update page**\n\nError: ${error.message}`,
+ text: ` **Failed to update page**\n\nError: ${error.message}`,
}],
isError: true,
};
@@ -159,7 +159,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: 'โ Notion is not configured. Run `context-sync setup` to configure Notion integration.',
+ text: ' Notion is not configured. Run `context-sync setup` to configure Notion integration.',
}],
isError: true,
};
@@ -174,7 +174,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: `โ Decision with ID "${args.decisionId}" not found.\n\nUse get_project_context to see available decisions.`,
+ text: ` Decision with ID "${args.decisionId}" not found.\n\nUse get_project_context to see available decisions.`,
}],
isError: true,
};
@@ -182,10 +182,10 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
const page = await notionIntegration.syncDecision(decision);
- let response = `โ
**Decision Synced to Notion!**\n\n`;
- response += `๐ **ADR: ${decision.description}**\n`;
- response += `๐ ${page.url}\n`;
- response += `๐ Page ID: ${page.id}\n\n`;
+ let response = ` **Decision Synced to Notion!**\n\n`;
+ response += ` **ADR: ${decision.description}**\n`;
+ response += ` ${page.url}\n`;
+ response += ` Page ID: ${page.id}\n\n`;
response += `The decision has been formatted as an Architecture Decision Record (ADR) in Notion.\n`;
return {
@@ -195,7 +195,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: `โ **Failed to sync decision**\n\nError: ${error.message}`,
+ text: ` **Failed to sync decision**\n\nError: ${error.message}`,
}],
isError: true,
};
@@ -207,7 +207,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: 'โ Notion is not configured. Run `context-sync setup` to configure Notion integration.',
+ text: ' Notion is not configured. Run `context-sync setup` to configure Notion integration.',
}],
isError: true,
};
@@ -220,7 +220,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: 'โ No project specified and no current project set. Use set_workspace first or provide projectId.',
+ text: ' No project specified and no current project set. Use set_workspace first or provide projectId.',
}],
isError: true,
};
@@ -232,7 +232,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: `โ Project with ID "${projectId}" not found.`,
+ text: ` Project with ID "${projectId}" not found.`,
}],
isError: true,
};
@@ -240,15 +240,15 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
const page = await notionIntegration.createProjectDashboard(project);
- let response = `โ
**Project Dashboard Created!**\n\n`;
- response += `๐ **Project: ${project.name}**\n`;
- response += `๐ ${page.url}\n`;
- response += `๐ Page ID: ${page.id}\n\n`;
+ let response = ` **Project Dashboard Created!**\n\n`;
+ response += ` **Project: ${project.name}**\n`;
+ response += ` ${page.url}\n`;
+ response += ` Page ID: ${page.id}\n\n`;
response += `The dashboard includes:\n`;
- response += `โข Project overview\n`;
- response += `โข Tech stack\n`;
- response += `โข Architecture notes\n`;
- response += `โข Creation & update timestamps\n`;
+ response += ` Project overview\n`;
+ response += ` Tech stack\n`;
+ response += ` Architecture notes\n`;
+ response += ` Creation & update timestamps\n`;
return {
content: [{ type: 'text', text: response }],
@@ -257,7 +257,7 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
return {
content: [{
type: 'text',
- text: `โ **Failed to create dashboard**\n\nError: ${error.message}`,
+ text: ` **Failed to create dashboard**\n\nError: ${error.message}`,
}],
isError: true,
};
@@ -265,3 +265,5 @@ export function createNotionHandlers(notionIntegration: NotionIntegration | null
},
};
}
+
+
diff --git a/src/notion-integration.ts b/src/notion-integration.ts
index 4426e28..c8a9169 100644
--- a/src/notion-integration.ts
+++ b/src/notion-integration.ts
@@ -1,4 +1,4 @@
-/**
+๏ปฟ/**
* Notion Integration for Context Sync
* Provides direct Notion API integration for reading and writing documentation
*/
@@ -70,8 +70,8 @@ export class NotionIntegration {
});
const pages = response.results
- .filter(result => result.object === 'page')
- .map(page => {
+ .filter((result: any) => result.object === 'page')
+ .map((page: any) => {
const pageData = page as any;
const title = this.extractPageTitle(pageData);
@@ -272,7 +272,7 @@ export class NotionIntegration {
page_size: 20
});
- return response.results.map(page => {
+ return response.results.map((page: any) => {
const pageData = page as any;
return {
id: pageData.id,
@@ -541,3 +541,4 @@ ${project.architecture ? `\n## Architecture\n${project.architecture}` : ''}
`.trim();
}
}
+
diff --git a/src/path-normalizer.ts b/src/path-normalizer.ts
index 110bfff..ac69d10 100644
--- a/src/path-normalizer.ts
+++ b/src/path-normalizer.ts
@@ -1,4 +1,4 @@
-/**
+๏ปฟ/**
* Path normalization utilities for consistent path handling across platforms
*/
@@ -124,4 +124,4 @@ export class PathNormalizer {
return { valid: true };
}
-}
\ No newline at end of file
+}
diff --git a/src/performance-monitor.ts b/src/performance-monitor.ts
deleted file mode 100644
index 1eeea83..0000000
--- a/src/performance-monitor.ts
+++ /dev/null
@@ -1,154 +0,0 @@
-/**
- * Performance monitoring utilities for Context Sync operations
- */
-
-export interface PerformanceMetrics {
- operation: string;
- duration: number;
- timestamp: number;
- memoryUsage?: NodeJS.MemoryUsage;
- metadata?: any;
-}
-
-export class PerformanceMonitor {
- private static metrics: PerformanceMetrics[] = [];
- private static readonly MAX_METRICS = 1000; // Keep last 1000 operations
-
- /**
- * Start timing an operation
- */
- static startTimer(operation: string): () => PerformanceMetrics {
- const startTime = process.hrtime.bigint();
- const startMemory = process.memoryUsage();
-
- return (metadata?: any): PerformanceMetrics => {
- const endTime = process.hrtime.bigint();
- const duration = Number(endTime - startTime) / 1_000_000; // Convert to milliseconds
- const endMemory = process.memoryUsage();
-
- const metric: PerformanceMetrics = {
- operation,
- duration,
- timestamp: Date.now(),
- memoryUsage: {
- rss: endMemory.rss - startMemory.rss,
- heapTotal: endMemory.heapTotal - startMemory.heapTotal,
- heapUsed: endMemory.heapUsed - startMemory.heapUsed,
- external: endMemory.external - startMemory.external,
- arrayBuffers: endMemory.arrayBuffers - startMemory.arrayBuffers,
- },
- metadata,
- };
-
- this.recordMetric(metric);
- return metric;
- };
- }
-
- /**
- * Record a performance metric
- */
- private static recordMetric(metric: PerformanceMetrics): void {
- this.metrics.push(metric);
-
- // Keep only the most recent metrics to prevent memory bloat
- if (this.metrics.length > this.MAX_METRICS) {
- this.metrics = this.metrics.slice(-this.MAX_METRICS);
- }
- }
-
- /**
- * Get performance statistics for operations
- */
- static getStats(operation?: string): {
- count: number;
- totalDuration: number;
- averageDuration: number;
- minDuration: number;
- maxDuration: number;
- recentMetrics: PerformanceMetrics[];
- } {
- const relevantMetrics = operation
- ? this.metrics.filter(m => m.operation === operation)
- : this.metrics;
-
- if (relevantMetrics.length === 0) {
- return {
- count: 0,
- totalDuration: 0,
- averageDuration: 0,
- minDuration: 0,
- maxDuration: 0,
- recentMetrics: [],
- };
- }
-
- const durations = relevantMetrics.map(m => m.duration);
- const totalDuration = durations.reduce((sum, d) => sum + d, 0);
-
- return {
- count: relevantMetrics.length,
- totalDuration,
- averageDuration: totalDuration / relevantMetrics.length,
- minDuration: Math.min(...durations),
- maxDuration: Math.max(...durations),
- recentMetrics: relevantMetrics.slice(-10), // Last 10 operations
- };
- }
-
- /**
- * Get all operation types with their stats
- */
- static getAllOperationStats(): Record> {
- const operations = [...new Set(this.metrics.map(m => m.operation))];
- const stats: Record> = {};
-
- for (const operation of operations) {
- stats[operation] = this.getStats(operation);
- }
-
- return stats;
- }
-
- /**
- * Clear all metrics
- */
- static clearMetrics(): void {
- this.metrics = [];
- }
-
- /**
- * Get a performance report
- */
- static getReport(): string {
- const allStats = this.getAllOperationStats();
- const operations = Object.keys(allStats).sort();
-
- if (operations.length === 0) {
- return '๐ **Performance Report**: No metrics recorded yet.';
- }
-
- let report = '๐ **Performance Report**\n\n';
- report += `Total Operations: ${this.metrics.length}\n`;
- report += `Unique Operation Types: ${operations.length}\n\n`;
-
- report += '| Operation | Count | Avg (ms) | Min (ms) | Max (ms) | Total (ms) |\n';
- report += '|-----------|-------|----------|----------|----------|------------|\n';
-
- for (const operation of operations) {
- const stats = allStats[operation];
- report += `| ${operation} | ${stats.count} | ${stats.averageDuration.toFixed(2)} | ${stats.minDuration.toFixed(2)} | ${stats.maxDuration.toFixed(2)} | ${stats.totalDuration.toFixed(2)} |\n`;
- }
-
- // Show slowest operations
- const allMetrics = this.metrics.slice().sort((a, b) => b.duration - a.duration);
- if (allMetrics.length > 0) {
- report += '\n**Slowest Recent Operations:**\n';
- allMetrics.slice(0, 5).forEach((metric, index) => {
- report += `${index + 1}. ${metric.operation}: ${metric.duration.toFixed(2)}ms\n`;
- });
- }
-
- return report;
- }
-}
\ No newline at end of file
diff --git a/src/platform-registry.ts b/src/platform-registry.ts
deleted file mode 100644
index 38fd328..0000000
--- a/src/platform-registry.ts
+++ /dev/null
@@ -1,144 +0,0 @@
-/**
- * Platform Registry - Metadata about supported AI platforms
- */
-
-import type { AIPlatform } from './platform-sync.js';
-
-export interface PlatformMetadata {
- name: string;
- category: 'core' | 'extended' | 'api';
- description: string;
- website: string;
- setupComplexity: 'easy' | 'medium' | 'advanced';
- mcpSupport: 'native' | 'extension' | 'custom';
- status: 'stable' | 'beta' | 'experimental';
- features: string[];
-}
-
-export const PLATFORM_REGISTRY: Record = {
- claude: {
- name: 'Claude Desktop',
- category: 'core',
- description: 'Anthropic\'s desktop app with native MCP support',
- website: 'https://claude.ai/desktop',
- setupComplexity: 'easy',
- mcpSupport: 'native',
- status: 'stable',
- features: ['Advanced reasoning', 'Code analysis', 'Long context', 'File operations']
- },
-
- cursor: {
- name: 'Cursor IDE',
- category: 'core',
- description: 'AI-first code editor with built-in AI assistance',
- website: 'https://cursor.sh',
- setupComplexity: 'easy',
- mcpSupport: 'native',
- status: 'stable',
- features: ['Real-time coding', 'Codebase chat', 'AI editing', 'Terminal integration']
- },
-
- copilot: {
- name: 'GitHub Copilot',
- category: 'core',
- description: 'GitHub\'s AI pair programmer for VS Code',
- website: 'https://github.com/features/copilot',
- setupComplexity: 'medium',
- mcpSupport: 'extension',
- status: 'stable',
- features: ['Code completion', 'Chat interface', 'PR analysis', 'Enterprise features']
- },
-
- continue: {
- name: 'Continue.dev',
- category: 'extended',
- description: 'Open source AI coding assistant for VS Code',
- website: 'https://continue.dev',
- setupComplexity: 'medium',
- mcpSupport: 'native',
- status: 'stable',
- features: ['Open source', 'Custom models', 'Self-hosted', 'Extensible']
- },
-
- zed: {
- name: 'Zed Editor',
- category: 'extended',
- description: 'High-performance collaborative code editor',
- website: 'https://zed.dev',
- setupComplexity: 'medium',
- mcpSupport: 'extension',
- status: 'beta',
- features: ['Fast performance', 'Collaboration', 'AI integration', 'Modern UI']
- },
-
- windsurf: {
- name: 'Windsurf by Codeium',
- category: 'extended',
- description: 'AI-native IDE by Codeium with integrated AI assistant',
- website: 'https://windsurf.codeium.com',
- setupComplexity: 'easy',
- mcpSupport: 'native',
- status: 'beta',
- features: ['AI-native design', 'Codeium integration', 'Modern interface', 'Fast setup', 'Free tier available']
- },
-
- tabnine: {
- name: 'TabNine',
- category: 'extended',
- description: 'Enterprise-focused AI code completion',
- website: 'https://tabnine.com',
- setupComplexity: 'medium',
- mcpSupport: 'extension',
- status: 'stable',
- features: ['Enterprise focus', 'On-premise deployment', 'Security compliance', 'Team management']
- },
-
- notion: {
- name: 'Notion',
- category: 'extended',
- description: 'Documentation and knowledge management platform with official MCP server',
- website: 'https://www.notion.so',
- setupComplexity: 'easy',
- mcpSupport: 'native',
- status: 'stable',
- features: ['Documentation management', 'Knowledge base', 'Page creation', 'Content search', 'Database integration']
- },
-
- other: {
- name: 'Other Platform',
- category: 'api',
- description: 'Custom or unsupported platform',
- website: 'https://github.com/Intina47/context-sync',
- setupComplexity: 'advanced',
- mcpSupport: 'custom',
- status: 'experimental',
- features: ['Custom integration', 'Community support', 'Experimental']
- }
-};
-
-/**
- * Get platforms by category
- */
-export function getPlatformsByCategory(category: PlatformMetadata['category']): AIPlatform[] {
- return Object.entries(PLATFORM_REGISTRY)
- .filter(([_, meta]) => meta.category === category)
- .map(([platform, _]) => platform as AIPlatform);
-}
-
-/**
- * Get recommended platforms for new users
- */
-export function getRecommendedPlatforms(): AIPlatform[] {
- return Object.entries(PLATFORM_REGISTRY)
- .filter(([_, meta]) => meta.setupComplexity === 'easy' && meta.status === 'stable')
- .map(([platform, _]) => platform as AIPlatform);
-}
-
-/**
- * Get platform setup difficulty
- */
-export function getPlatformsByDifficulty(difficulty: PlatformMetadata['setupComplexity']): AIPlatform[] {
- return Object.entries(PLATFORM_REGISTRY)
- .filter(([_, meta]) => meta.setupComplexity === difficulty)
- .map(([platform, _]) => platform as AIPlatform);
-}
\ No newline at end of file
diff --git a/src/platform-sync.ts b/src/platform-sync.ts
deleted file mode 100644
index bc01876..0000000
--- a/src/platform-sync.ts
+++ /dev/null
@@ -1,542 +0,0 @@
-import * as fs from 'fs';
-import * as path from 'path';
-import * as os from 'os';
-import type { Storage } from './storage.js';
-import type { ProjectContext } from './types.js';
-
-export type AIPlatform =
- | 'claude' // Claude Desktop
- | 'cursor' // Cursor IDE
- | 'copilot' // GitHub Copilot (VS Code)
- | 'continue' // Continue.dev VS Code extension
- | 'tabnine' // TabNine
- | 'windsurf' // Windsurf
- | 'zed' // Zed Editor
- | 'notion' // Notion (documentation/knowledge management)
- | 'other';
-
-export interface PlatformSession {
- platform: AIPlatform;
- projectId: string;
- startedAt: Date;
- lastActivityAt: Date;
-}
-
-export interface HandoffContext {
- fromPlatform: AIPlatform;
- toPlatform: AIPlatform;
- project: ProjectContext;
- summary: string;
- conversationCount: number;
- decisionCount: number;
- timestamp: Date;
-}
-
-/**
- * Manages cross-platform context synchronization
- * Enables seamless handoff between Claude Desktop, Cursor, and other AI platforms
- */
-export class PlatformSync {
- private storage: Storage;
- private currentPlatform: AIPlatform = 'claude'; // Default to Claude
- private sessionStartTime: Date;
-
- constructor(storage: Storage) {
- this.storage = storage;
- this.sessionStartTime = new Date();
- }
-
- /**
- * Set the current AI platform
- */
- setPlatform(platform: AIPlatform): void {
- this.currentPlatform = platform;
- }
-
- /**
- * Get the current AI platform
- */
- getPlatform(): AIPlatform {
- return this.currentPlatform;
- }
-
- /**
- * Create a handoff context when switching platforms
- */
- createHandoff(fromPlatform: AIPlatform, toPlatform: AIPlatform): HandoffContext | null {
- const project = this.storage.getCurrentProject();
-
- if (!project) {
- return null;
- }
-
- const contextSummary = this.storage.getContextSummary(project.id);
- const recentConversations = this.storage.getRecentConversations(project.id, 10);
-
- // Filter conversations by platform
- const fromPlatformConvs = recentConversations.filter(c => c.tool === fromPlatform);
-
- // Build summary
- let summary = `๐ฑ Platform Handoff: ${fromPlatform} โ ${toPlatform}\n\n`;
- summary += `๐ Project: ${project.name}\n`;
-
- if (project.architecture) {
- summary += `๐๏ธ Architecture: ${project.architecture}\n`;
- }
-
- if (project.techStack.length > 0) {
- summary += `โ๏ธ Tech Stack: ${project.techStack.join(', ')}\n`;
- }
-
- summary += `\n`;
-
- if (contextSummary.recentDecisions.length > 0) {
- summary += `๐ Recent Decisions (${contextSummary.recentDecisions.length}):\n`;
- contextSummary.recentDecisions.slice(0, 3).forEach((d, i) => {
- summary += `${i + 1}. [${d.type}] ${d.description}\n`;
- });
- summary += `\n`;
- }
-
- if (fromPlatformConvs.length > 0) {
- summary += `๐ฌ Last conversation on ${fromPlatform}:\n`;
- const lastConv = fromPlatformConvs[fromPlatformConvs.length - 1];
- summary += `"${lastConv.content.substring(0, 200)}${lastConv.content.length > 200 ? '...' : ''}"\n`;
- }
-
- summary += `\nโ
Context synced and ready on ${toPlatform}!`;
-
- const handoff: HandoffContext = {
- fromPlatform,
- toPlatform,
- project,
- summary,
- conversationCount: fromPlatformConvs.length,
- decisionCount: contextSummary.recentDecisions.length,
- timestamp: new Date(),
- };
-
- // Log the handoff as a conversation
- this.storage.addConversation({
- projectId: project.id,
- tool: toPlatform,
- role: 'assistant',
- content: `[Platform Handoff] Switched from ${fromPlatform} to ${toPlatform}. ${fromPlatformConvs.length} conversations and ${contextSummary.recentDecisions.length} decisions synced.`,
- metadata: { handoff: true, fromPlatform, toPlatform },
- });
-
- return handoff;
- }
-
- /**
- * Get platform-specific context
- */
- getPlatformContext(platform: AIPlatform): string {
- const project = this.storage.getCurrentProject();
-
- if (!project) {
- return `No active project. Initialize a project to start syncing context across platforms.`;
- }
-
- const contextSummary = this.storage.getContextSummary(project.id);
- const recentConversations = this.storage.getRecentConversations(project.id, 20);
-
- // Filter by platform
- const platformConvs = recentConversations.filter(c => c.tool === platform);
- const otherConvs = recentConversations.filter(c => c.tool !== platform);
- const otherPlatforms = [...new Set(otherConvs.map(c => c.tool))];
-
- let context = `๐ฑ Current Platform: ${platform}\n\n`;
- context += `๐ Project: ${project.name}\n`;
-
- if (project.architecture) {
- context += `๐๏ธ Architecture: ${project.architecture}\n`;
- }
-
- if (project.techStack.length > 0) {
- context += `โ๏ธ Tech Stack: ${project.techStack.join(', ')}\n`;
- }
-
- context += `\n`;
-
- // Recent decisions (shared across all platforms)
- if (contextSummary.recentDecisions.length > 0) {
- context += `๐ Recent Decisions (shared across all platforms):\n`;
- contextSummary.recentDecisions.slice(0, 5).forEach((d, i) => {
- context += `${i + 1}. [${d.type}] ${d.description}\n`;
- if (d.reasoning) {
- context += ` Reasoning: ${d.reasoning}\n`;
- }
- });
- context += `\n`;
- }
-
- // Platform-specific conversations
- if (platformConvs.length > 0) {
- context += `๐ฌ Your conversations on ${platform} (${platformConvs.length} total):\n`;
- platformConvs.slice(-3).forEach((conv, i) => {
- const snippet = conv.content.substring(0, 100);
- const time = new Date(conv.timestamp).toLocaleString();
- context += `${i + 1}. [${time}] ${conv.role}: ${snippet}...\n`;
- });
- context += `\n`;
- }
-
- // Cross-platform activity
- if (otherPlatforms.length > 0) {
- context += `๐ Activity on other platforms:\n`;
- otherPlatforms.forEach(otherPlatform => {
- const count = otherConvs.filter(c => c.tool === otherPlatform).length;
- context += ` โข ${otherPlatform}: ${count} conversations\n`;
- });
- context += `\n๐ก All context is automatically synced!\n`;
- }
-
- return context;
- }
-
- /**
- * Detect which platform is being used based on environment
- * This is a heuristic - not 100% accurate but covers most common cases
- */
- static detectPlatform(): AIPlatform {
- // Check environment variables and process info
- const processTitle = process.title?.toLowerCase() || '';
- const processArgv = process.argv.join(' ').toLowerCase();
-
- // Cursor IDE detection
- if (process.env.CURSOR_IDE ||
- process.env.CURSOR_VERSION ||
- processTitle.includes('cursor') ||
- processArgv.includes('cursor')) {
- return 'cursor';
- }
-
- // Zed Editor detection
- if (process.env.ZED_EDITOR ||
- processTitle.includes('zed') ||
- processArgv.includes('zed')) {
- return 'zed';
- }
-
- // VS Code with various extensions
- if (process.env.VSCODE_PID ||
- process.env.VSCODE_CWD ||
- processTitle.includes('code') ||
- processArgv.includes('code')) {
-
- // Continue.dev extension
- if (process.env.CONTINUE_GLOBAL_DIR ||
- processArgv.includes('continue')) {
- return 'continue';
- }
-
- // GitHub Copilot
- if (process.env.GITHUB_COPILOT_TOKEN ||
- process.env.GITHUB_TOKEN) {
- return 'copilot';
- }
-
- // Codeium now integrated into Windsurf - check for Windsurf instead
- // (Legacy Codeium detection moved to Windsurf detection above)
-
- // TabNine
- if (processArgv.includes('tabnine')) {
- return 'tabnine';
- }
- }
-
- // Windsurf by Codeium
- if (process.env.WINDSURF_IDE ||
- processTitle.includes('windsurf') ||
- processArgv.includes('windsurf')) {
- return 'windsurf';
- }
-
- // Default to Claude Desktop if no specific detection
- return 'claude';
- }
-
- /**
- * Get configuration paths for different platforms
- */
- static getConfigPaths(): Record {
- const homeDir = os.homedir();
- const platform = os.platform();
-
- const paths: Record = {};
-
- // Claude Desktop config
- if (platform === 'win32') {
- paths.claude = path.join(homeDir, 'AppData', 'Roaming', 'Claude', 'claude_desktop_config.json');
- } else if (platform === 'darwin') {
- paths.claude = path.join(homeDir, 'Library', 'Application Support', 'Claude', 'claude_desktop_config.json');
- } else {
- paths.claude = path.join(homeDir, '.config', 'Claude', 'claude_desktop_config.json');
- }
-
- // Cursor config
- paths.cursor = path.join(homeDir, '.cursor', 'mcp.json');
-
- // VS Code config (used by multiple extensions)
- if (platform === 'win32') {
- paths.vscode = path.join(homeDir, 'AppData', 'Roaming', 'Code', 'User', 'settings.json');
- paths.copilot = paths.vscode; // GitHub Copilot uses VS Code settings
- } else if (platform === 'darwin') {
- paths.vscode = path.join(homeDir, 'Library', 'Application Support', 'Code', 'User', 'settings.json');
- paths.copilot = paths.vscode;
- } else {
- paths.vscode = path.join(homeDir, '.config', 'Code', 'User', 'settings.json');
- paths.copilot = paths.vscode;
- }
-
- // Continue.dev config (YAML format)
- if (platform === 'win32') {
- paths.continue = path.join(homeDir, '.continue', 'config.yaml');
- } else if (platform === 'darwin') {
- paths.continue = path.join(homeDir, '.continue', 'config.yaml');
- } else {
- paths.continue = path.join(homeDir, '.continue', 'config.yaml');
- }
-
- // Zed config
- if (platform === 'win32') {
- paths.zed = path.join(homeDir, 'AppData', 'Roaming', 'Zed', 'settings.json');
- } else if (platform === 'darwin') {
- paths.zed = path.join(homeDir, 'Library', 'Application Support', 'Zed', 'settings.json');
- } else {
- paths.zed = path.join(homeDir, '.config', 'zed', 'settings.json');
- }
-
- // Windsurf config (similar to Cursor)
- paths.windsurf = path.join(homeDir, '.windsurf', 'mcp.json');
-
- // TabNine native MCP support
- paths.tabnine = path.join(homeDir, '.tabnine', 'mcp_servers.json');
-
- return paths;
- }
-
- /**
- * Check if Context Sync is configured for a platform
- */
- static isConfigured(platform: AIPlatform): boolean {
- const paths = PlatformSync.getConfigPaths();
-
- try {
- switch (platform) {
- case 'claude':
- if (!fs.existsSync(paths.claude)) return false;
- const claudeConfig = JSON.parse(fs.readFileSync(paths.claude, 'utf-8'));
- return !!(claudeConfig.mcpServers?.['context-sync']);
-
- case 'cursor':
- if (!fs.existsSync(paths.cursor)) return false;
- const cursorConfig = JSON.parse(fs.readFileSync(paths.cursor, 'utf-8'));
- return !!(cursorConfig.mcpServers?.['context-sync']);
-
- case 'copilot':
- // GitHub Copilot uses VS Code MCP extension
- if (!fs.existsSync(paths.vscode)) return false;
- const vscodeConfig = JSON.parse(fs.readFileSync(paths.vscode, 'utf-8'));
- return !!(vscodeConfig['mcp.servers']?.['context-sync']);
-
- case 'tabnine':
- // TabNine uses native MCP support via .tabnine/mcp_servers.json
- if (!fs.existsSync(paths.tabnine)) return false;
- const tabnineConfig = JSON.parse(fs.readFileSync(paths.tabnine, 'utf-8'));
- return !!(tabnineConfig.mcpServers?.['context-sync']);
-
- case 'continue':
- // Check global config first
- if (fs.existsSync(paths.continue)) {
- try {
- const yaml = require('js-yaml');
- const continueConfig = yaml.load(fs.readFileSync(paths.continue, 'utf-8')) || {};
- const mcpServers = Array.isArray(continueConfig.mcpServers) ? continueConfig.mcpServers : [];
- // Check if Context Sync is in the array
- const found = mcpServers.some((s: any) => {
- if (!s) return false;
- if (s.name && typeof s.name === 'string' && s.name.toLowerCase().includes('context')) return true;
- if (s.command && typeof s.command === 'string' && (s.command.includes('context-sync') ||
- (Array.isArray(s.args) && s.args.some((arg: any) => typeof arg === 'string' && arg.includes('context-sync'))))) {
- return true;
- }
- return false;
- });
- if (found) return true;
- } catch {
- // Continue to workspace check if global config fails
- }
- }
-
- // Check workspace config (.continue/mcpServers/context-sync.yaml)
- const workspaceContinueDir = path.join(process.cwd(), '.continue', 'mcpServers');
- if (fs.existsSync(workspaceContinueDir)) {
- const contextSyncYaml = path.join(workspaceContinueDir, 'context-sync.yaml');
- if (fs.existsSync(contextSyncYaml)) {
- try {
- const content = fs.readFileSync(contextSyncYaml, 'utf-8');
- if (content.includes('Context Sync') || content.includes('context-sync') || content.includes('@context-sync/server')) {
- return true;
- }
- } catch {
- // Ignore errors
- }
- }
- }
- return false;
-
- case 'zed':
- if (!fs.existsSync(paths.zed)) return false;
- const zedConfig = JSON.parse(fs.readFileSync(paths.zed, 'utf-8'));
- return !!(zedConfig.mcpServers?.['context-sync']);
-
- case 'windsurf':
- if (!fs.existsSync(paths.windsurf)) return false;
- const windsurfConfig = JSON.parse(fs.readFileSync(paths.windsurf, 'utf-8'));
- return !!(windsurfConfig.mcpServers?.['context-sync']);
-
- default:
- return false;
- }
- } catch {
- return false;
- }
- }
-
- /**
- * Get status of all platform configurations
- */
- static getPlatformStatus(): Record {
- return {
- claude: PlatformSync.isConfigured('claude'),
- cursor: PlatformSync.isConfigured('cursor'),
- copilot: PlatformSync.isConfigured('copilot'),
- continue: PlatformSync.isConfigured('continue'),
- tabnine: PlatformSync.isConfigured('tabnine'),
- windsurf: PlatformSync.isConfigured('windsurf'),
- zed: PlatformSync.isConfigured('zed'),
- notion: PlatformSync.isConfigured('notion'),
- other: false,
- };
- }
-
- /**
- * Generate installation instructions for a platform
- */
- static getInstallInstructions(platform: AIPlatform): string {
- const paths = PlatformSync.getConfigPaths();
-
- let instructions = `# Install Context Sync for ${platform.charAt(0).toUpperCase() + platform.slice(1)}\n\n`;
-
- switch (platform) {
- case 'claude':
- instructions += `1. Open: ${paths.claude}\n`;
- instructions += `2. Add this to the "mcpServers" section:\n\n`;
- instructions += `"context-sync": {\n`;
- instructions += ` "command": "npx",\n`;
- instructions += ` "args": ["-y", "@context-sync/server"]\n`;
- instructions += `}\n\n`;
- instructions += `3. Restart Claude Desktop\n`;
- break;
-
- case 'cursor':
- instructions += `1. Open: ${paths.cursor}\n`;
- instructions += ` Or go to: Cursor โ Settings โ MCP\n\n`;
- instructions += `2. Add this to the "mcpServers" section:\n\n`;
- instructions += `"context-sync": {\n`;
- instructions += ` "command": "npx",\n`;
- instructions += ` "args": ["-y", "@context-sync/server"]\n`;
- instructions += `}\n\n`;
- instructions += `3. Refresh MCP servers in Cursor settings\n`;
- break;
-
- case 'copilot':
- instructions += `1. Install MCP extension for VS Code\n`;
- instructions += `2. Open VS Code settings: ${paths.vscode}\n`;
- instructions += `3. Add this to your settings.json:\n\n`;
- instructions += `"mcp.servers": {\n`;
- instructions += ` "context-sync": {\n`;
- instructions += ` "command": "npx",\n`;
- instructions += ` "args": ["-y", "@context-sync/server"]\n`;
- instructions += ` }\n`;
- instructions += `}\n\n`;
- instructions += `4. Reload VS Code window\n`;
- instructions += `5. Open Copilot Chat and look for Context Sync in available tools\n`;
- break;
-
- case 'continue':
- instructions += `1. Install Continue.dev extension in VS Code\n`;
- instructions += `2. Choose ONE of the following methods:\n\n`;
- instructions += ` Option A: Global Config (applies to all workspaces)\n`;
- instructions += ` - Open: ${paths.continue}\n`;
- instructions += ` - Add this to the "mcpServers" array:\n\n`;
- instructions += ` mcpServers:\n`;
- instructions += ` - name: Context Sync\n`;
- instructions += ` type: stdio\n`;
- instructions += ` command: npx\n`;
- instructions += ` args:\n`;
- instructions += ` - -y\n`;
- instructions += ` - @context-sync/server\n`;
- instructions += ` env: {}\n\n`;
- instructions += ` Option B: Workspace Config (per-project)\n`;
- instructions += ` - Create: .continue/mcpServers/context-sync.yaml\n`;
- instructions += ` - Add this content:\n\n`;
- instructions += ` name: Context Sync\n`;
- instructions += ` type: stdio\n`;
- instructions += ` command: npx\n`;
- instructions += ` args:\n`;
- instructions += ` - -y\n`;
- instructions += ` - @context-sync/server\n`;
- instructions += ` env: {}\n\n`;
- instructions += `3. Restart VS Code or reload the Continue.dev extension\n`;
- instructions += `4. Make sure Agent Mode is enabled in Continue.dev\n`;
- break;
-
- case 'zed':
- instructions += `1. Install Context Sync extension for Zed (coming soon)\n`;
- instructions += `2. Or configure manually in: ${paths.zed}\n`;
- instructions += `3. Add MCP server configuration\n`;
- instructions += `\nNote: Zed MCP support is experimental\n`;
- break;
-
- case 'windsurf':
- instructions += `1. Open: ${paths.windsurf}\n`;
- instructions += `2. Add this to the "mcpServers" section:\n\n`;
- instructions += `"context-sync": {\n`;
- instructions += ` "command": "npx",\n`;
- instructions += ` "args": ["-y", "@context-sync/server"]\n`;
- instructions += `}\n\n`;
- instructions += `3. Restart Windsurf\n`;
- break;
-
-
-
- case 'tabnine':
- instructions += `1. Install TabNine extension in your IDE\n`;
- instructions += `2. Create/edit config: ${paths.tabnine}\n`;
- instructions += `3. Add Context Sync server configuration:\n`;
- instructions += ` {\n`;
- instructions += ` "mcpServers": {\n`;
- instructions += ` "context-sync": {\n`;
- instructions += ` "command": "npx",\n`;
- instructions += ` "args": ["-y", "@context-sync/server"]\n`;
- instructions += ` }\n`;
- instructions += ` }\n`;
- instructions += ` }\n`;
- instructions += `4. Restart TabNine Agent\n`;
- instructions += `\nNote: Uses TabNine's native MCP support\n`;
- break;
-
-
-
- default:
- instructions += `Platform "${platform}" is not yet supported.\n`;
- instructions += `\nWant to add support? Contribute at:\n`;
- instructions += `https://github.com/Intina47/context-sync\n`;
- }
-
- return instructions;
- }
-}
diff --git a/src/project-analyzers.ts b/src/project-analyzers.ts
new file mode 100644
index 0000000..1ae221d
--- /dev/null
+++ b/src/project-analyzers.ts
@@ -0,0 +1,466 @@
+๏ปฟ/**
+ * Project Analyzers - Layer 2: Interpretation
+ * Work on cached ProjectFiles data, no file system access
+ */
+
+import { ProjectFiles } from './project-scanner.js';
+
+export interface DependencyInfo {
+ name: string;
+ version: string;
+ critical: boolean;
+ dev: boolean;
+}
+
+export interface BuildSystem {
+ type: 'npm' | 'gradle' | 'maven' | 'make' | 'cargo' | 'go' | 'poetry' | 'unknown';
+ commands: Record;
+ configFile: string;
+}
+
+export interface TestFramework {
+ name: string;
+ pattern: string;
+ configFile?: string;
+ coverage: number | null;
+}
+
+export interface EnvVarInfo {
+ required: string[];
+ optional: string[];
+ example: Record;
+ envFiles: string[];
+}
+
+export interface ServiceInfo {
+ name: string;
+ port: number | null;
+ protocol: 'http' | 'https' | 'grpc' | 'websocket' | 'unknown';
+ healthCheck?: string;
+}
+
+export interface DatabaseInfo {
+ type: 'postgres' | 'mysql' | 'mongodb' | 'sqlite' | 'redis' | 'unknown';
+ connectionVar?: string;
+ migrations: boolean;
+ migrationsPath?: string;
+}
+
+export class DependencyAnalyzer {
+ static analyze(files: ProjectFiles): DependencyInfo[] {
+ const deps: DependencyInfo[] = [];
+
+ // Node.js: Use lockfile first (exact versions), fall back to package.json
+ if (files.packageLock) {
+ // package-lock.json v2/v3 format
+ const packages = files.packageLock.packages || files.packageLock.dependencies;
+ if (packages) {
+ for (const [name, info] of Object.entries(packages)) {
+ if (!name || name === '') continue; // Root package
+
+ const pkgInfo = info as any;
+ const pkgName = name.replace(/^node_modules\//, '');
+ deps.push({
+ name: pkgName,
+ version: pkgInfo.version || 'unknown',
+ critical: !pkgInfo.dev,
+ dev: !!pkgInfo.dev
+ });
+ }
+ }
+ } else if (files.packageJson) {
+ // Fall back to package.json (but versions will be ranges)
+ const pkg = files.packageJson;
+
+ if (pkg.dependencies) {
+ for (const [name, version] of Object.entries(pkg.dependencies)) {
+ deps.push({
+ name,
+ version: this.cleanVersion(version as string),
+ critical: true,
+ dev: false
+ });
+ }
+ }
+
+ if (pkg.devDependencies) {
+ for (const [name, version] of Object.entries(pkg.devDependencies)) {
+ deps.push({
+ name,
+ version: this.cleanVersion(version as string),
+ critical: false,
+ dev: true
+ });
+ }
+ }
+ }
+
+ // Go: Parse go.mod (direct) and go.sum (exact versions)
+ if (files.goMod) {
+ const goDeps = this.parseGoMod(files.goMod, files.goSum);
+ deps.push(...goDeps);
+ }
+
+ // Rust: Parse Cargo.lock (exact) or Cargo.toml
+ if (files.cargoLock) {
+ const rustDeps = this.parseCargoLock(files.cargoLock);
+ deps.push(...rustDeps);
+ } else if (files.cargoToml) {
+ const rustDeps = this.parseCargoToml(files.cargoToml);
+ deps.push(...rustDeps);
+ }
+
+ return deps;
+ }
+
+ private static parseGoMod(goMod: string, goSum?: string): DependencyInfo[] {
+ const deps: DependencyInfo[] = [];
+ const lines = goMod.split('\n');
+ let inRequire = false;
+
+ for (const line of lines) {
+ const trimmed = line.trim();
+
+ // Inline require
+ if (trimmed.startsWith('require ')) {
+ const match = trimmed.match(/require\s+(\S+)\s+v?(\S+)/);
+ if (match) {
+ deps.push({
+ name: match[1],
+ version: match[2],
+ critical: true,
+ dev: false
+ });
+ }
+ continue;
+ }
+
+ // Require block
+ if (trimmed === 'require (') {
+ inRequire = true;
+ continue;
+ }
+
+ if (trimmed === ')' && inRequire) {
+ inRequire = false;
+ continue;
+ }
+
+ if (inRequire && trimmed && !trimmed.startsWith('//')) {
+ const match = trimmed.match(/(\S+)\s+v?(\S+)/);
+ if (match) {
+ const isDev = trimmed.includes('// indirect');
+ deps.push({
+ name: match[1],
+ version: match[2],
+ critical: !isDev,
+ dev: isDev
+ });
+ }
+ }
+ }
+
+ return deps;
+ }
+
+ private static parseCargoLock(cargoLock: any): DependencyInfo[] {
+ const deps: DependencyInfo[] = [];
+
+ if (cargoLock.package) {
+ for (const pkg of cargoLock.package) {
+ deps.push({
+ name: pkg.name,
+ version: pkg.version,
+ critical: true,
+ dev: false
+ });
+ }
+ }
+
+ return deps;
+ }
+
+ private static parseCargoToml(cargoToml: any): DependencyInfo[] {
+ const deps: DependencyInfo[] = [];
+
+ if (cargoToml.dependencies) {
+ for (const [name, info] of Object.entries(cargoToml.dependencies)) {
+ const version = typeof info === 'string' ? info : (info as any).version || 'unknown';
+ deps.push({
+ name,
+ version,
+ critical: true,
+ dev: false
+ });
+ }
+ }
+
+ if (cargoToml['dev-dependencies']) {
+ for (const [name, info] of Object.entries(cargoToml['dev-dependencies'])) {
+ const version = typeof info === 'string' ? info : (info as any).version || 'unknown';
+ deps.push({
+ name,
+ version,
+ critical: false,
+ dev: true
+ });
+ }
+ }
+
+ return deps;
+ }
+
+ private static cleanVersion(version: string): string {
+ return version.replace(/[\^\~]/, '');
+ }
+}
+
+export class BuildSystemAnalyzer {
+ static analyze(files: ProjectFiles): BuildSystem {
+ // Node.js
+ if (files.packageJson) {
+ return {
+ type: 'npm',
+ commands: files.packageJson.scripts || {},
+ configFile: 'package.json'
+ };
+ }
+
+ // Gradle
+ if (files.cargoToml) {
+ return {
+ type: 'cargo',
+ commands: {
+ build: 'cargo build',
+ test: 'cargo test',
+ start: 'cargo run'
+ },
+ configFile: 'Cargo.toml'
+ };
+ }
+
+ // Go
+ if (files.goMod) {
+ return {
+ type: 'go',
+ commands: {
+ build: 'go build',
+ test: 'go test ./...',
+ start: 'go run .'
+ },
+ configFile: 'go.mod'
+ };
+ }
+
+ return {
+ type: 'unknown',
+ commands: {},
+ configFile: 'none'
+ };
+ }
+}
+
+export class TestFrameworkAnalyzer {
+ static analyze(files: ProjectFiles): TestFramework | null {
+ // Jest
+ if (files.jestConfig) {
+ return {
+ name: 'Jest',
+ pattern: '**/*.test.{js,ts,jsx,tsx}',
+ configFile: files.jestConfig.file || 'jest.config.json',
+ coverage: null
+ };
+ }
+
+ // Vitest
+ if (files.vitestConfig) {
+ return {
+ name: 'Vitest',
+ pattern: '**/*.test.{js,ts}',
+ configFile: 'vitest.config.ts',
+ coverage: null
+ };
+ }
+
+ // pytest
+ if (files.pytestConfig) {
+ return {
+ name: 'pytest',
+ pattern: '**/test_*.py',
+ configFile: 'pytest.ini',
+ coverage: null
+ };
+ }
+
+ // Check package.json scripts
+ if (files.packageJson?.scripts?.test) {
+ const script = files.packageJson.scripts.test;
+ if (script.includes('jest')) {
+ return { name: 'Jest', pattern: '**/*.test.{js,ts}', coverage: null };
+ }
+ if (script.includes('vitest')) {
+ return { name: 'Vitest', pattern: '**/*.test.{js,ts}', coverage: null };
+ }
+ }
+
+ // Check for Go testing (built-in)
+ if (files.goMod) {
+ return {
+ name: 'Go testing',
+ pattern: '**/*_test.go',
+ configFile: 'go.mod',
+ coverage: null
+ };
+ }
+
+ // Check for Rust testing (built-in)
+ if (files.cargoToml) {
+ return {
+ name: 'Rust testing',
+ pattern: 'tests/**/*.rs',
+ configFile: 'Cargo.toml',
+ coverage: null
+ };
+ }
+
+ return null;
+ }
+}
+
+export class EnvVarAnalyzer {
+ static analyze(files: ProjectFiles): EnvVarInfo {
+ const required = new Set();
+ const optional = new Set();
+ const example: Record = {};
+ const envFiles: string[] = [];
+
+ for (const [filename, content] of files.envFiles) {
+ envFiles.push(filename);
+ const vars = this.parseEnvFile(content);
+
+ for (const [key, value] of vars) {
+ example[key] = value;
+
+ // .env.example vars are required if it's the only env file
+ if (filename.includes('example') && files.envFiles.size === 1) {
+ required.add(key);
+ } else if (filename === '.env') {
+ required.add(key);
+ } else {
+ optional.add(key);
+ }
+ }
+ }
+
+ // Remove from optional if in required
+ optional.forEach(key => {
+ if (required.has(key)) optional.delete(key);
+ });
+
+ return {
+ required: Array.from(required),
+ optional: Array.from(optional),
+ example,
+ envFiles
+ };
+ }
+
+ private static parseEnvFile(content: string): Map {
+ const vars = new Map();
+ const lines = content.split('\n');
+
+ for (const line of lines) {
+ const trimmed = line.trim();
+ if (!trimmed || trimmed.startsWith('#')) continue;
+
+ const match = trimmed.match(/^([A-Z_][A-Z0-9_]*)\s*=\s*(.*)$/);
+ if (match) {
+ vars.set(match[1], match[2].replace(/^["']|["']$/g, ''));
+ }
+ }
+
+ return vars;
+ }
+}
+
+export class ServiceAnalyzer {
+ static analyze(files: ProjectFiles): ServiceInfo[] {
+ const services: ServiceInfo[] = [];
+
+ // Check package.json scripts for ports
+ if (files.packageJson?.scripts) {
+ for (const [name, script] of Object.entries(files.packageJson.scripts)) {
+ const portMatch = (script as string).match(/:(\d+)/);
+ if (portMatch) {
+ services.push({
+ name: name.includes('dev') ? 'Development Server' : 'Server',
+ port: parseInt(portMatch[1]),
+ protocol: 'http',
+ });
+ }
+ }
+ }
+
+ // Scan main files for .listen() calls
+ for (const [file, content] of files.mainFiles) {
+ const listenMatch = content.match(/\.listen\s*\(\s*(\d+)/);
+ if (listenMatch) {
+ const framework = this.detectFramework(content);
+ services.push({
+ name: framework || 'Server',
+ port: parseInt(listenMatch[1]),
+ protocol: 'http'
+ });
+ }
+ }
+
+ // Deduplicate by port
+ const seen = new Set();
+ return services.filter(s => {
+ if (s.port && seen.has(s.port)) return false;
+ if (s.port) seen.add(s.port);
+ return true;
+ });
+ }
+
+ private static detectFramework(content: string): string | null {
+ if (content.includes('express')) return 'Express';
+ if (content.includes('fastify')) return 'Fastify';
+ if (content.includes('next')) return 'Next.js';
+ return null;
+ }
+}
+
+export class DatabaseAnalyzer {
+ static analyze(files: ProjectFiles): DatabaseInfo[] {
+ const databases: DatabaseInfo[] = [];
+ const deps = DependencyAnalyzer.analyze(files);
+
+ // Check dependencies for database drivers
+ const dbMap: Record = {
+ 'pg': 'postgres',
+ 'postgres': 'postgres',
+ 'mysql': 'mysql',
+ 'mysql2': 'mysql',
+ 'mongodb': 'mongodb',
+ 'mongoose': 'mongodb',
+ 'sqlite3': 'sqlite',
+ 'better-sqlite3': 'sqlite',
+ 'redis': 'redis'
+ };
+
+ for (const dep of deps) {
+ const dbType = dbMap[dep.name.toLowerCase()];
+ if (dbType) {
+ databases.push({
+ type: dbType,
+ migrations: false, // TODO: detect migration systems
+ migrationsPath: undefined
+ });
+ }
+ }
+
+ return databases;
+ }
+}
+
diff --git a/src/project-cache.ts b/src/project-cache.ts
new file mode 100644
index 0000000..6a40abc
--- /dev/null
+++ b/src/project-cache.ts
@@ -0,0 +1,155 @@
+๏ปฟ/**
+ * Project Cache - Layer 3: Optimization
+ * Intelligent caching with invalidation based on git HEAD + file mtimes
+ */
+
+import { promises as fs } from 'fs';
+import { join } from 'path';
+import simpleGit from 'simple-git';
+
+interface CacheEntry {
+ data: T;
+ gitHead: string;
+ manifestMtime: number;
+ timestamp: number;
+}
+
+interface CacheKey {
+ projectPath: string;
+ gitHead: string;
+ manifestMtime: number;
+}
+
+export class ProjectCache {
+ private cache = new Map>();
+ private ttlMs = 60 * 60 * 1000; // 1 hour
+
+ /**
+ * Get cached data if valid
+ */
+ async get(projectPath: string): Promise {
+ const key = await this.getCacheKey(projectPath);
+ if (!key) return null;
+
+ const cacheKey = this.buildKey(key);
+ const entry = this.cache.get(cacheKey);
+
+ if (!entry) return null;
+
+ // Check TTL
+ if (Date.now() - entry.timestamp > this.ttlMs) {
+ this.cache.delete(cacheKey);
+ return null;
+ }
+
+ // Validate cache still valid
+ if (entry.gitHead !== key.gitHead || entry.manifestMtime !== key.manifestMtime) {
+ this.cache.delete(cacheKey);
+ return null;
+ }
+
+ return entry.data as T;
+ }
+
+ /**
+ * Store data in cache
+ */
+ async set(projectPath: string, data: T): Promise {
+ const key = await this.getCacheKey(projectPath);
+ if (!key) return;
+
+ const cacheKey = this.buildKey(key);
+ this.cache.set(cacheKey, {
+ data,
+ gitHead: key.gitHead,
+ manifestMtime: key.manifestMtime,
+ timestamp: Date.now()
+ });
+ }
+
+ /**
+ * Invalidate cache for a project
+ */
+ async invalidate(projectPath: string): Promise {
+ const key = await this.getCacheKey(projectPath);
+ if (!key) return;
+
+ const cacheKey = this.buildKey(key);
+ this.cache.delete(cacheKey);
+ }
+
+ /**
+ * Clear all cache
+ */
+ clear(): void {
+ this.cache.clear();
+ }
+
+ /**
+ * Get cache statistics
+ */
+ stats(): { size: number; keys: string[] } {
+ return {
+ size: this.cache.size,
+ keys: Array.from(this.cache.keys())
+ };
+ }
+
+ /**
+ * Build cache key from components
+ */
+ private buildKey(key: CacheKey): string {
+ return `${key.projectPath.toLowerCase()}:${key.gitHead}:${key.manifestMtime}`;
+ }
+
+ /**
+ * Get cache key components for a project
+ */
+ private async getCacheKey(projectPath: string): Promise {
+ try {
+ // Get git HEAD
+ const git = simpleGit(projectPath);
+ let gitHead = 'no-git';
+
+ try {
+ const log = await git.log({ maxCount: 1 });
+ gitHead = log.latest?.hash || 'no-commits';
+ } catch (err) {
+ // Not a git repo or no commits
+ }
+
+ // Get manifest file mtime (use the first that exists)
+ const manifestFiles = [
+ 'package.json',
+ 'go.mod',
+ 'Cargo.toml',
+ 'pyproject.toml',
+ 'pom.xml',
+ 'build.gradle'
+ ];
+
+ let manifestMtime = 0;
+ for (const file of manifestFiles) {
+ try {
+ const stat = await fs.stat(join(projectPath, file));
+ manifestMtime = stat.mtimeMs;
+ break;
+ } catch (err) {
+ continue;
+ }
+ }
+
+ return {
+ projectPath: projectPath.toLowerCase(),
+ gitHead,
+ manifestMtime
+ };
+ } catch (err) {
+ return null;
+ }
+ }
+}
+
+// Global cache instance
+export const projectCache = new ProjectCache();
+
diff --git a/src/project-detector.ts b/src/project-detector.ts
index 71c1d7b..85b1767 100644
--- a/src/project-detector.ts
+++ b/src/project-detector.ts
@@ -1,4 +1,4 @@
-// Automatic project detection from filesystem
+๏ปฟ// Automatic project detection from filesystem
import * as fs from 'fs';
import { promises as fsAsync } from 'fs';
@@ -283,7 +283,7 @@ export class ProjectDetector {
const metadata = await this.detectFromPath(normalizedPath);
if (!metadata) {
- console.error('โ No project detected at:', displayPath);
+ console.error(' No project detected at:', displayPath);
return;
}
@@ -298,8 +298,8 @@ export class ProjectDetector {
architecture: metadata.architecture || existing.architecture,
});
- // โ
No longer setting current project - that's session state now!
- console.error(`๐ Updated project: ${existing.name}`);
+ // No longer setting current project - that's session state now!
+ console.error(` Updated project: ${existing.name}`);
} else {
// Create new project (using normalized path for storage)
const projectName = metadata.name || path.basename(normalizedPath);
@@ -308,8 +308,9 @@ export class ProjectDetector {
techStack: metadata.techStack,
architecture: metadata.architecture,
});
- console.error(`๐ Auto-detected project: ${metadata.name}`);
+ console.error(` Auto-detected project: ${metadata.name}`);
console.error(` Tech Stack: ${metadata.techStack.join(', ')}`);
}
}
-}
\ No newline at end of file
+}
+
diff --git a/src/project-metrics.ts b/src/project-metrics.ts
new file mode 100644
index 0000000..b11a9c1
--- /dev/null
+++ b/src/project-metrics.ts
@@ -0,0 +1,128 @@
+๏ปฟ/**
+ * Project Metrics Analyzer
+ * Calculates LOC, file counts, complexity from scanned files
+ */
+
+import { promises as fs } from 'fs';
+import { join, extname } from 'path';
+
+export interface ProjectMetrics {
+ linesOfCode: number;
+ fileCount: number;
+ complexity: number | null;
+ lastUpdated: string;
+}
+
+export class MetricsAnalyzer {
+ private projectPath: string;
+
+ constructor(projectPath: string) {
+ this.projectPath = projectPath;
+ }
+
+ async analyze(): Promise {
+ const directories = [
+ this.projectPath, // Root
+ join(this.projectPath, 'src'),
+ join(this.projectPath, 'lib'),
+ join(this.projectPath, 'app'),
+ join(this.projectPath, 'pkg'),
+ join(this.projectPath, 'cmd'),
+ ];
+
+ const files = new Set();
+ let linesOfCode = 0;
+
+ for (const dir of directories) {
+ try {
+ const dirFiles = await this.scanDirectory(dir);
+ dirFiles.forEach(f => files.add(f));
+ } catch (err) {
+ // Directory doesn't exist, continue
+ }
+ }
+
+ // Count LOC for all unique files
+ for (const file of files) {
+ try {
+ const content = await fs.readFile(file, 'utf-8');
+ linesOfCode += this.countLOC(content);
+ } catch (err) {
+ // File read error, skip
+ }
+ }
+
+ return {
+ linesOfCode,
+ fileCount: files.size,
+ complexity: null, // TODO: Implement cyclomatic complexity
+ lastUpdated: new Date().toISOString()
+ };
+ }
+
+ private async scanDirectory(dir: string): Promise {
+ const files: string[] = [];
+ const skipDirs = new Set([
+ 'node_modules', 'vendor', 'dist', 'build', 'out', '.next',
+ 'coverage', '.git', '.svn', 'target', 'bin', 'obj',
+ '__pycache__', '.venv', 'venv', '.pytest_cache',
+ 'packaging', 'scripts', '.github', '.vscode', 'docs',
+ 'third_party', '.idea', '.gradle'
+ ]);
+
+ const extensions = new Set([
+ '.js', '.ts', '.jsx', '.tsx', '.py', '.go', '.rs',
+ '.java', '.cpp', '.c', '.h', '.rb', '.php', '.cs'
+ ]);
+
+ const scan = async (currentDir: string) => {
+ try {
+ const entries = await fs.readdir(currentDir, { withFileTypes: true });
+
+ for (const entry of entries) {
+ const fullPath = join(currentDir, entry.name);
+
+ if (entry.isDirectory()) {
+ if (!skipDirs.has(entry.name) && !entry.name.startsWith('.')) {
+ await scan(fullPath);
+ }
+ } else if (entry.isFile()) {
+ const ext = extname(entry.name);
+ if (extensions.has(ext)) {
+ files.push(fullPath);
+ }
+ }
+ }
+ } catch (err) {
+ // Permission error or directory doesn't exist
+ }
+ };
+
+ await scan(dir);
+ return files;
+ }
+
+ private countLOC(content: string): number {
+ const lines = content.split('\n');
+ let count = 0;
+
+ for (const line of lines) {
+ const trimmed = line.trim();
+
+ // Skip empty lines
+ if (trimmed === '') continue;
+
+ // Skip comments
+ if (trimmed.startsWith('//')) continue;
+ if (trimmed.startsWith('#')) continue;
+ if (trimmed.startsWith('/*')) continue;
+ if (trimmed.startsWith('*')) continue;
+ if (trimmed.startsWith('*/')) continue;
+
+ count++;
+ }
+
+ return count;
+ }
+}
+
diff --git a/src/project-profiler.ts b/src/project-profiler.ts
new file mode 100644
index 0000000..df0d2ed
--- /dev/null
+++ b/src/project-profiler.ts
@@ -0,0 +1,161 @@
+๏ปฟ/**
+ * Project Profiler
+ * Uses 3-layer architecture: Scanner -> Analyzer -> Cache
+ * 5-10x faster first scan, 160x faster cached
+ */
+
+import { ProjectScanner } from './project-scanner.js';
+import {
+ DependencyAnalyzer,
+ BuildSystemAnalyzer,
+ TestFrameworkAnalyzer,
+ EnvVarAnalyzer,
+ ServiceAnalyzer,
+ DatabaseAnalyzer,
+ type DependencyInfo,
+ type BuildSystem,
+ type TestFramework,
+ type EnvVarInfo,
+ type ServiceInfo,
+ type DatabaseInfo
+} from './project-analyzers.js';
+import { MetricsAnalyzer, type ProjectMetrics } from './project-metrics.js';
+import { projectCache } from './project-cache.js';
+
+export interface ProjectAnalysis {
+ // Identity
+ projectPath: string;
+ architecture: string;
+ techStack: string[];
+
+ // 7 Enhanced Dimensions
+ dependencies: DependencyInfo[];
+ buildSystem: BuildSystem;
+ testFramework: TestFramework | null;
+ envVars: EnvVarInfo;
+ services: ServiceInfo[];
+ databases: DatabaseInfo[];
+ metrics: ProjectMetrics;
+
+ // Metadata
+ cached: boolean;
+ scanTimeMs: number;
+}
+
+export class ProjectProfiler {
+ /**
+ * Analyze a project (with caching)
+ */
+ static async analyze(projectPath: string): Promise {
+ const startTime = Date.now();
+
+ // Check cache first
+ const cached = await projectCache.get(projectPath);
+ if (cached) {
+ return {
+ ...cached,
+ cached: true,
+ scanTimeMs: Date.now() - startTime
+ };
+ }
+
+ // Layer 1: Scan (single file system pass)
+ const scanner = new ProjectScanner(projectPath);
+ const files = await scanner.scan();
+
+ // Layer 2: Analyze (no file I/O, work on cached data)
+ const dependencies = DependencyAnalyzer.analyze(files);
+ const buildSystem = BuildSystemAnalyzer.analyze(files);
+ const testFramework = TestFrameworkAnalyzer.analyze(files);
+ const envVars = EnvVarAnalyzer.analyze(files);
+ const services = ServiceAnalyzer.analyze(files);
+ const databases = DatabaseAnalyzer.analyze(files);
+
+ // Metrics still needs file scanning (LOC counting)
+ const metricsAnalyzer = new MetricsAnalyzer(projectPath);
+ const metrics = await metricsAnalyzer.analyze();
+
+ // Derive architecture and tech stack
+ const architecture = this.detectArchitecture(files, services);
+ const techStack = this.detectTechStack(files, dependencies);
+
+ const analysis: ProjectAnalysis = {
+ projectPath,
+ architecture,
+ techStack,
+ dependencies,
+ buildSystem,
+ testFramework,
+ envVars,
+ services,
+ databases,
+ metrics,
+ cached: false,
+ scanTimeMs: Date.now() - startTime
+ };
+
+ // Layer 3: Cache for next time
+ await projectCache.set(projectPath, analysis);
+
+ return analysis;
+ }
+
+ /**
+ * Invalidate cache for a project
+ */
+ static async invalidate(projectPath: string): Promise {
+ await projectCache.invalidate(projectPath);
+ }
+
+ /**
+ * Clear all cache
+ */
+ static clearCache(): void {
+ projectCache.clear();
+ }
+
+ /**
+ * Detect project architecture
+ */
+ private static detectArchitecture(files: any, services: ServiceInfo[]): string {
+ if (files.packageJson?.dependencies?.['next']) return 'Next.js';
+ if (files.packageJson?.dependencies?.['@nestjs/core']) return 'NestJS';
+ if (files.packageJson?.dependencies?.['express']) return 'Express';
+ if (files.packageJson?.dependencies?.['fastify']) return 'Fastify';
+ if (files.cargoToml?.package?.name) return 'Rust Binary';
+ if (files.goMod) return 'Go Module';
+ if (services.length > 0) return 'Service';
+ return 'Library';
+ }
+
+ /**
+ * Detect tech stack
+ */
+ private static detectTechStack(files: any, dependencies: DependencyInfo[]): string[] {
+ const stack = new Set();
+
+ // Languages
+ if (files.packageJson) stack.add('TypeScript');
+ if (files.goMod) stack.add('Go');
+ if (files.cargoToml) stack.add('Rust');
+
+ // Major frameworks
+ const frameworks = [
+ 'react', 'vue', 'angular', 'next', 'svelte',
+ 'express', 'fastify', 'koa', 'nest',
+ 'django', 'flask', 'fastapi'
+ ];
+
+ for (const dep of dependencies) {
+ const name = dep.name.toLowerCase();
+ for (const framework of frameworks) {
+ if (name.includes(framework)) {
+ stack.add(framework.charAt(0).toUpperCase() + framework.slice(1));
+ }
+ }
+ }
+
+ return Array.from(stack);
+ }
+}
+
diff --git a/src/project-scanner.ts b/src/project-scanner.ts
new file mode 100644
index 0000000..837151a
--- /dev/null
+++ b/src/project-scanner.ts
@@ -0,0 +1,193 @@
+๏ปฟ/**
+ * Project Scanner - Layer 1: File System Access
+ * Reads ALL config files ONCE, caches in memory
+ */
+
+import * as fs from 'fs/promises';
+import * as path from 'path';
+import { simpleGit, SimpleGit } from 'simple-git';
+
+export interface ProjectFiles {
+ // Manifests
+ packageJson?: any;
+ packageLock?: any;
+ yarnLock?: string;
+ goMod?: string;
+ goSum?: string;
+ cargoToml?: any;
+ cargoLock?: any;
+
+ // Configs
+ jestConfig?: any;
+ vitestConfig?: any;
+ pytestConfig?: string;
+
+ // Env files
+ envFiles: Map; // filename -> content
+
+ // Git info
+ git: {
+ head?: string;
+ lastCommit?: Date;
+ contributors?: number;
+ stats?: any;
+ };
+
+ // Source code samples (for service detection)
+ mainFiles: Map; // path -> content
+}
+
+export class ProjectScanner {
+ private git: SimpleGit;
+
+ constructor(private projectPath: string) {
+ this.git = simpleGit(projectPath, {
+ timeout: { block: 5000 },
+ config: []
+ });
+ }
+
+ /**
+ * Single file system scan - reads everything we need ONCE
+ */
+ async scan(): Promise {
+ const files: ProjectFiles = {
+ envFiles: new Map(),
+ mainFiles: new Map(),
+ git: {}
+ };
+
+ // Parallel reads for all potential config files
+ await Promise.all([
+ this.readManifests(files),
+ this.readConfigs(files),
+ this.readEnvFiles(files),
+ this.readGitInfo(files),
+ this.readMainFiles(files)
+ ]);
+
+ return files;
+ }
+
+ private async readManifests(files: ProjectFiles): Promise {
+ const reads = [
+ this.readJson('package.json').then(pkg => files.packageJson = pkg),
+ this.readJson('package-lock.json').then(lock => files.packageLock = lock),
+ this.readText('yarn.lock').then(yarn => files.yarnLock = yarn),
+ this.readText('go.mod').then(mod => files.goMod = mod),
+ this.readText('go.sum').then(sum => files.goSum = sum),
+ this.readToml('Cargo.toml').then(toml => files.cargoToml = toml),
+ this.readToml('Cargo.lock').then(lock => files.cargoLock = lock),
+ ];
+
+ await Promise.allSettled(reads);
+ }
+
+ private async readConfigs(files: ProjectFiles): Promise {
+ const reads = [
+ this.readJson('jest.config.json').then(cfg => files.jestConfig = cfg),
+ this.readText('pytest.ini').then(cfg => files.pytestConfig = cfg),
+ ];
+
+ // Also try .js/.ts config files
+ const jestJs = await this.fileExists('jest.config.js');
+ const vitestTs = await this.fileExists('vitest.config.ts');
+
+ if (jestJs) files.jestConfig = { exists: true, file: 'jest.config.js' };
+ if (vitestTs) files.vitestConfig = { exists: true, file: 'vitest.config.ts' };
+
+ await Promise.allSettled(reads);
+ }
+
+ private async readEnvFiles(files: ProjectFiles): Promise {
+ const envFileNames = ['.env', '.env.example', '.env.local', '.env.development', '.env.production'];
+
+ const reads = envFileNames.map(async name => {
+ const content = await this.readText(name);
+ if (content) files.envFiles.set(name, content);
+ });
+
+ await Promise.allSettled(reads);
+ }
+
+ private async readGitInfo(files: ProjectFiles): Promise {
+ try {
+ const isRepo = await this.git.checkIsRepo();
+ if (!isRepo) return;
+
+ const [log, contributors] = await Promise.all([
+ this.git.log({ maxCount: 1 }).catch(() => null),
+ this.git.raw(['shortlog', '-sn', '--all']).catch(() => '')
+ ]);
+
+ if (log?.latest) {
+ files.git.head = log.latest.hash;
+ files.git.lastCommit = new Date(log.latest.date);
+ }
+
+ if (contributors) {
+ const lines = contributors.trim().split('\n').filter((line: string) => line.trim());
+ files.git.contributors = lines.length;
+ }
+ } catch {
+ // Not a git repo or git not available
+ }
+ }
+
+ private async readMainFiles(files: ProjectFiles): Promise {
+ // Read likely entry points for service detection
+ const candidates = [
+ 'src/index.ts', 'src/index.js', 'src/main.ts', 'src/main.js',
+ 'src/server.ts', 'src/server.js', 'src/app.ts', 'src/app.js',
+ 'index.ts', 'index.js', 'server.ts', 'server.js',
+ 'main.go', 'main.rs'
+ ];
+
+ const reads = candidates.map(async file => {
+ const content = await this.readText(file);
+ if (content) files.mainFiles.set(file, content);
+ });
+
+ await Promise.allSettled(reads);
+ }
+
+ // Helper methods
+ private async readJson(filename: string): Promise {
+ try {
+ const content = await fs.readFile(path.join(this.projectPath, filename), 'utf8');
+ return JSON.parse(content);
+ } catch {
+ return undefined;
+ }
+ }
+
+ private async readText(filename: string): Promise {
+ try {
+ return await fs.readFile(path.join(this.projectPath, filename), 'utf8');
+ } catch {
+ return undefined;
+ }
+ }
+
+ private async readToml(filename: string): Promise {
+ try {
+ const content = await this.readText(filename);
+ if (!content) return undefined;
+
+ const toml = await import('@iarna/toml');
+ return toml.parse(content);
+ } catch {
+ return undefined;
+ }
+ }
+
+ private async fileExists(filename: string): Promise {
+ try {
+ await fs.access(path.join(this.projectPath, filename));
+ return true;
+ } catch {
+ return false;
+ }
+ }
+}
+
diff --git a/src/read-file-engine.ts b/src/read-file-engine.ts
new file mode 100644
index 0000000..bc80873
--- /dev/null
+++ b/src/read-file-engine.ts
@@ -0,0 +1,368 @@
+๏ปฟ/**
+ * Read File Engine
+ * Provides rich file context: content + metadata + relationships + complexity
+ */
+
+import { promises as fs } from 'fs';
+import { join, extname, relative, dirname } from 'path';
+import simpleGit from 'simple-git';
+
+interface FileMetadata {
+ size: number;
+ lastModified: Date;
+ author: string | null;
+ changeFrequency: number; // commits in last 30 days
+ linesOfCode: number;
+ language: string;
+}
+
+interface FileRelationships {
+ imports: string[]; // Files this imports
+ importedBy: string[]; // Files that import this
+ relatedTests: string[];
+ relatedConfigs: string[];
+}
+
+interface FileComplexity {
+ level: 'low' | 'medium' | 'high' | 'very-high';
+ score: number;
+ reasons: string[];
+}
+
+interface FileContext {
+ path: string;
+ content: string;
+ metadata: FileMetadata;
+ relationships: FileRelationships;
+ complexity: FileComplexity;
+}
+
+export class ReadFileEngine {
+ private projectPath: string;
+ private git: ReturnType;
+
+ constructor(projectPath: string) {
+ this.projectPath = projectPath;
+ this.git = simpleGit(projectPath);
+ }
+
+ /**
+ * Read file with rich context
+ */
+ async read(relativePath: string): Promise {
+ const fullPath = join(this.projectPath, relativePath);
+
+ // Layer 1: Read file content and basic info
+ const content = await fs.readFile(fullPath, 'utf-8');
+ const stats = await fs.stat(fullPath);
+
+ // Layer 2: Analyze in parallel
+ const [metadata, relationships, complexity] = await Promise.all([
+ this.analyzeMetadata(relativePath, stats, content),
+ this.analyzeRelationships(relativePath, content),
+ this.analyzeComplexity(content, relativePath)
+ ]);
+
+ return {
+ path: relativePath,
+ content,
+ metadata,
+ relationships,
+ complexity
+ };
+ }
+
+ /**
+ * Analyze file metadata
+ */
+ private async analyzeMetadata(
+ relativePath: string,
+ stats: any,
+ content: string
+ ): Promise {
+ // Git history
+ let author: string | null = null;
+ let changeFrequency = 0;
+
+ try {
+ // Get last author
+ const log = await this.git.log({ file: relativePath, maxCount: 1 });
+ author = log.latest?.author_name || null;
+
+ // Count commits in last 30 days
+ const thirtyDaysAgo = new Date();
+ thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
+ const recentLog = await this.git.log({
+ file: relativePath,
+ since: thirtyDaysAgo.toISOString()
+ });
+ changeFrequency = recentLog.all.length;
+ } catch (err) {
+ // Not a git repo or file not tracked
+ }
+
+ // Count lines of code (excluding blanks/comments)
+ const linesOfCode = this.countLOC(content);
+
+ // Detect language
+ const language = this.detectLanguage(relativePath);
+
+ return {
+ size: stats.size,
+ lastModified: stats.mtime,
+ author,
+ changeFrequency,
+ linesOfCode,
+ language
+ };
+ }
+
+ /**
+ * Analyze file relationships
+ */
+ private async analyzeRelationships(
+ relativePath: string,
+ content: string
+ ): Promise {
+ const imports: string[] = [];
+ const importedBy: string[] = [];
+ const relatedTests: string[] = [];
+ const relatedConfigs: string[] = [];
+
+ // Extract imports from file content
+ const ext = extname(relativePath);
+
+ if (['.ts', '.js', '.tsx', '.jsx'].includes(ext)) {
+ // JavaScript/TypeScript imports
+ const importRegex = /(?:import|require)\s*\(?['"]([^'"]+)['"]/g;
+ let match;
+ while ((match = importRegex.exec(content)) !== null) {
+ let importPath = match[1];
+
+ // Resolve relative imports
+ if (importPath.startsWith('.')) {
+ const dir = dirname(relativePath);
+ importPath = join(dir, importPath);
+
+ // Add common extensions if missing
+ if (!extname(importPath)) {
+ for (const tryExt of ['.ts', '.tsx', '.js', '.jsx']) {
+ try {
+ await fs.access(join(this.projectPath, importPath + tryExt));
+ importPath += tryExt;
+ break;
+ } catch {}
+ }
+ }
+
+ imports.push(importPath);
+ }
+ }
+ } else if (['.py'].includes(ext)) {
+ // Python imports
+ const importRegex = /(?:from|import)\s+(\w+(?:\.\w+)*)/g;
+ let match;
+ while ((match = importRegex.exec(content)) !== null) {
+ imports.push(match[1]);
+ }
+ } else if (['.go'].includes(ext)) {
+ // Go imports
+ const importRegex = /import\s+"([^"]+)"/g;
+ let match;
+ while ((match = importRegex.exec(content)) !== null) {
+ imports.push(match[1]);
+ }
+ }
+
+ // Find files that import this one (expensive, skip for now)
+ // Could be optimized with a project-wide import graph
+
+ // Find related test files
+ const testPatterns = this.getTestFilePatterns(relativePath);
+ for (const pattern of testPatterns) {
+ try {
+ await fs.access(join(this.projectPath, pattern));
+ relatedTests.push(pattern);
+ } catch {}
+ }
+
+ // Find related config files
+ const configPatterns = this.getConfigFilePatterns(relativePath);
+ for (const pattern of configPatterns) {
+ try {
+ await fs.access(join(this.projectPath, pattern));
+ relatedConfigs.push(pattern);
+ } catch {}
+ }
+
+ return {
+ imports,
+ importedBy, // TODO: Build project-wide graph
+ relatedTests,
+ relatedConfigs
+ };
+ }
+
+ /**
+ * Analyze file complexity
+ */
+ private analyzeComplexity(content: string, path: string): FileComplexity {
+ let score = 0;
+ const reasons: string[] = [];
+
+ // Lines of code
+ const loc = this.countLOC(content);
+ if (loc > 500) {
+ score += 30;
+ reasons.push(`Large file (${loc} LOC)`);
+ } else if (loc > 300) {
+ score += 20;
+ reasons.push(`Medium-large file (${loc} LOC)`);
+ } else if (loc > 150) {
+ score += 10;
+ }
+
+ // Cyclomatic complexity (count decision points)
+ const decisionPoints = (content.match(/\b(if|else|for|while|switch|case|\?|&&|\|\|)\b/g) || []).length;
+ if (decisionPoints > 50) {
+ score += 30;
+ reasons.push(`High cyclomatic complexity (${decisionPoints} decision points)`);
+ } else if (decisionPoints > 25) {
+ score += 15;
+ reasons.push(`Medium complexity (${decisionPoints} decision points)`);
+ }
+
+ // Nesting depth (count indentation)
+ const lines = content.split('\n');
+ let maxIndent = 0;
+ for (const line of lines) {
+ const indent = line.match(/^\s*/)?.[0].length || 0;
+ maxIndent = Math.max(maxIndent, Math.floor(indent / 2));
+ }
+ if (maxIndent > 6) {
+ score += 20;
+ reasons.push(`Deep nesting (${maxIndent} levels)`);
+ } else if (maxIndent > 4) {
+ score += 10;
+ }
+
+ // Function count
+ const functions = (content.match(/\bfunction\b|\bconst\s+\w+\s*=\s*\(|\bdef\b|\bfunc\b/g) || []).length;
+ if (functions > 20) {
+ score += 15;
+ reasons.push(`Many functions (${functions})`);
+ }
+
+ // TODO statements
+ const todos = (content.match(/\/\/\s*TODO|#\s*TODO/gi) || []).length;
+ if (todos > 0) {
+ score += todos * 5;
+ reasons.push(`${todos} TODO(s) pending`);
+ }
+
+ // Determine level
+ let level: FileComplexity['level'];
+ if (score >= 60) {
+ level = 'very-high';
+ } else if (score >= 40) {
+ level = 'high';
+ } else if (score >= 20) {
+ level = 'medium';
+ } else {
+ level = 'low';
+ }
+
+ return { level, score, reasons };
+ }
+
+ /**
+ * Count lines of code (excluding blanks and comments)
+ */
+ private countLOC(content: string): number {
+ const lines = content.split('\n');
+ let count = 0;
+
+ for (const line of lines) {
+ const trimmed = line.trim();
+ if (trimmed === '') continue;
+ if (trimmed.startsWith('//')) continue;
+ if (trimmed.startsWith('#')) continue;
+ if (trimmed.startsWith('/*')) continue;
+ if (trimmed.startsWith('*')) continue;
+ if (trimmed.startsWith('*/')) continue;
+ count++;
+ }
+
+ return count;
+ }
+
+ /**
+ * Detect programming language
+ */
+ private detectLanguage(path: string): string {
+ const ext = extname(path);
+ const map: Record = {
+ '.ts': 'TypeScript',
+ '.tsx': 'TypeScript React',
+ '.js': 'JavaScript',
+ '.jsx': 'JavaScript React',
+ '.py': 'Python',
+ '.go': 'Go',
+ '.rs': 'Rust',
+ '.java': 'Java',
+ '.cpp': 'C++',
+ '.c': 'C',
+ '.rb': 'Ruby',
+ '.php': 'PHP',
+ '.cs': 'C#',
+ '.swift': 'Swift',
+ '.kt': 'Kotlin',
+ '.md': 'Markdown',
+ '.json': 'JSON',
+ '.yaml': 'YAML',
+ '.yml': 'YAML',
+ '.toml': 'TOML',
+ '.xml': 'XML',
+ '.html': 'HTML',
+ '.css': 'CSS',
+ '.scss': 'SCSS',
+ '.sql': 'SQL'
+ };
+ return map[ext] || 'Unknown';
+ }
+
+ /**
+ * Get potential test file patterns
+ */
+ private getTestFilePatterns(filePath: string): string[] {
+ const dir = dirname(filePath);
+ const base = filePath.replace(extname(filePath), '');
+ const ext = extname(filePath);
+
+ return [
+ `${base}.test${ext}`,
+ `${base}.spec${ext}`,
+ `${base}_test${ext}`,
+ join(dir, '__tests__', filePath.split('/').pop() || ''),
+ join('tests', filePath),
+ join('test', filePath)
+ ];
+ }
+
+ /**
+ * Get potential config file patterns
+ */
+ private getConfigFilePatterns(filePath: string): string[] {
+ if (filePath.includes('jest')) {
+ return ['jest.config.js', 'jest.config.ts', 'package.json'];
+ }
+ if (filePath.includes('webpack')) {
+ return ['webpack.config.js', 'webpack.config.ts'];
+ }
+ if (filePath.includes('vite')) {
+ return ['vite.config.ts', 'vite.config.js'];
+ }
+ return ['package.json', 'tsconfig.json', '.eslintrc'];
+ }
+}
+
diff --git a/src/recall-engine.ts b/src/recall-engine.ts
new file mode 100644
index 0000000..bd03482
--- /dev/null
+++ b/src/recall-engine.ts
@@ -0,0 +1,536 @@
+๏ปฟ/**
+ * Recall Engine
+ * The 80% impact tool - transforms raw context into actionable intelligence
+ */
+
+import type { Database } from 'better-sqlite3';
+
+interface ContextItem {
+ type: 'active_work' | 'constraint' | 'problem' | 'goal' | 'decision' | 'note' | 'caveat';
+ content: string;
+ timestamp: number;
+ metadata?: any;
+ relevance?: number;
+ staleness?: 'fresh' | 'recent' | 'stale' | 'expired';
+}
+
+interface RecallSynthesis {
+ summary: string; // 2-paragraph "where you left off"
+ criticalPath: string[]; // Ordered next steps
+ activeWork: ContextItem[];
+ constraints: ContextItem[];
+ problems: ContextItem[];
+ goals: ContextItem[];
+ decisions: ContextItem[];
+ notes: ContextItem[];
+ caveats: ContextItem[]; // AI mistakes, tech debt
+ relationships: Map; // decision affected files
+ gaps: string[]; // Missing context warnings
+ suggestions: string[]; // Concrete next actions
+ freshness: {
+ fresh: number;
+ recent: number;
+ stale: number;
+ expired: number;
+ };
+}
+
+export class RecallEngine {
+ private db: Database;
+ private projectId: string;
+
+ constructor(db: Database, projectId: string) {
+ this.db = db;
+ this.projectId = projectId;
+ }
+
+ /**
+ * Generate intelligent context recall with synthesis
+ */
+ async recall(query?: string, limit: number = 10): Promise {
+ // 1. Gather all context
+ const context = await this.gatherContext(limit);
+
+ // 2. Analyze freshness
+ this.analyzeFreshness(context);
+
+ // 3. Rank by relevance
+ this.rankByRelevance(context, query);
+
+ // 4. Build relationships
+ const relationships = this.buildRelationships(context);
+
+ // 5. Detect gaps
+ const gaps = this.detectGaps(context);
+
+ // 6. Generate summary
+ const summary = this.generateSummary(context);
+
+ // 7. Extract critical path
+ const criticalPath = this.extractCriticalPath(context);
+
+ // 8. Generate suggestions
+ const suggestions = this.generateSuggestions(context, gaps);
+
+ // 9. Calculate freshness stats
+ const freshness = this.calculateFreshness(context);
+
+ return {
+ summary,
+ criticalPath,
+ activeWork: context.filter(c => c.type === 'active_work'),
+ constraints: context.filter(c => c.type === 'constraint'),
+ problems: context.filter(c => c.type === 'problem'),
+ goals: context.filter(c => c.type === 'goal'),
+ decisions: context.filter(c => c.type === 'decision'),
+ notes: context.filter(c => c.type === 'note'),
+ caveats: context.filter(c => c.type === 'caveat'),
+ relationships,
+ gaps,
+ suggestions,
+ freshness
+ };
+ }
+
+ /**
+ * Gather all context from database
+ */
+ private async gatherContext(limit: number): Promise {
+ const context: ContextItem[] = [];
+
+ // Active work
+ const activeWork = this.db.prepare(`
+ SELECT * FROM active_work
+ WHERE project_id = ? AND status = 'active'
+ ORDER BY timestamp DESC LIMIT ?
+ `).all(this.projectId, limit) as any[];
+
+ activeWork.forEach(work => {
+ context.push({
+ type: 'active_work',
+ content: work.task,
+ timestamp: work.timestamp,
+ metadata: {
+ files: work.files ? JSON.parse(work.files) : [],
+ branch: work.branch,
+ status: work.status
+ }
+ });
+ });
+
+ // Constraints
+ const constraints = this.db.prepare(`
+ SELECT * FROM constraints
+ WHERE project_id = ?
+ ORDER BY timestamp DESC LIMIT ?
+ `).all(this.projectId, limit) as any[];
+
+ constraints.forEach(c => {
+ context.push({
+ type: 'constraint',
+ content: `${c.key}: ${c.value}`,
+ timestamp: c.timestamp,
+ metadata: { reasoning: c.reasoning }
+ });
+ });
+
+ // Problems
+ const problems = this.db.prepare(`
+ SELECT * FROM problems
+ WHERE project_id = ? AND status = 'open'
+ ORDER BY timestamp DESC LIMIT ?
+ `).all(this.projectId, limit) as any[];
+
+ problems.forEach(p => {
+ context.push({
+ type: 'problem',
+ content: p.description,
+ timestamp: p.timestamp,
+ metadata: {
+ context: p.context ? JSON.parse(p.context) : null,
+ status: p.status
+ }
+ });
+ });
+
+ // Goals
+ const goals = this.db.prepare(`
+ SELECT * FROM goals
+ WHERE project_id = ? AND status IN ('planned', 'in-progress')
+ ORDER BY timestamp DESC LIMIT ?
+ `).all(this.projectId, limit) as any[];
+
+ goals.forEach(g => {
+ context.push({
+ type: 'goal',
+ content: g.description,
+ timestamp: g.timestamp,
+ metadata: {
+ targetDate: g.target_date,
+ status: g.status
+ }
+ });
+ });
+
+ // Decisions
+ const decisions = this.db.prepare(`
+ SELECT * FROM decisions
+ WHERE project_id = ?
+ ORDER BY timestamp DESC LIMIT ?
+ `).all(this.projectId, limit) as any[];
+
+ decisions.forEach(d => {
+ context.push({
+ type: 'decision',
+ content: d.description,
+ timestamp: d.timestamp,
+ metadata: {
+ reasoning: d.reasoning,
+ alternatives: d.alternatives ? JSON.parse(d.alternatives) : []
+ }
+ });
+ });
+
+ // Notes
+ const notes = this.db.prepare(`
+ SELECT * FROM notes
+ WHERE project_id = ?
+ ORDER BY timestamp DESC LIMIT ?
+ `).all(this.projectId, limit) as any[];
+
+ notes.forEach(n => {
+ context.push({
+ type: 'note',
+ content: n.content,
+ timestamp: n.timestamp,
+ metadata: { tags: n.tags }
+ });
+ });
+
+ // Caveats (AI mistakes, tech debt) - Only fetch unresolved ones
+ const caveats = this.db.prepare(`
+ SELECT * FROM caveats
+ WHERE project_id = ? AND resolved = 0
+ ORDER BY severity DESC, timestamp DESC LIMIT ?
+ `).all(this.projectId, limit) as any[];
+
+ caveats.forEach(c => {
+ context.push({
+ type: 'caveat',
+ content: c.description,
+ timestamp: c.timestamp,
+ metadata: {
+ category: c.category,
+ severity: c.severity,
+ attempted: c.attempted,
+ error: c.error,
+ recovery: c.recovery,
+ verified: c.verified === 1,
+ action_required: c.action_required,
+ affects_production: c.affects_production === 1
+ }
+ });
+ });
+
+ return context;
+ }
+
+ /**
+ * Analyze freshness of each context item
+ */
+ private analyzeFreshness(context: ContextItem[]): void {
+ const now = Date.now();
+ const HOUR = 60 * 60 * 1000;
+ const DAY = 24 * HOUR;
+
+ context.forEach(item => {
+ const age = now - item.timestamp;
+
+ if (age < 4 * HOUR) {
+ item.staleness = 'fresh'; // Last 4 hours
+ } else if (age < 2 * DAY) {
+ item.staleness = 'recent'; // Last 2 days
+ } else if (age < 7 * DAY) {
+ item.staleness = 'stale'; // Last week
+ } else {
+ item.staleness = 'expired'; // Older than a week
+ }
+ });
+ }
+
+ /**
+ * Rank by relevance (not just chronology)
+ */
+ private rankByRelevance(context: ContextItem[], query?: string): void {
+ context.forEach(item => {
+ let score = 0;
+
+ // Recency bonus
+ const age = Date.now() - item.timestamp;
+ const dayAge = age / (24 * 60 * 60 * 1000);
+ score += Math.max(0, 10 - dayAge); // 10 points if today, decreases
+
+ // Type priority
+ const typePriority: Record = {
+ 'active_work': 10,
+ 'caveat': 9, // High priority - unresolved tech debt!
+ 'problem': 8,
+ 'goal': 7,
+ 'constraint': 6,
+ 'decision': 5,
+ 'note': 3
+ };
+ score += typePriority[item.type];
+
+ // Query match bonus
+ if (query) {
+ const queryLower = query.toLowerCase();
+ const contentLower = item.content.toLowerCase();
+ if (contentLower.includes(queryLower)) {
+ score += 15; // Big bonus for query match
+ }
+ }
+
+ item.relevance = score;
+ });
+
+ // Sort by relevance
+ context.sort((a, b) => (b.relevance || 0) - (a.relevance || 0));
+ }
+
+ /**
+ * Build relationship graph (decisions files, problems constraints)
+ */
+ private buildRelationships(context: ContextItem[]): Map {
+ const relationships = new Map();
+
+ context.forEach(item => {
+ if (item.type === 'decision' || item.type === 'active_work') {
+ const files = item.metadata?.files || [];
+ if (files.length > 0) {
+ relationships.set(item.content, files);
+ }
+ }
+ });
+
+ return relationships;
+ }
+
+ /**
+ * Detect gaps in context
+ */
+ private detectGaps(context: ContextItem[]): string[] {
+ const gaps: string[] = [];
+
+ const hasActiveWork = context.some(c => c.type === 'active_work');
+ const hasGoals = context.some(c => c.type === 'goal');
+ const hasConstraints = context.some(c => c.type === 'constraint');
+ const hasProblems = context.some(c => c.type === 'problem');
+
+ if (!hasActiveWork) {
+ gaps.push(' No active work tracked - what are you currently working on?');
+ }
+
+ if (!hasGoals) {
+ gaps.push(' No goals defined - what are you trying to achieve?');
+ }
+
+ if (hasProblems && !hasConstraints) {
+ gaps.push(' Problems exist but no constraints documented - consider adding architectural constraints');
+ }
+
+ // Check for stale context
+ const staleItems = context.filter(c => c.staleness === 'expired');
+ if (staleItems.length > context.length / 2) {
+ gaps.push(' Most context is >1 week old - consider updating or archiving');
+ }
+
+ return gaps;
+ }
+
+ /**
+ * Generate 2-paragraph summary
+ */
+ private generateSummary(context: ContextItem[]): string {
+ const activeWork = context.filter(c => c.type === 'active_work' && c.staleness === 'fresh');
+ const problems = context.filter(c => c.type === 'problem');
+ const goals = context.filter(c => c.type === 'goal');
+ const recentDecisions = context.filter(c => c.type === 'decision' && c.staleness !== 'expired').slice(0, 2);
+
+ let summary = '';
+
+ // Paragraph 1: Current state
+ if (activeWork.length > 0) {
+ summary += `You're currently ${activeWork.length === 1 ? 'working on' : 'juggling'} `;
+ summary += activeWork.slice(0, 2).map(w => `"${w.content}"`).join(' and ');
+ if (activeWork.length > 2) summary += ` plus ${activeWork.length - 2} other task(s)`;
+ summary += '. ';
+ } else {
+ summary += 'No active work tracked recently. ';
+ }
+
+ if (problems.length > 0) {
+ summary += `You're facing ${problems.length} open problem(s), `;
+ summary += `most critical: "${problems[0].content}". `;
+ }
+
+ // Paragraph 2: Context and direction
+ summary += '\n\n';
+
+ if (goals.length > 0) {
+ summary += `Your current goal is to ${goals[0].content}. `;
+ }
+
+ if (recentDecisions.length > 0) {
+ summary += `Recently decided: ${recentDecisions.map(d => d.content).join('; ')}. `;
+ }
+
+ if (summary.trim() === '\n\n') {
+ summary = 'This is a fresh start - no recent context found. Consider using `remember` to document your current work, goals, and constraints.';
+ }
+
+ return summary.trim();
+ }
+
+ /**
+ * Extract critical path (ordered next steps)
+ */
+ private extractCriticalPath(context: ContextItem[]): string[] {
+ const path: string[] = [];
+
+ // 1. Unblocked active work
+ const activeWork = context.filter(c => c.type === 'active_work' && c.staleness !== 'expired');
+ if (activeWork.length > 0) {
+ path.push(`Continue: ${activeWork[0].content}`);
+ }
+
+ // 2. Open problems blocking progress
+ const criticalProblems = context.filter(c => c.type === 'problem');
+ if (criticalProblems.length > 0) {
+ path.push(`Fix: ${criticalProblems[0].content}`);
+ }
+
+ // 3. In-progress goals
+ const activeGoals = context.filter(c => c.type === 'goal' && c.metadata?.status === 'in-progress');
+ if (activeGoals.length > 0 && path.length < 3) {
+ path.push(`Achieve: ${activeGoals[0].content}`);
+ }
+
+ // 4. Planned goals
+ const plannedGoals = context.filter(c => c.type === 'goal' && c.metadata?.status === 'planned');
+ if (plannedGoals.length > 0 && path.length < 3) {
+ path.push(`Next: ${plannedGoals[0].content}`);
+ }
+
+ return path;
+ }
+
+ /**
+ * Generate actionable suggestions with cross-tool intelligence
+ */
+ private generateSuggestions(context: ContextItem[], gaps: string[]): string[] {
+ const suggestions: string[] = [];
+
+ // Based on active work
+ const activeWork = context.filter(c => c.type === 'active_work');
+ if (activeWork.length > 0) {
+ const work = activeWork[0];
+ if (work.metadata?.files && work.metadata.files.length > 0) {
+ suggestions.push(`Review ${work.metadata.files[0]} related to: ${work.content}`);
+ }
+
+ // Notion integration: suggest documentation search
+ if (this.mentionsDocumentation(work.content)) {
+ const keywords = this.extractKeywords(work.content);
+ suggestions.push(` Search Notion docs: notion action=search query="${keywords}"`);
+ }
+ }
+
+ // Based on decisions
+ const recentDecisions = context.filter(c => c.type === 'decision').slice(0, 2);
+ if (recentDecisions.length > 0) {
+ const decision = recentDecisions[0];
+ if (this.mentionsArchitecture(decision.content)) {
+ suggestions.push(` Find architecture docs: notion action=search query="architecture"`);
+ }
+ }
+
+ // Based on constraints
+ const constraints = context.filter(c => c.type === 'constraint');
+ if (constraints.length > 0 && this.mentionsDocumentation(constraints[0].content)) {
+ suggestions.push(` Check constraint documentation in Notion`);
+ }
+
+ // Based on problems
+ const problems = context.filter(c => c.type === 'problem');
+ if (problems.length > 0) {
+ suggestions.push(`Research solution for: ${problems[0].content}`);
+
+ // Suggest searching Notion for similar issues
+ const keywords = this.extractKeywords(problems[0].content);
+ if (keywords) {
+ suggestions.push(` Search past solutions: notion action=search query="${keywords}"`);
+ }
+ }
+
+ // Based on gaps
+ if (gaps.length > 0) {
+ suggestions.push('Run `remember` to update your current context');
+ }
+
+ return suggestions;
+ }
+
+ /**
+ * Detect if content mentions documentation/architecture concepts
+ */
+ private mentionsDocumentation(content: string): boolean {
+ const docKeywords = [
+ 'documentation', 'docs', 'design doc', 'architecture', 'specification',
+ 'guide', 'manual', 'reference', 'readme', 'wiki', 'adr', 'rfc'
+ ];
+ const lower = content.toLowerCase();
+ return docKeywords.some(keyword => lower.includes(keyword));
+ }
+
+ /**
+ * Detect if content mentions architecture concepts
+ */
+ private mentionsArchitecture(content: string): boolean {
+ const archKeywords = [
+ 'architecture', 'design', 'pattern', 'microservice', 'monolith',
+ 'api', 'database', 'infrastructure', 'system design', 'scalability'
+ ];
+ const lower = content.toLowerCase();
+ return archKeywords.some(keyword => lower.includes(keyword));
+ }
+
+ /**
+ * Extract meaningful keywords for Notion search
+ */
+ private extractKeywords(content: string): string {
+ // Remove common words and extract meaningful terms
+ const stopWords = ['the', 'a', 'an', 'is', 'are', 'was', 'were', 'to', 'for', 'of', 'in', 'on'];
+ const words = content.toLowerCase()
+ .replace(/[^\w\s]/g, ' ')
+ .split(/\s+/)
+ .filter(w => w.length > 3 && !stopWords.includes(w));
+
+ // Return first 2-3 meaningful words
+ return words.slice(0, 3).join(' ');
+ }
+
+ /**
+ * Calculate freshness statistics
+ */
+ private calculateFreshness(context: ContextItem[]): RecallSynthesis['freshness'] {
+ const fresh = context.filter(c => c.staleness === 'fresh').length;
+ const recent = context.filter(c => c.staleness === 'recent').length;
+ const stale = context.filter(c => c.staleness === 'stale').length;
+ const expired = context.filter(c => c.staleness === 'expired').length;
+
+ return { fresh, recent, stale, expired };
+ }
+}
+
+
diff --git a/src/remember-engine.ts b/src/remember-engine.ts
new file mode 100644
index 0000000..6b6bd85
--- /dev/null
+++ b/src/remember-engine.ts
@@ -0,0 +1,597 @@
+๏ปฟ/**
+ * Remember Engine
+ * Smart context storage with auto-detection, deduplication, and validation
+ * Auto-enriches with git context AND file context (complexity, imports, relationships)
+ */
+
+import type { Database } from 'better-sqlite3';
+import { randomUUID } from 'crypto';
+import simpleGit from 'simple-git';
+import { ReadFileEngine } from './read-file-engine.js';
+
+interface RememberInput {
+ type: 'active_work' | 'constraint' | 'problem' | 'goal' | 'decision' | 'note' | 'caveat';
+ content: string;
+ metadata?: Record;
+}
+
+interface RememberResult {
+ action: 'created' | 'updated' | 'skipped';
+ id: string;
+ type: string;
+ reason?: string;
+ gitContext?: {
+ branch: string;
+ uncommittedFiles: string[];
+ stagedFiles: string[];
+ lastCommit: string;
+ };
+ fileContext?: {
+ files: Array<{
+ path: string;
+ complexity: string;
+ linesOfCode: number;
+ imports: string[];
+ }>;
+ };
+}
+
+export class RememberEngine {
+ private db: Database;
+ private projectId: string;
+ private projectPath: string;
+ private readFileEngine: ReadFileEngine;
+
+ constructor(db: Database, projectId: string, projectPath: string) {
+ this.db = db;
+ this.projectId = projectId;
+ this.projectPath = projectPath;
+ this.readFileEngine = new ReadFileEngine(projectPath);
+ }
+
+ /**
+ * Remember context intelligently
+ */
+ async remember(input: RememberInput): Promise {
+ // 1. Validate content
+ const validation = this.validateContent(input);
+ if (!validation.valid) {
+ throw new Error(validation.reason);
+ }
+
+ // 2. Fetch git context to enrich metadata
+ const gitContext = await this.fetchGitContext();
+
+ // 3. Auto-enhance metadata with content + git info
+ input.metadata = await this.enhanceMetadata(input, gitContext);
+
+ // 4. Enrich with file context (complexity, imports, relationships)
+ let fileContext: RememberResult['fileContext'];
+ if (input.metadata.files && input.metadata.files.length > 0) {
+ fileContext = await this.enrichWithFileContext(input.metadata.files);
+
+ // Store file context in metadata for later retrieval
+ if (fileContext && fileContext.files.length > 0) {
+ input.metadata.fileContext = fileContext.files;
+ }
+ }
+
+ // 5. Check for duplicates/updates
+ const existing = this.findSimilar(input);
+ if (existing) {
+ // Update existing instead of creating duplicate
+ return this.updateExisting(existing, input, gitContext, fileContext);
+ }
+
+ // 6. Store new context
+ return this.storeNew(input, gitContext, fileContext);
+ }
+
+ /**
+ * Fetch git context (branch, changes, status)
+ */
+ private async fetchGitContext(): Promise {
+ try {
+ const git = simpleGit(this.projectPath);
+ const status = await git.status();
+ const log = await git.log({ maxCount: 1 });
+
+ return {
+ branch: status.current || 'unknown',
+ uncommittedFiles: [
+ ...status.modified,
+ ...status.created,
+ ...status.deleted,
+ ...status.renamed.map((entry: { to: string }) => entry.to)
+ ],
+ stagedFiles: status.staged,
+ lastCommit: log.latest?.message || 'No commits'
+ };
+ } catch (error) {
+ // Not a git repo or git error
+ return null;
+ }
+ }
+
+ /**
+ * Validate content is meaningful
+ */
+ private validateContent(input: RememberInput): { valid: boolean; reason?: string } {
+ const content = input.content.trim();
+
+ // Too short
+ if (content.length < 10) {
+ return { valid: false, reason: 'Content too short - be more specific (minimum 10 characters)' };
+ }
+
+ // Too vague
+ const vaguePatterns = [
+ /^(this|that|it|the thing)$/i,
+ /^(todo|fix|bug|issue|task)$/i,
+ /^(working on|doing|making)$/i
+ ];
+
+ for (const pattern of vaguePatterns) {
+ if (pattern.test(content)) {
+ return { valid: false, reason: `Too vague: "${content}". Please be specific about what you're doing.` };
+ }
+ }
+
+ return { valid: true };
+ }
+
+ /**
+ * Auto-enhance metadata by extracting from content + git context
+ */
+ private async enhanceMetadata(
+ input: RememberInput,
+ gitContext: RememberResult['gitContext'] | null
+ ): Promise> {
+ const metadata = input.metadata || {};
+ const content = input.content;
+
+ // Extract file paths from content
+ if (!metadata.files) {
+ const fileMatches = content.match(/\b[\w-]+\.(ts|js|tsx|jsx|py|go|rs|java|cpp|c|h|md|json|yaml|yml|toml)\b/g);
+ if (fileMatches) {
+ metadata.files = Array.from(new Set(fileMatches));
+ }
+ }
+
+ // Extract Notion page references from content
+ if (!metadata.notionPages) {
+ const notionPatterns = [
+ // Notion URLs: https://www.notion.so/Page-Title-123abc...
+ /https:\/\/(?:www\.)?notion\.so\/[^\s]+/g,
+ // Notion page IDs: 2daae57c-efce-8109-8899-f74f9054c7b7
+ /[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/gi
+ ];
+
+ const references = new Set();
+ for (const pattern of notionPatterns) {
+ const matches = content.match(pattern);
+ if (matches) {
+ matches.forEach(ref => references.add(ref));
+ }
+ }
+
+ if (references.size > 0) {
+ metadata.notionPages = Array.from(references);
+ }
+ }
+
+ // Detect if content mentions documentation (suggest Notion search)
+ if (!metadata.suggestNotionSearch) {
+ const docKeywords = [
+ 'documentation', 'docs', 'design doc', 'architecture doc',
+ 'specification', 'guide', 'manual', 'reference', 'adr', 'rfc'
+ ];
+ const lower = content.toLowerCase();
+ const mentionsDocs = docKeywords.some(keyword => lower.includes(keyword));
+
+ if (mentionsDocs) {
+ metadata.suggestNotionSearch = true;
+ // Extract meaningful keywords for search suggestion
+ const keywords = this.extractSearchKeywords(content);
+ if (keywords) {
+ metadata.notionSearchSuggestion = keywords;
+ }
+ }
+ }
+
+ // Add uncommitted files from git (likely what user is working on)
+ if (gitContext && (input.type === 'active_work' || input.type === 'problem')) {
+ const existingFiles = metadata.files || [];
+ const allFiles = new Set([
+ ...existingFiles,
+ ...gitContext.uncommittedFiles,
+ ...gitContext.stagedFiles
+ ]);
+ metadata.files = Array.from(allFiles);
+ }
+
+ // Extract/use branch from git
+ if (!metadata.branch && gitContext) {
+ metadata.branch = gitContext.branch;
+ }
+
+ // Extract deadlines for goals
+ if (input.type === 'goal' && !metadata.target_date) {
+ const datePatterns = [
+ /by\s+(\d{4}-\d{2}-\d{2})/i,
+ /by\s+(january|february|march|april|may|june|july|august|september|october|november|december)\s+(\d{1,2})/i,
+ /deadline[:\s]+(\d{4}-\d{2}-\d{2})/i
+ ];
+
+ for (const pattern of datePatterns) {
+ const match = content.match(pattern);
+ if (match) {
+ metadata.target_date = match[1];
+ break;
+ }
+ }
+ }
+
+ return metadata;
+ }
+
+ /**
+ * Extract meaningful keywords for Notion search suggestion
+ */
+ private extractSearchKeywords(content: string): string | null {
+ // Remove URLs, special chars, and extract meaningful terms
+ const cleaned = content
+ .replace(/https?:\/\/[^\s]+/g, '')
+ .replace(/[^\w\s]/g, ' ')
+ .toLowerCase();
+
+ const stopWords = ['the', 'a', 'an', 'is', 'are', 'was', 'were', 'to', 'for', 'of', 'in', 'on', 'that', 'this', 'we', 'i'];
+ const words = cleaned
+ .split(/\s+/)
+ .filter(w => w.length > 3 && !stopWords.includes(w));
+
+ // Return first 2-3 meaningful words
+ const keywords = words.slice(0, 3).join(' ');
+ return keywords.length > 0 ? keywords : null;
+ }
+
+ /**
+ * Enrich with file context (NEW!)
+ * Read files and add complexity, imports, relationships
+ */
+ private async enrichWithFileContext(
+ files: string[]
+ ): Promise {
+ const fileContexts: RememberResult['fileContext'] = { files: [] };
+
+ // Only process first 3 files (avoid slowdown)
+ const filesToProcess = files.slice(0, 3);
+
+ for (const file of filesToProcess) {
+ try {
+ const fileCtx = await this.readFileEngine.read(file);
+
+ fileContexts.files.push({
+ path: file,
+ complexity: fileCtx.complexity.level,
+ linesOfCode: fileCtx.metadata.linesOfCode,
+ imports: fileCtx.relationships.imports.slice(0, 5) // Top 5 imports
+ });
+ } catch (err) {
+ // File doesn't exist or can't be read, skip
+ continue;
+ }
+ }
+
+ return fileContexts.files.length > 0 ? fileContexts : undefined;
+ }
+
+ /**
+ * Find similar existing context
+ */
+ private findSimilar(input: RememberInput): any | null {
+ const { type, content } = input;
+
+ // For active_work, check if task is similar
+ if (type === 'active_work') {
+ const existing = this.db.prepare(`
+ SELECT * FROM active_work
+ WHERE project_id = ? AND status = 'active'
+ ORDER BY timestamp DESC LIMIT 5
+ `).all(this.projectId) as any[];
+
+ for (const item of existing) {
+ const similarity = this.calculateSimilarity(content, item.task);
+ if (similarity > 0.8) {
+ return { ...item, table: 'active_work' };
+ }
+ }
+ }
+
+ // For constraints, check if key matches
+ if (type === 'constraint') {
+ const keyValue = this.parseKeyValue(content);
+ const existing = this.db.prepare(`
+ SELECT * FROM constraints
+ WHERE project_id = ? AND key = ?
+ `).get(this.projectId, keyValue.key);
+
+ if (existing) {
+ return { ...existing, table: 'constraints' };
+ }
+ }
+
+ // For goals, check if description is similar
+ if (type === 'goal') {
+ const existing = this.db.prepare(`
+ SELECT * FROM goals
+ WHERE project_id = ? AND status IN ('planned', 'in-progress')
+ ORDER BY timestamp DESC LIMIT 5
+ `).all(this.projectId) as any[];
+
+ for (const item of existing) {
+ const similarity = this.calculateSimilarity(content, item.description);
+ if (similarity > 0.7) {
+ return { ...item, table: 'goals' };
+ }
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Calculate text similarity (simple word overlap)
+ */
+ private calculateSimilarity(text1: string, text2: string): number {
+ const words1 = new Set(text1.toLowerCase().split(/\s+/));
+ const words2 = new Set(text2.toLowerCase().split(/\s+/));
+
+ const intersection = new Set([...words1].filter(w => words2.has(w)));
+ const union = new Set([...words1, ...words2]);
+
+ return intersection.size / union.size;
+ }
+
+ /**
+ * Update existing context
+ */
+ private updateExisting(
+ existing: any,
+ input: RememberInput,
+ gitContext: RememberResult['gitContext'] | null,
+ fileContext?: RememberResult['fileContext']
+ ): RememberResult {
+ const timestamp = Date.now();
+ const files = input.metadata?.files || [];
+ const branch = input.metadata?.branch || gitContext?.branch || 'unknown';
+
+ switch (existing.table) {
+ case 'active_work':
+ // Store file context in the context field as JSON if present
+ const contextData = input.metadata?.context || existing.context;
+ const enrichedContext = fileContext
+ ? JSON.stringify({ text: contextData, fileContext })
+ : contextData;
+
+ this.db.prepare(`
+ UPDATE active_work
+ SET task = ?, context = ?, files = ?, branch = ?, timestamp = ?
+ WHERE id = ?
+ `).run(
+ input.content,
+ enrichedContext,
+ JSON.stringify(files),
+ branch,
+ timestamp,
+ existing.id
+ );
+ break;
+
+ case 'constraints':
+ const keyValue = this.parseKeyValue(input.content);
+ this.db.prepare(`
+ UPDATE constraints
+ SET value = ?, reasoning = ?, timestamp = ?
+ WHERE id = ?
+ `).run(
+ keyValue.value,
+ input.metadata?.reasoning || existing.reasoning,
+ timestamp,
+ existing.id
+ );
+ break;
+
+ case 'goals':
+ this.db.prepare(`
+ UPDATE goals
+ SET description = ?, target_date = ?, timestamp = ?
+ WHERE id = ?
+ `).run(
+ input.content,
+ input.metadata?.target_date || existing.target_date,
+ timestamp,
+ existing.id
+ );
+ break;
+ }
+
+ return {
+ action: 'updated',
+ id: existing.id,
+ type: input.type,
+ reason: 'Found similar existing context and updated it',
+ gitContext: gitContext || undefined,
+ fileContext
+ };
+ }
+
+ /**
+ * Store new context
+ */
+ private storeNew(
+ input: RememberInput,
+ gitContext: RememberResult['gitContext'] | null,
+ fileContext?: RememberResult['fileContext']
+ ): RememberResult {
+ const { type, content, metadata } = input;
+ const timestamp = Date.now();
+ const id = randomUUID();
+ const files = metadata?.files || [];
+ const branch = metadata?.branch || gitContext?.branch || 'unknown';
+
+ switch (type) {
+ case 'active_work':
+ // Store file context in the context field as JSON if present
+ const contextData = metadata?.context || '';
+ const enrichedContext = fileContext
+ ? JSON.stringify({ text: contextData, fileContext })
+ : contextData;
+
+ this.db.prepare(`
+ INSERT INTO active_work (id, project_id, task, context, files, branch, timestamp, status)
+ VALUES (?, ?, ?, ?, ?, ?, ?, 'active')
+ `).run(
+ id,
+ this.projectId,
+ content,
+ enrichedContext,
+ JSON.stringify(files),
+ branch,
+ timestamp
+ );
+ break;
+
+ case 'constraint':
+ const keyValue = this.parseKeyValue(content);
+ this.db.prepare(`
+ INSERT INTO constraints (id, project_id, key, value, reasoning, timestamp)
+ VALUES (?, ?, ?, ?, ?, ?)
+ `).run(
+ id,
+ this.projectId,
+ keyValue.key,
+ keyValue.value,
+ metadata?.reasoning || '',
+ timestamp
+ );
+ break;
+
+ case 'problem':
+ this.db.prepare(`
+ INSERT INTO problems (id, project_id, description, context, status, timestamp)
+ VALUES (?, ?, ?, ?, 'open', ?)
+ `).run(
+ id,
+ this.projectId,
+ content,
+ JSON.stringify(metadata?.context || {}),
+ timestamp
+ );
+ break;
+
+ case 'goal':
+ this.db.prepare(`
+ INSERT INTO goals (id, project_id, description, target_date, status, timestamp)
+ VALUES (?, ?, ?, ?, 'planned', ?)
+ `).run(
+ id,
+ this.projectId,
+ content,
+ metadata?.target_date || null,
+ timestamp
+ );
+ break;
+
+ case 'decision':
+ // Store alternatives in reasoning as JSON if provided
+ const reasoning = metadata?.reasoning || '';
+ const reasoningWithAlternatives = metadata?.alternatives
+ ? JSON.stringify({ reasoning, alternatives: metadata.alternatives })
+ : reasoning;
+
+ this.db.prepare(`
+ INSERT INTO decisions (id, project_id, type, description, reasoning, timestamp)
+ VALUES (?, ?, 'other', ?, ?, ?)
+ `).run(
+ id,
+ this.projectId,
+ content,
+ reasoningWithAlternatives,
+ timestamp
+ );
+ break;
+
+ case 'note':
+ this.db.prepare(`
+ INSERT INTO notes (id, project_id, content, tags, timestamp)
+ VALUES (?, ?, ?, ?, ?)
+ `).run(
+ id,
+ this.projectId,
+ content,
+ JSON.stringify(metadata?.tags || []),
+ timestamp
+ );
+ break;
+
+ case 'caveat':
+ // AI self-reporting mistakes and tech debt
+ const category = metadata?.category || 'workaround';
+ const severity = metadata?.severity || 'medium';
+ const verified = metadata?.verified === true ? 1 : 0;
+ const affects_production = metadata?.affects_production === true ? 1 : 0;
+
+ this.db.prepare(`
+ INSERT INTO caveats (
+ id, project_id, description, category, severity,
+ attempted, error, recovery, verified, action_required,
+ affects_production, timestamp, resolved
+ )
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 0)
+ `).run(
+ id,
+ this.projectId,
+ content,
+ category,
+ severity,
+ metadata?.attempted || null,
+ metadata?.error || null,
+ metadata?.recovery || null,
+ verified,
+ metadata?.action_required || null,
+ affects_production,
+ timestamp
+ );
+ break;
+ }
+
+ return {
+ action: 'created',
+ id,
+ type,
+ reason: 'Stored new context',
+ gitContext: gitContext || undefined,
+ fileContext
+ };
+ }
+
+ /**
+ * Parse "Key: Value" format
+ */
+ private parseKeyValue(content: string): { key: string; value: string } {
+ const match = content.match(/^(.+?)[:=](.+)$/);
+ if (match) {
+ return {
+ key: match[1].trim(),
+ value: match[2].trim()
+ };
+ }
+ // If no separator, treat whole thing as key with empty value
+ return {
+ key: content.trim(),
+ value: ''
+ };
+ }
+}
+
diff --git a/src/schema-migration.ts b/src/schema-migration.ts
new file mode 100644
index 0000000..6b57e94
--- /dev/null
+++ b/src/schema-migration.ts
@@ -0,0 +1,479 @@
+๏ปฟ/**
+ * Schema Migration Engine
+ * Safely migrates v1 context data to the current schema without data loss
+ * Runs automatically on first startup
+ */
+
+import Database from 'better-sqlite3';
+import { randomUUID } from 'crypto';
+
+interface MigrationResult {
+ success: boolean;
+ migratedTables: string[];
+ recordsCopied: number;
+ skipped: string[];
+ errors: string[];
+ backupPath?: string;
+}
+
+export class SchemaMigrator {
+ private db: Database.Database;
+ private backupDb?: Database.Database;
+
+ constructor(db: Database.Database) {
+ this.db = db;
+ }
+
+ /**
+ * Check if migration is needed
+ */
+ needsMigration(): boolean {
+ // Check if v1 tables have data but schema tables are empty
+ const v1HasData = this.hasV1Data();
+ const schemaIsEmpty = this.isSchemaEmpty();
+
+ return v1HasData && schemaIsEmpty;
+ }
+
+ /**
+ * Perform safe migration with backup (synchronous)
+ */
+ migrateSync(): MigrationResult {
+ console.log(' Starting v1 schema migration...\n');
+
+ const result: MigrationResult = {
+ success: true,
+ migratedTables: [],
+ recordsCopied: 0,
+ skipped: [],
+ errors: []
+ };
+
+ try {
+ // 1. Create backup
+ console.log(' Creating backup...');
+ const backupPath = this.createBackup();
+ result.backupPath = backupPath;
+ console.log(` Backup created: ${backupPath}\n`);
+
+ // 2. Start transaction for atomic migration
+ this.db.exec('BEGIN TRANSACTION');
+
+ // 3. Migrate decisions decisions (enhanced with metadata)
+ console.log(' Migrating decisions...');
+ const decisionCount = this.migrateDecisions();
+ result.recordsCopied += decisionCount;
+ result.migratedTables.push('decisions');
+ console.log(` Migrated ${decisionCount} decisions\n`);
+
+ // 4. Migrate conversations notes (as general context)
+ console.log(' Migrating conversations to notes...');
+ const conversationCount = this.migrateConversationsToNotes();
+ result.recordsCopied += conversationCount;
+ result.migratedTables.push('conversations notes');
+ console.log(` Migrated ${conversationCount} conversations\n`);
+
+ // 5. Migrate learnings notes (as insights)
+ console.log(' Migrating learnings to notes...');
+ const learningCount = this.migrateLearningsToNotes();
+ result.recordsCopied += learningCount;
+ result.migratedTables.push('learnings notes');
+ console.log(` Migrated ${learningCount} learnings\n`);
+
+ // 6. Migrate problem_solutions problems (with solutions as resolution)
+ console.log(' Migrating problem solutions...');
+ const problemCount = this.migrateProblemSolutions();
+ result.recordsCopied += problemCount;
+ result.migratedTables.push('problem_solutions problems');
+ console.log(` Migrated ${problemCount} problem solutions\n`);
+
+ // 7. Migrate comparisons decisions (as decision rationale)
+ console.log(' Migrating comparisons to decisions...');
+ const comparisonCount = this.migrateComparisons();
+ result.recordsCopied += comparisonCount;
+ result.migratedTables.push('comparisons decisions');
+ console.log(` Migrated ${comparisonCount} comparisons\n`);
+
+ // 8. Migrate anti_patterns constraints (as "don't do this" rules)
+ console.log(' Migrating anti-patterns to constraints...');
+ const antiPatternCount = this.migrateAntiPatterns();
+ result.recordsCopied += antiPatternCount;
+ result.migratedTables.push('anti_patterns constraints');
+ console.log(` Migrated ${antiPatternCount} anti-patterns\n`);
+
+ // 9. Migrate todos active_work (with status mapping)
+ console.log(' Migrating todos to active work...');
+ const todoCount = this.migrateTodos();
+ result.recordsCopied += todoCount;
+ result.migratedTables.push('todos active_work');
+ console.log(` Migrated ${todoCount} todos\n`);
+
+ // 10. Commit transaction
+ this.db.exec('COMMIT');
+
+ // 10. Mark migration as complete
+ this.markMigrationComplete();
+
+ console.log(' Migration completed successfully!');
+ console.log(` Total records migrated: ${result.recordsCopied}`);
+ console.log(` Backup available at: ${backupPath}\n`);
+
+ return result;
+
+ } catch (error: any) {
+ // Rollback on error
+ try {
+ this.db.exec('ROLLBACK');
+ } catch {}
+
+ result.success = false;
+ result.errors.push(error.message);
+ console.error(' Migration failed:', error.message);
+
+ return result;
+ }
+ }
+
+ /**
+ * Perform safe migration with backup (async for compatibility)
+ */
+ async migrate(): Promise {
+ return this.migrateSync();
+ }
+
+ /**
+ * Create database backup before migration
+ */
+ private createBackup(): string {
+ const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
+ const backupPath = this.db.name.replace('.db', `.v1-backup-${timestamp}.db`);
+
+ // Use SQLite backup API for safe copy
+ this.backupDb = new Database(backupPath);
+ this.db.backup(backupPath);
+
+ return backupPath;
+ }
+
+ /**
+ * Check if v1 tables have data
+ */
+ private hasV1Data(): boolean {
+ const v1Tables = ['decisions', 'conversations', 'learnings', 'problem_solutions', 'comparisons', 'anti_patterns', 'todos'];
+
+ for (const table of v1Tables) {
+ try {
+ const result = this.db.prepare(`SELECT COUNT(*) as count FROM ${table}`).get() as { count: number };
+ if (result.count > 0) {
+ return true;
+ }
+ } catch {
+ // Table doesn't exist, skip
+ continue;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Check if schema tables are empty (fresh install or needs migration)
+ */
+ private isSchemaEmpty(): boolean {
+ const schemaTables = ['active_work', 'constraints', 'problems', 'goals', 'notes'];
+
+ for (const table of schemaTables) {
+ try {
+ const result = this.db.prepare(`SELECT COUNT(*) as count FROM ${table}`).get() as { count: number };
+ if (result.count > 0) {
+ return false; // schema already has data
+ }
+ } catch {
+ // Table doesn't exist
+ return true;
+ }
+ }
+
+ return true;
+ }
+
+ /**
+ * Migrate decisions (v1 schema is compatible, just enhance)
+ */
+ private migrateDecisions(): number {
+ // V1 decisions table structure: id, project_id, type, description, reasoning, timestamp
+ // Decisions table is the same, no changes needed
+ // But we can check if any decisions exist and are already in the right format
+
+ const existing = this.db.prepare('SELECT COUNT(*) as count FROM decisions').get() as { count: number };
+ return existing.count;
+ }
+
+ /**
+ * Migrate conversations to notes
+ */
+ private migrateConversationsToNotes(): number {
+ const conversations = this.db.prepare(`
+ SELECT * FROM conversations
+ ORDER BY timestamp DESC
+ `).all();
+
+ let count = 0;
+ const insertNote = this.db.prepare(`
+ INSERT INTO notes (id, project_id, content, tags, timestamp)
+ VALUES (?, ?, ?, ?, ?)
+ `);
+
+ for (const conv of conversations as any[]) {
+ const content = `[${conv.tool}] ${conv.role}: ${conv.content}`;
+ const tags = JSON.stringify(['conversation', conv.tool, conv.role]);
+
+ insertNote.run(
+ randomUUID(),
+ conv.project_id,
+ content,
+ tags,
+ conv.timestamp
+ );
+ count++;
+ }
+
+ return count;
+ }
+
+ /**
+ * Migrate learnings to notes
+ */
+ private migrateLearningsToNotes(): number {
+ const learnings = this.db.prepare(`
+ SELECT * FROM learnings
+ ORDER BY timestamp DESC
+ `).all();
+
+ let count = 0;
+ const insertNote = this.db.prepare(`
+ INSERT INTO notes (id, project_id, content, tags, timestamp)
+ VALUES (?, ?, ?, ?, ?)
+ `);
+
+ for (const learning of learnings as any[]) {
+ const content = ` ${learning.insight}${learning.context ? `\n\nContext: ${learning.context}` : ''}`;
+ const tags = JSON.stringify(['learning', 'insight', `confidence-${learning.confidence || 'medium'}`]);
+
+ insertNote.run(
+ randomUUID(),
+ learning.project_id,
+ content,
+ tags,
+ learning.timestamp
+ );
+ count++;
+ }
+
+ return count;
+ }
+
+ /**
+ * Migrate problem_solutions to problems
+ */
+ private migrateProblemSolutions(): number {
+ const problemSolutions = this.db.prepare(`
+ SELECT * FROM problem_solutions
+ ORDER BY timestamp DESC
+ `).all();
+
+ let count = 0;
+ const insertProblem = this.db.prepare(`
+ INSERT INTO problems (id, project_id, description, context, status, resolution, timestamp)
+ VALUES (?, ?, ?, ?, ?, ?, ?)
+ `);
+
+ for (const ps of problemSolutions as any[]) {
+ insertProblem.run(
+ randomUUID(),
+ ps.project_id,
+ ps.problem,
+ `Confidence: ${ps.confidence || 'medium'}`,
+ 'resolved', // All historical problems are resolved
+ ps.solution,
+ ps.timestamp
+ );
+ count++;
+ }
+
+ return count;
+ }
+
+ /**
+ * Migrate comparisons to decisions
+ */
+ private migrateComparisons(): number {
+ const comparisons = this.db.prepare(`
+ SELECT * FROM comparisons
+ ORDER BY timestamp DESC
+ `).all();
+
+ let count = 0;
+ const insertDecision = this.db.prepare(`
+ INSERT INTO decisions (id, project_id, type, description, reasoning, timestamp)
+ VALUES (?, ?, ?, ?, ?, ?)
+ `);
+
+ for (const comp of comparisons as any[]) {
+ const description = `Chose ${comp.winner || comp.option_a} over ${comp.option_a === comp.winner ? comp.option_b : comp.option_a}`;
+ const reasoning = JSON.stringify({
+ comparison: {
+ optionA: comp.option_a,
+ optionB: comp.option_b,
+ winner: comp.winner,
+ reasoning: comp.reasoning,
+ confidence: comp.confidence
+ }
+ });
+
+ insertDecision.run(
+ randomUUID(),
+ comp.project_id,
+ 'comparison',
+ description,
+ reasoning,
+ comp.timestamp
+ );
+ count++;
+ }
+
+ return count;
+ }
+
+ /**
+ * Migrate anti_patterns to constraints
+ */
+ private migrateAntiPatterns(): number {
+ const antiPatterns = this.db.prepare(`
+ SELECT * FROM anti_patterns
+ ORDER BY timestamp DESC
+ `).all();
+
+ let count = 0;
+ const insertConstraint = this.db.prepare(`
+ INSERT INTO constraints (id, project_id, key, value, reasoning, timestamp)
+ VALUES (?, ?, ?, ?, ?, ?)
+ `);
+
+ for (const ap of antiPatterns as any[]) {
+ const key = `avoid-${ap.description.substring(0, 50).replace(/\s+/g, '-').toLowerCase()}`;
+ const value = `DON'T: ${ap.description}`;
+
+ insertConstraint.run(
+ randomUUID(),
+ ap.project_id,
+ key,
+ value,
+ ap.why,
+ ap.timestamp
+ );
+ count++;
+ }
+
+ return count;
+ }
+
+ /**
+ * Migrate todos to active_work
+ * Maps todo statuses: pending/in_progress active, completed completed
+ */
+ private migrateTodos(): number {
+ // Check if todos table exists
+ try {
+ this.db.prepare('SELECT COUNT(*) FROM todos LIMIT 1').get();
+ } catch {
+ // Table doesn't exist, skip migration
+ return 0;
+ }
+
+ const todos = this.db.prepare(`
+ SELECT * FROM todos
+ ORDER BY created_at DESC
+ `).all();
+
+ let count = 0;
+ const insertActiveWork = this.db.prepare(`
+ INSERT INTO active_work (id, project_id, task, context, files, branch, timestamp, status)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+ `);
+
+ for (const todo of todos as any[]) {
+ // Map todo status to active_work status
+ let status: 'active' | 'paused' | 'completed' = 'active';
+ if (todo.status === 'completed' || todo.status === 'done') {
+ status = 'completed';
+ } else if (todo.status === 'blocked' || todo.status === 'on_hold') {
+ status = 'paused';
+ }
+
+ // Build context from todo description and metadata
+ const contextParts = [];
+ if (todo.description) contextParts.push(todo.description);
+ if (todo.priority) contextParts.push(`Priority: ${todo.priority}`);
+ if (todo.due_date) contextParts.push(`Due: ${todo.due_date}`);
+ if (todo.tags) {
+ try {
+ const tags = JSON.parse(todo.tags);
+ if (tags.length > 0) contextParts.push(`Tags: ${tags.join(', ')}`);
+ } catch {}
+ }
+ const context = contextParts.join('\n');
+
+ // Convert timestamp from ISO string to epoch
+ const timestamp = todo.created_at ? new Date(todo.created_at).getTime() : Date.now();
+
+ insertActiveWork.run(
+ randomUUID(),
+ todo.project_id,
+ todo.title,
+ context || null,
+ null, // files - todos didn't have this
+ null, // branch - todos didn't have this
+ timestamp,
+ status
+ );
+ count++;
+ }
+
+ return count;
+ }
+
+ /**
+ * Mark migration as complete in database
+ */
+ private markMigrationComplete(): void {
+ this.db.exec(`
+ CREATE TABLE IF NOT EXISTS migration_history (
+ id TEXT PRIMARY KEY,
+ version TEXT NOT NULL,
+ completed_at INTEGER NOT NULL
+ );
+
+ INSERT INTO migration_history (id, version, completed_at)
+ VALUES ('v1-to-v2', '2.0.0', ${Date.now()});
+ `);
+ }
+
+ /**
+ * Check if migration has been completed before
+ */
+ static hasCompletedMigration(db: Database.Database): boolean {
+ try {
+ const result = db.prepare(`
+ SELECT * FROM migration_history
+ WHERE version = '2.0.0'
+ `).get();
+
+ return !!result;
+ } catch {
+ return false;
+ }
+ }
+}
+
+
diff --git a/src/schema.ts b/src/schema.ts
new file mode 100644
index 0000000..05ffcec
--- /dev/null
+++ b/src/schema.ts
@@ -0,0 +1,228 @@
+๏ปฟ/**
+ * Database schema update - Context Layers
+ * Adds new tables for intentional context capture
+ */
+
+import Database from 'better-sqlite3';
+
+export function migrateSchema(db: Database.Database): void {
+ console.log(' Migrating database schema...');
+
+ // Create new context layer tables
+ db.exec(`
+ -- Enhanced Project Identity fields (extends projects table)
+ CREATE TABLE IF NOT EXISTS project_dependencies (
+ id TEXT PRIMARY KEY,
+ project_id TEXT NOT NULL,
+ name TEXT NOT NULL,
+ version TEXT NOT NULL,
+ critical INTEGER NOT NULL DEFAULT 0, -- boolean
+ dev INTEGER NOT NULL DEFAULT 0, -- boolean
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ CREATE TABLE IF NOT EXISTS project_build_system (
+ project_id TEXT PRIMARY KEY,
+ type TEXT NOT NULL,
+ commands TEXT NOT NULL, -- JSON
+ config_file TEXT NOT NULL,
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ CREATE TABLE IF NOT EXISTS project_test_framework (
+ project_id TEXT PRIMARY KEY,
+ name TEXT NOT NULL,
+ pattern TEXT NOT NULL,
+ config_file TEXT,
+ coverage INTEGER, -- percentage
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ CREATE TABLE IF NOT EXISTS project_env_vars (
+ id TEXT PRIMARY KEY,
+ project_id TEXT NOT NULL,
+ var_name TEXT NOT NULL,
+ required INTEGER NOT NULL DEFAULT 0, -- boolean
+ example_value TEXT,
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ CREATE TABLE IF NOT EXISTS project_services (
+ id TEXT PRIMARY KEY,
+ project_id TEXT NOT NULL,
+ name TEXT NOT NULL,
+ port INTEGER,
+ protocol TEXT NOT NULL,
+ health_check TEXT,
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ CREATE TABLE IF NOT EXISTS project_databases (
+ id TEXT PRIMARY KEY,
+ project_id TEXT NOT NULL,
+ type TEXT NOT NULL,
+ connection_var TEXT,
+ migrations INTEGER NOT NULL DEFAULT 0, -- boolean
+ migrations_path TEXT,
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ CREATE TABLE IF NOT EXISTS project_metrics (
+ project_id TEXT PRIMARY KEY,
+ lines_of_code INTEGER NOT NULL,
+ file_count INTEGER NOT NULL,
+ last_commit TEXT,
+ contributors INTEGER NOT NULL,
+ hotspots TEXT, -- JSON array
+ complexity TEXT NOT NULL,
+ updated_at INTEGER NOT NULL,
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ -- Active Work table
+ CREATE TABLE IF NOT EXISTS active_work (
+ id TEXT PRIMARY KEY,
+ project_id TEXT NOT NULL,
+ task TEXT NOT NULL,
+ context TEXT,
+ files TEXT, -- JSON array of file paths
+ branch TEXT,
+ timestamp INTEGER NOT NULL,
+ status TEXT CHECK(status IN ('active', 'paused', 'completed')) DEFAULT 'active',
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ -- Constraints table (architectural rules)
+ CREATE TABLE IF NOT EXISTS constraints (
+ id TEXT PRIMARY KEY,
+ project_id TEXT NOT NULL,
+ key TEXT NOT NULL,
+ value TEXT NOT NULL,
+ reasoning TEXT,
+ timestamp INTEGER NOT NULL,
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ -- Problems table (blockers/issues)
+ CREATE TABLE IF NOT EXISTS problems (
+ id TEXT PRIMARY KEY,
+ project_id TEXT NOT NULL,
+ description TEXT NOT NULL,
+ context TEXT,
+ status TEXT CHECK(status IN ('open', 'investigating', 'resolved')) DEFAULT 'open',
+ resolution TEXT,
+ timestamp INTEGER NOT NULL,
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ -- Goals table
+ CREATE TABLE IF NOT EXISTS goals (
+ id TEXT PRIMARY KEY,
+ project_id TEXT NOT NULL,
+ description TEXT NOT NULL,
+ target_date TEXT,
+ status TEXT CHECK(status IN ('planned', 'in-progress', 'blocked', 'completed')) DEFAULT 'planned',
+ timestamp INTEGER NOT NULL,
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ -- Notes table (general important info)
+ CREATE TABLE IF NOT EXISTS notes (
+ id TEXT PRIMARY KEY,
+ project_id TEXT NOT NULL,
+ content TEXT NOT NULL,
+ tags TEXT, -- JSON array of tags
+ timestamp INTEGER NOT NULL,
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ -- Caveats table (AI mistakes, tech debt, unverified changes)
+ CREATE TABLE IF NOT EXISTS caveats (
+ id TEXT PRIMARY KEY,
+ project_id TEXT NOT NULL,
+ description TEXT NOT NULL,
+ category TEXT CHECK(category IN ('mistake', 'shortcut', 'unverified', 'assumption', 'workaround')) NOT NULL,
+ severity TEXT CHECK(severity IN ('low', 'medium', 'high', 'critical')) DEFAULT 'medium',
+ attempted TEXT,
+ error TEXT,
+ recovery TEXT,
+ verified INTEGER NOT NULL DEFAULT 0, -- boolean
+ action_required TEXT,
+ affects_production INTEGER NOT NULL DEFAULT 0, -- boolean
+ timestamp INTEGER NOT NULL,
+ resolved INTEGER NOT NULL DEFAULT 0, -- boolean
+ resolution TEXT,
+ resolved_at INTEGER,
+ FOREIGN KEY (project_id) REFERENCES projects(id)
+ );
+
+ -- Indexes for performance
+ CREATE INDEX IF NOT EXISTS idx_project_dependencies
+ ON project_dependencies(project_id, critical DESC);
+ CREATE INDEX IF NOT EXISTS idx_project_env_vars
+ ON project_env_vars(project_id, required DESC);
+ CREATE INDEX IF NOT EXISTS idx_project_services
+ ON project_services(project_id);
+ CREATE INDEX IF NOT EXISTS idx_project_databases
+ ON project_databases(project_id);
+
+ CREATE INDEX IF NOT EXISTS idx_active_work_project
+ ON active_work(project_id, status, timestamp DESC);
+ CREATE INDEX IF NOT EXISTS idx_constraints_project
+ ON constraints(project_id, timestamp DESC);
+ CREATE INDEX IF NOT EXISTS idx_problems_project
+ ON problems(project_id, status, timestamp DESC);
+ CREATE INDEX IF NOT EXISTS idx_goals_project
+ ON goals(project_id, status, timestamp DESC);
+ CREATE INDEX IF NOT EXISTS idx_notes_project
+ ON notes(project_id, timestamp DESC);
+ CREATE INDEX IF NOT EXISTS idx_caveats_project
+ ON caveats(project_id, resolved, severity DESC, timestamp DESC);
+ `);
+
+ console.log(' Database migration complete');
+}
+
+/**
+ * Check if schema tables exist
+ */
+export function isSchemaCurrent(db: Database.Database): boolean {
+ try {
+ // Check for all schema tables (both context layers and enhanced project identity)
+ const schemaTables = [
+ 'active_work',
+ 'constraints',
+ 'problems',
+ 'goals',
+ 'decisions',
+ 'notes',
+ 'caveats',
+ 'sessions',
+ 'project_dependencies',
+ 'project_build_system',
+ 'project_test_framework',
+ 'project_env_vars',
+ 'project_services',
+ 'project_databases',
+ 'project_metrics'
+ ];
+
+ for (const table of schemaTables) {
+ const result = db.prepare(`
+ SELECT name FROM sqlite_master
+ WHERE type='table' AND name=?
+ `).get(table);
+
+ if (!result) {
+ console.log(` Missing schema table: ${table}`);
+ return false;
+ }
+ }
+
+ return true;
+ } catch {
+ return false;
+ }
+}
+
+
diff --git a/src/search-engine.ts b/src/search-engine.ts
new file mode 100644
index 0000000..14adda5
--- /dev/null
+++ b/src/search-engine.ts
@@ -0,0 +1,455 @@
+๏ปฟ/**
+ * Search Engine
+ *
+ * Layer 1: Fast search with relevance scoring
+ * Layer 2: Semantic ranking based on context
+ * Layer 3: File context enrichment (leverages read_file)
+ *
+ * Features:
+ * - Relevance scoring (exact match > prefix > fuzzy)
+ * - Semantic ranking (imports, file type, recency)
+ * - File complexity integration
+ * - Search result clustering
+ * - Smart suggestions
+ */
+
+import * as fs from 'fs/promises';
+import * as path from 'path';
+import { ReadFileEngine } from './read-file-engine.js';
+
+interface SearchMatch {
+ file: string;
+ relativePath: string;
+ line?: number;
+ column?: number;
+ text?: string;
+ matchType: 'exact' | 'prefix' | 'fuzzy' | 'content';
+ relevanceScore: number;
+ context?: {
+ language?: string;
+ complexity?: string;
+ linesOfCode?: number;
+ lastModified?: Date;
+ };
+}
+
+interface SearchResult {
+ query: string;
+ totalMatches: number;
+ matches: SearchMatch[];
+ suggestions?: string[];
+ clusters?: {
+ [key: string]: SearchMatch[];
+ };
+}
+
+export class SearchEngine {
+ private workspacePath: string;
+ private readFileEngine: ReadFileEngine;
+ private fileCache: Map;
+
+ constructor(workspacePath: string) {
+ this.workspacePath = workspacePath;
+ this.readFileEngine = new ReadFileEngine(workspacePath);
+ this.fileCache = new Map();
+ }
+
+ /**
+ * Layer 1: Fast file search with relevance scoring
+ */
+ async searchFiles(
+ query: string,
+ options: {
+ maxResults?: number;
+ enrichContext?: boolean;
+ caseSensitive?: boolean;
+ } = {}
+ ): Promise {
+ const {
+ maxResults = 50,
+ enrichContext = true,
+ caseSensitive = false
+ } = options;
+
+ // Find all files recursively
+ const allFiles = await this.getAllFiles(this.workspacePath);
+
+ // Score and rank matches
+ const matches: SearchMatch[] = [];
+ const searchQuery = caseSensitive ? query : query.toLowerCase();
+
+ for (const file of allFiles) {
+ const fileName = path.basename(file);
+ const searchFileName = caseSensitive ? fileName : fileName.toLowerCase();
+ const searchFilePath = caseSensitive ? file : file.toLowerCase();
+
+ let matchType: SearchMatch['matchType'] | null = null;
+ let score = 0;
+
+ // Exact match (highest priority)
+ if (searchFileName === searchQuery || searchFilePath === searchQuery) {
+ matchType = 'exact';
+ score = 100;
+ }
+ // Prefix match
+ else if (searchFileName.startsWith(searchQuery) || searchFilePath.startsWith(searchQuery)) {
+ matchType = 'prefix';
+ score = 80;
+ }
+ // Contains match
+ else if (searchFileName.includes(searchQuery) || searchFilePath.includes(searchQuery)) {
+ matchType = 'fuzzy';
+ score = 60;
+ }
+ // Fuzzy match (initials, e.g., "re" matches "recall-engine")
+ else if (this.fuzzyMatch(searchFileName, searchQuery)) {
+ matchType = 'fuzzy';
+ score = 40;
+ }
+
+ if (matchType) {
+ // Boost score based on file characteristics
+ if (fileName.includes('index') || fileName.includes('main')) score += 5;
+ if (fileName.endsWith('.ts') || fileName.endsWith('.js')) score += 3;
+ if (file.split('/').length <= 2) score += 5; // Root level files
+
+ const fullPath = path.join(this.workspacePath, file);
+ matches.push({
+ file: fullPath,
+ relativePath: file,
+ matchType,
+ relevanceScore: score
+ });
+ }
+ }
+
+ // Sort by relevance
+ matches.sort((a, b) => b.relevanceScore - a.relevanceScore);
+
+ // Layer 3: Enrich top results with file context
+ const topMatches = matches.slice(0, maxResults);
+ if (enrichContext) {
+ await this.enrichMatches(topMatches.slice(0, 10)); // Only enrich top 10 for performance
+ }
+
+ // Layer 2: Cluster results by directory
+ const clusters = this.clusterByDirectory(topMatches);
+
+ // Generate suggestions
+ const suggestions = this.generateSuggestions(query, topMatches);
+
+ return {
+ query,
+ totalMatches: matches.length,
+ matches: topMatches,
+ suggestions,
+ clusters
+ };
+ }
+
+ /**
+ * Layer 1: Fast content search with context
+ */
+ async searchContent(
+ query: string,
+ options: {
+ maxResults?: number;
+ filePattern?: string;
+ caseSensitive?: boolean;
+ regex?: boolean;
+ enrichContext?: boolean;
+ } = {}
+ ): Promise {
+ const {
+ maxResults = 100,
+ filePattern,
+ caseSensitive = false,
+ regex = false,
+ enrichContext = false
+ } = options;
+
+ // Find files to search
+ const allFiles = await this.getAllFiles(this.workspacePath);
+ const files = filePattern
+ ? allFiles.filter(f => this.matchesPattern(f, filePattern))
+ : allFiles.filter(f => this.isSearchableFile(f));
+
+ const matches: SearchMatch[] = [];
+ const searchPattern = regex ? new RegExp(query, caseSensitive ? 'g' : 'gi') : null;
+
+ for (const file of files) {
+ const fullPath = path.join(this.workspacePath, file);
+
+ try {
+ const content = await this.getCachedContent(fullPath);
+ const lines = content.split('\n');
+
+ for (let i = 0; i < lines.length; i++) {
+ const line = lines[i];
+ let found = false;
+ let column = -1;
+
+ if (regex && searchPattern) {
+ const match = line.match(searchPattern);
+ if (match) {
+ found = true;
+ column = match.index || 0;
+ }
+ } else {
+ const searchLine = caseSensitive ? line : line.toLowerCase();
+ const searchQuery = caseSensitive ? query : query.toLowerCase();
+ column = searchLine.indexOf(searchQuery);
+ found = column !== -1;
+ }
+
+ if (found) {
+ // Calculate relevance score
+ let score = 50;
+
+ // Boost if query appears at start of line
+ if (column < 5) score += 20;
+
+ // Boost if line is shorter (more focused)
+ if (line.length < 100) score += 10;
+
+ // Boost for certain file types
+ if (file.endsWith('.ts') || file.endsWith('.js')) score += 5;
+
+ matches.push({
+ file: fullPath,
+ relativePath: file,
+ line: i + 1,
+ column,
+ text: line,
+ matchType: 'content',
+ relevanceScore: score
+ });
+
+ if (matches.length >= maxResults * 2) break; // Early exit for performance
+ }
+ }
+ } catch (err) {
+ // Skip files that can't be read
+ continue;
+ }
+
+ if (matches.length >= maxResults * 2) break;
+ }
+
+ // Sort by relevance
+ matches.sort((a, b) => b.relevanceScore - a.relevanceScore);
+
+ const topMatches = matches.slice(0, maxResults);
+
+ // Enrich if requested
+ if (enrichContext) {
+ await this.enrichMatches(topMatches.slice(0, 5));
+ }
+
+ // Cluster by file
+ const clusters = this.clusterByFile(topMatches);
+
+ return {
+ query,
+ totalMatches: matches.length,
+ matches: topMatches,
+ clusters
+ };
+ }
+
+ /**
+ * Layer 3: Enrich matches with file context
+ */
+ private async enrichMatches(matches: SearchMatch[]): Promise {
+ for (const match of matches) {
+ try {
+ const fileCtx = await this.readFileEngine.read(match.file);
+ match.context = {
+ language: fileCtx.metadata.language,
+ complexity: fileCtx.complexity.level,
+ linesOfCode: fileCtx.metadata.linesOfCode,
+ lastModified: fileCtx.metadata.lastModified
+ };
+ } catch (err) {
+ // Skip enrichment on error
+ continue;
+ }
+ }
+ }
+
+ /**
+ * Layer 2: Cluster results by directory
+ */
+ private clusterByDirectory(matches: SearchMatch[]): { [key: string]: SearchMatch[] } {
+ const clusters: { [key: string]: SearchMatch[] } = {};
+
+ for (const match of matches) {
+ const dir = path.dirname(match.relativePath);
+ if (!clusters[dir]) {
+ clusters[dir] = [];
+ }
+ clusters[dir].push(match);
+ }
+
+ return clusters;
+ }
+
+ /**
+ * Layer 2: Cluster results by file
+ */
+ private clusterByFile(matches: SearchMatch[]): { [key: string]: SearchMatch[] } {
+ const clusters: { [key: string]: SearchMatch[] } = {};
+
+ for (const match of matches) {
+ const file = match.relativePath;
+ if (!clusters[file]) {
+ clusters[file] = [];
+ }
+ clusters[file].push(match);
+ }
+
+ return clusters;
+ }
+
+ /**
+ * Layer 2: Generate search suggestions
+ */
+ private generateSuggestions(query: string, matches: SearchMatch[]): string[] {
+ const suggestions: Set = new Set();
+
+ // Suggest similar filenames
+ for (const match of matches.slice(0, 5)) {
+ const basename = path.basename(match.relativePath, path.extname(match.relativePath));
+ if (basename !== query && basename.includes(query)) {
+ suggestions.add(basename);
+ }
+ }
+
+ // Suggest common extensions
+ const extensions = new Set(matches.map(m => path.extname(m.relativePath)));
+ if (extensions.size > 1) {
+ extensions.forEach(ext => {
+ if (ext) suggestions.add(`${query}${ext}`);
+ });
+ }
+
+ return Array.from(suggestions).slice(0, 5);
+ }
+
+ /**
+ * Fuzzy match algorithm (initials matching)
+ */
+ private fuzzyMatch(text: string, pattern: string): boolean {
+ let patternIdx = 0;
+ let textIdx = 0;
+
+ while (textIdx < text.length && patternIdx < pattern.length) {
+ if (text[textIdx].toLowerCase() === pattern[patternIdx].toLowerCase()) {
+ patternIdx++;
+ }
+ textIdx++;
+ }
+
+ return patternIdx === pattern.length;
+ }
+
+ /**
+ * Get cached file content
+ */
+ private async getCachedContent(filePath: string): Promise {
+ const stats = await fs.stat(filePath);
+ const mtime = stats.mtimeMs;
+
+ const cached = this.fileCache.get(filePath);
+ if (cached && cached.mtime === mtime) {
+ return cached.content;
+ }
+
+ const content = await fs.readFile(filePath, 'utf-8');
+ this.fileCache.set(filePath, { mtime, content });
+
+ return content;
+ }
+
+ /**
+ * Recursively get all files in directory
+ */
+ private async getAllFiles(dirPath: string, relativePath: string = ''): Promise {
+ const files: string[] = [];
+
+ try {
+ const entries = await fs.readdir(dirPath, { withFileTypes: true });
+
+ for (const entry of entries) {
+ // Skip ignored patterns
+ if (this.shouldIgnore(entry.name)) {
+ continue;
+ }
+
+ const fullPath = path.join(dirPath, entry.name);
+ const relPath = path.join(relativePath, entry.name);
+
+ if (entry.isDirectory()) {
+ const subFiles = await this.getAllFiles(fullPath, relPath);
+ files.push(...subFiles);
+ } else if (entry.isFile()) {
+ files.push(relPath);
+ }
+ }
+ } catch (err) {
+ // Skip inaccessible directories
+ }
+
+ return files;
+ }
+
+ /**
+ * Check if should ignore file/directory
+ */
+ private shouldIgnore(name: string): boolean {
+ const ignorePatterns = [
+ 'node_modules',
+ '.git',
+ 'dist',
+ 'build',
+ '.next',
+ 'coverage',
+ '.cache',
+ '.DS_Store',
+ 'Thumbs.db'
+ ];
+
+ return ignorePatterns.includes(name) || name.startsWith('.');
+ }
+
+ /**
+ * Check if file is searchable (code file)
+ */
+ private isSearchableFile(filePath: string): boolean {
+ const extensions = [
+ '.ts', '.js', '.tsx', '.jsx',
+ '.py', '.go', '.rs', '.java',
+ '.cpp', '.c', '.h',
+ '.md', '.json', '.yaml', '.yml', '.toml'
+ ];
+ return extensions.some(ext => filePath.endsWith(ext))
+ && !filePath.includes('package-lock.json')
+ && !filePath.includes('yarn.lock')
+ && !filePath.includes('pnpm-lock.yaml');
+ }
+
+ /**
+ * Check if file matches pattern
+ */
+ private matchesPattern(filePath: string, pattern: string): boolean {
+ // Simple glob-like pattern matching
+ if (pattern.includes('*')) {
+ const regexPattern = pattern
+ .replace(/\./g, '\\.')
+ .replace(/\*/g, '.*');
+ return new RegExp(regexPattern).test(filePath);
+ }
+ return filePath.includes(pattern);
+ }
+}
+
diff --git a/src/server.ts b/src/server.ts
index ac2ab2c..7d8b636 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -1,3 +1,8 @@
+๏ปฟ/**
+ * Context Sync Server - Core Simplification
+ * 8 essential tools, everything else is internal
+ */
+
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import {
@@ -5,108 +10,84 @@ import {
ListToolsRequestSchema,
ListPromptsRequestSchema,
GetPromptRequestSchema,
+ ListResourcesRequestSchema,
+ ReadResourceRequestSchema,
} from '@modelcontextprotocol/sdk/types.js';
import { Storage } from './storage.js';
import { ProjectDetector } from './project-detector.js';
import { WorkspaceDetector } from './workspace-detector.js';
-import { FileWriter } from './file-writer.js';
-import { FileSearcher } from './file-searcher.js';
+import { CORE_TOOLS } from './core-tools.js';
+import type { ProjectIdentity, RememberInput, RecallResult } from './context-layers.js';
+import { ProjectProfiler } from './project-profiler.js';
+import { RecallEngine } from './recall-engine.js';
+import { RememberEngine } from './remember-engine.js';
+import { ReadFileEngine } from './read-file-engine.js';
+import { SearchEngine } from './search-engine.js';
+import { StructureEngine } from './structure-engine.js';
+import { GitStatusEngine } from './git-status-engine.js';
+import { GitContextEngine } from './git-context-engine.js';
import { GitIntegration } from './git-integration.js';
-import { DependencyAnalyzer } from './dependency-analyzer.js';
-import { CallGraphAnalyzer } from './call-graph-analyzer.js';
-import { TypeAnalyzer } from './type-analyzer.js';
-import { PlatformSync, type AIPlatform } from './platform-sync.js';
-import { PLATFORM_REGISTRY, type PlatformMetadata } from './platform-registry.js';
-import { TodoManager } from './todo-manager.js';
-import { createTodoHandlers } from './todo-handlers.js';
-import { DatabaseMigrator } from './database-migrator.js';
-import { readFileSync } from 'fs';
-import { join, dirname, basename } from 'path';
-import { fileURLToPath } from 'url';
-import { PathNormalizer } from './path-normalizer.js';
-import { PerformanceMonitor } from './performance-monitor.js';
-import { todoToolDefinitions } from './todo-tools.js';
-import { ContextAnalyzer } from './context-analyzer.js';
-import type { ProjectContext } from './types.js';
-import * as fs from 'fs';
import { NotionIntegration } from './notion-integration.js';
import { createNotionHandlers } from './notion-handlers.js';
-import { AnnouncementTracker } from './announcement-tracker.js';
+import { SchemaMigrator } from './schema-migration.js';
+import { randomUUID } from 'crypto';
+import * as fs from 'fs/promises';
+import * as path from 'path';
import * as os from 'os';
-import { promises as fsPromises } from 'fs';
+
+type PromptRequest = { params: { name: string } };
+type ResourceRequest = { params: { uri: string } };
+type ToolCallRequest = { params: { name: string; arguments?: unknown } };
export class ContextSyncServer {
private server: Server;
private storage: Storage;
private projectDetector: ProjectDetector;
private workspaceDetector: WorkspaceDetector;
- private fileWriter: FileWriter;
- private fileSearcher: FileSearcher;
- private gitIntegration: GitIntegration | null = null;
- // Lazy-loaded analyzers for better performance
- private _dependencyAnalyzer: DependencyAnalyzer | null = null;
- private _callGraphAnalyzer: CallGraphAnalyzer | null = null;
- private _typeAnalyzer: TypeAnalyzer | null = null;
- private platformSync: PlatformSync;
- private todoManager: TodoManager;
- private todoHandlers: ReturnType;
private notionIntegration: NotionIntegration | null = null;
- private notionHandlers: ReturnType | null = null;
- private announcementTracker: AnnouncementTracker;
+ private notionHandlers: ReturnType;
- // โ
NEW: Session-specific current project
+ // Session-specific current project
private currentProjectId: string | null = null;
constructor(storagePath?: string) {
-
this.storage = new Storage(storagePath);
-
- // Check for migration prompt on startup (non-blocking)
- this.checkStartupMigration();
-
this.projectDetector = new ProjectDetector(this.storage);
this.workspaceDetector = new WorkspaceDetector(this.storage, this.projectDetector);
- this.fileWriter = new FileWriter(this.workspaceDetector, this.storage);
- this.fileSearcher = new FileSearcher(this.workspaceDetector);
- this.announcementTracker = new AnnouncementTracker();
+
+ // Initialize with null integration (will be set up if config exists)
+ this.notionHandlers = createNotionHandlers(null);
+
+ // Initialize Notion integration (optional - gracefully handles missing config)
+ this.initializeNotion();
+
+ // Run v1 schema migration if needed (safe, automatic, with backup)
+ this.runMigrationIfNeeded();
this.server = new Server(
{
name: 'context-sync',
- version: '1.0.0',
+ version: '2.0.0',
},
{
capabilities: {
tools: {},
prompts: {},
+ resources: {},
},
}
);
- this.platformSync = new PlatformSync(this.storage);
- this.todoManager = new TodoManager(this.storage.getDb());
- this.todoHandlers = createTodoHandlers(this.todoManager);
-
- // Initialize Notion integration if configured
- this.initializeNotion().catch(() => {
- // Silently fail - Notion is optional
- });
-
- // Auto-detect platform
- const detectedPlatform = PlatformSync.detectPlatform();
- this.platformSync.setPlatform(detectedPlatform);
-
- this.setupToolHandlers();
- this.setupPromptHandlers();
+ this.setupHandlers();
}
-
+
/**
* Initialize Notion integration from user config
*/
private async initializeNotion(): Promise {
try {
- const configPath = join(os.homedir(), '.context-sync', 'config.json');
- const configData = await fsPromises.readFile(configPath, 'utf-8');
+ const configPath = path.join(os.homedir(), '.context-sync', 'config.json');
+ const configData = await fs.readFile(configPath, 'utf-8');
const config = JSON.parse(configData);
if (config.notion?.token) {
@@ -114,3804 +95,2141 @@ export class ContextSyncServer {
token: config.notion.token,
defaultParentPageId: config.notion.defaultParentPageId
});
- this.notionHandlers = createNotionHandlers(this.notionIntegration);
- } else {
- this.notionHandlers = createNotionHandlers(null);
}
} catch {
// Config doesn't exist or invalid - Notion not configured
this.notionIntegration = null;
- this.notionHandlers = createNotionHandlers(null);
- }
- }
-
- /**
- * Check for migration prompt on startup (non-blocking)
- */
- private async checkStartupMigration(): Promise {
- try {
- const version = this.getVersion();
- const migrationCheck = await this.storage.checkMigrationPrompt(version);
-
- if (migrationCheck.shouldPrompt) {
- // Log to stderr so it shows in the MCP client without interfering with responses
- console.error('\n' + '='.repeat(80));
- console.error('CONTEXT SYNC DATABASE OPTIMIZATION AVAILABLE');
- console.error('='.repeat(80));
- console.error(migrationCheck.message.replace(/\*\*([^*]+)\*\*/g, '$1')); // Remove markdown formatting for console
- console.error('='.repeat(80) + '\n');
- }
- } catch (error) {
- // Silently fail - don't disrupt server startup
- console.warn('Startup migration check failed:', error);
}
+
+ // Always create handlers (they handle null gracefully)
+ this.notionHandlers = createNotionHandlers(this.notionIntegration);
}
/**
- * Get the current version from package.json
+ * Run v1 schema migration automatically if needed
+ * Safe, atomic, with automatic backup
*/
- private getVersion(): string {
+ private runMigrationIfNeeded(): void {
try {
- // Get the directory of the current module
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = dirname(__filename);
+ const migrator = new SchemaMigrator(this.storage.getDb());
- // Look for package.json in parent directories
- let currentDir = __dirname;
- while (currentDir !== dirname(currentDir)) {
- try {
- const packagePath = join(currentDir, 'package.json');
- const packageJson = JSON.parse(readFileSync(packagePath, 'utf8'));
- if (packageJson.name === '@context-sync/server') {
- return packageJson.version;
- }
- } catch {
- // Continue searching in parent directory
- }
- currentDir = dirname(currentDir);
+ // Check if migration already completed
+ if (SchemaMigrator.hasCompletedMigration(this.storage.getDb())) {
+ return; // Already migrated
}
- // Fallback: try to read from installed package location
- try {
- const installedPackagePath = join(process.cwd(), '..', '..', 'package.json');
- const packageJson = JSON.parse(readFileSync(installedPackagePath, 'utf8'));
- if (packageJson.name === '@context-sync/server') {
- return packageJson.version;
- }
- } catch {
- // Fallback failed
+ // Check if migration is needed
+ if (!migrator.needsMigration()) {
+ return; // No v1 data to migrate
}
- return '1.0.0'; // Fallback version
- } catch (error) {
- return '1.0.0'; // Fallback version
+ // Perform migration (synchronous)
+ console.error('\n' + '='.repeat(60));
+ console.error(' Context Sync - First Time Setup');
+ console.error('='.repeat(60));
+ console.error('Detected v1.x data. Migrating to current schema...');
+ console.error('This is safe and automatic. A backup will be created.\n');
+
+ const result = migrator.migrateSync();
+
+ if (result.success) {
+ console.error('\n' + '='.repeat(60));
+ console.error(' Migration Complete!');
+ console.error('='.repeat(60));
+ console.error('Your context has been preserved and enhanced.');
+ console.error('Continue using Context Sync normally.');
+ console.error('Backup saved to:', result.backupPath);
+ console.error('='.repeat(60) + '\n');
+ } else {
+ console.error('\n Migration encountered issues:');
+ result.errors.forEach((err: string) => console.error(` ${err}`));
+ console.error('\nYour data is safe. Please report this issue.');
+ console.error('Backup available at:', result.backupPath, '\n');
+ }
+ } catch (error: any) {
+ console.error(' Migration check failed:', error.message);
+ console.error('Continuing with current database state...\n');
}
}
- /**
- * Get current project from session state
- */
- private getCurrentProject(): ProjectContext | null {
- if (!this.currentProjectId) return null;
- return this.storage.getProject(this.currentProjectId);
- }
+ private setupHandlers(): void {
+ // List our 8 core tools
+ this.server.setRequestHandler(ListToolsRequestSchema, async () => ({
+ tools: CORE_TOOLS,
+ }));
- private setupPromptHandlers(): void {
- this.server.setRequestHandler(ListPromptsRequestSchema, async () => {
- const prompts = [
+ // List available prompts (AI usage instructions)
+ this.server.setRequestHandler(ListPromptsRequestSchema, async () => ({
+ prompts: [
{
- name: 'project_context',
- description: 'Automatically inject active project context into conversation',
- arguments: [],
+ name: 'context-sync-usage',
+ description: 'Complete guide on how to use Context Sync effectively as an AI agent',
},
- ];
-
- // Add Notion announcement prompt if it should be shown
- try {
- const announcement = this.announcementTracker.shouldShow();
- if (announcement) {
- prompts.push({
- name: 'notion_announcement',
- description: 'Important announcement about Context Sync Notion integration',
- arguments: [],
- });
- }
- } catch (error) {
- // Silently ignore announcement errors
- console.warn('Notion announcement check failed:', error);
- }
-
- // Add migration prompt for v1.0.0+ users with duplicates
- try {
- const version = this.getVersion();
- const migrationCheck = await this.storage.checkMigrationPrompt(version);
- if (migrationCheck.shouldPrompt) {
- prompts.push({
- name: 'migration_prompt',
- description: 'Database optimization prompt for Context Sync v1.0.0+ users with duplicate projects',
- arguments: [],
- });
- }
- } catch (error) {
- // Silently ignore migration prompt errors
- console.warn('Migration prompt check failed:', error);
- }
-
- return { prompts };
- });
-
- this.server.setRequestHandler(GetPromptRequestSchema, async (request) => {
- if (request.params.name === 'project_context') {
- const project = this.getCurrentProject();
- if (!project) {
- return {
- description: 'No active project',
- messages: [],
- };
- }
+ {
+ name: 'debugging-context-sync',
+ description: 'How to debug Context Sync when things go wrong',
+ },
+ ],
+ }));
- const summary = this.storage.getContextSummary(project.id);
- const contextMessage = this.buildContextPrompt(summary);
+ // Get prompt content
+ this.server.setRequestHandler(GetPromptRequestSchema, async (request: PromptRequest) => {
+ const { name } = request.params;
+ if (name === 'context-sync-usage') {
return {
- description: `Context for ${project.name}`,
messages: [
{
role: 'user',
content: {
type: 'text',
- text: contextMessage,
+ text: `# Context Sync - AI Agent Usage Guide
+
+## Core Philosophy
+Context Sync is YOUR memory system. Use it to understand projects deeply and maintain context across sessions.
+
+## Correct Tool Flow
+
+### 1 ALWAYS START: set_project
+**Before doing ANYTHING else in a new project:**
+\`\`\`
+set_project({ path: "/absolute/path/to/project", purpose: "Brief description" })
+\`\`\`
+
+This initializes the project and captures:
+- Tech stack detection (Go, TypeScript, Python, etc.)
+- Dependencies with exact versions
+- Build system and commands
+- Test framework and coverage
+- Environment variables
+- Services and databases
+- Quality metrics
+
+** WRONG:** Calling structure() or search() before set_project
+** RIGHT:** set_project then structure/search/recall
+
+### 2 Explore with structure() and search()
+\`\`\`
+structure({ depth: 3 }) // Get project file tree
+search({ query: "function name", type: "content" }) // Find code
+\`\`\`
+
+### 3 Save important context with remember()
+\`\`\`
+remember({
+ type: "constraint",
+ content: "Always use TypeScript strict mode",
+ metadata: { files: ["tsconfig.json"] }
+})
+\`\`\`
+
+Types: active_work, constraint, problem, goal, decision, note
+
+### 4 Retrieve context with recall()
+\`\`\`
+recall({ query: "what were we working on?" })
+\`\`\`
+
+Returns: active work, constraints, problems, goals, decisions, notes
+
+## Common Mistakes
+
+### Mistake 1: Calling tools before set_project
+\`\`\`
+ structure() Error: No project set
+ set_project() structure() Success
+\`\`\`
+
+### Mistake 2: Using wrong project path
+Context Sync tracks ONE project at a time. If you call set_project with a different path, it switches to that project.
+
+### Mistake 3: Not using recall() at session start
+When a user says "continue where we left off" or "good morning", ALWAYS call recall() first.
+
+## Pro Tips
+
+1. **set_project is SMART** - It detects multi-language projects (e.g., Go app distributed via npm)
+2. **Use remember() liberally** - Save architectural decisions, constraints, active work
+3. **structure() before read_file** - Understand layout first, then read specific files
+4. **git_status + git_context** - Perfect combo for understanding recent changes
+
+## Command Language (User-Facing)
+Users may use these natural commands:
+- "cs init" set_project
+- "cs remember X" remember(type: note, content: X)
+- "cs recall" or "cs status" recall()
+- "cs constraint X" remember(type: constraint, ...)
+
+## Tool Chain Examples
+
+### Example 1: New Project Investigation
+\`\`\`
+1. set_project({ path: "/path/to/project" })
+2. structure({ depth: 2 })
+3. search({ query: "main entry point", type: "files" })
+4. read_file({ path: "src/index.ts" })
+5. remember({ type: "active_work", content: "Investigating project structure" })
+\`\`\`
+
+### Example 2: Debugging Session
+\`\`\`
+1. set_project({ path: "/path/to/project" })
+2. recall() // What was I working on?
+3. git_status() // What changed?
+4. git_context({ staged: false }) // Show me the diff
+5. remember({ type: "problem", content: "Bug in user auth" })
+\`\`\`
+
+### Example 3: "Good Morning" Handoff
+\`\`\`
+1. set_project({ path: "/path/to/project" })
+2. recall({ query: "active work and recent decisions" })
+3. git_status() // Any uncommitted changes?
+4. structure() // Refresh mental model
+\`\`\``,
},
},
],
};
}
- if (request.params.name === 'notion_announcement') {
- const announcement = this.announcementTracker.shouldShow();
-
- if (announcement) {
- return {
- description: 'Context Sync Notion Integration Available',
- messages: [
- {
- role: 'user',
- content: {
- type: 'text',
- text: announcement,
- },
+ if (name === 'debugging-context-sync') {
+ return {
+ messages: [
+ {
+ role: 'user',
+ content: {
+ type: 'text',
+ text: `# Debugging Context Sync
+
+## Common Issues & Solutions
+
+### Issue 1: "No project set" error
+**Cause:** Trying to use tools before initializing
+**Solution:** Always call set_project() first
+
+### Issue 2: Wrong tech stack detected
+**Symptoms:** Go project shows as Node.js (or vice versa)
+**Cause:** Multi-language project (e.g., Go app with npm packaging)
+**Debug:**
+\`\`\`
+1. set_project({ path: "/path" })
+2. structure({ depth: 2 }) // Check for package.json, go.mod, etc.
+3. Check packaging/ folder for distribution wrappers
+\`\`\`
+
+**Context Sync prioritizes:**
+1. Node.js (if package.json exists)
+2. Python (if requirements.txt/pyproject.toml)
+3. Rust (if Cargo.toml)
+4. Go (if go.mod)
+
+### Issue 3: Missing dependencies/metrics
+**Cause:** Project not fully analyzed
+**Solution:** Re-run set_project, check for node_modules or equivalent
+
+### Issue 4: Database errors (e.g., "no such table")
+**Cause:** schema not migrated
+**Solution:** Delete ~/.context-sync/data.db and reinitialize
+
+### Issue 5: Tool returns empty results
+**Cause:** Wrong project path or not initialized
+**Debug:**
+\`\`\`
+1. Check current project: recall()
+2. Verify path exists: set_project with correct absolute path
+3. Confirm structure: structure({ depth: 1 })
+\`\`\`
+
+## Self-Testing Context Sync
+
+If you suspect Context Sync is broken:
+\`\`\`
+1. set_project({ path: "/known/working/project" })
+2. structure() // Should show file tree
+3. remember({ type: "note", content: "Test note" })
+4. recall() // Should show the test note
+\`\`\`
+
+## Understanding Detection Results
+
+When set_project shows unexpected results, USE structure() to understand WHY:
+- See what config files exist (package.json, go.mod, Cargo.toml)
+- Check for packaging/ folder (distribution wrappers)
+- Look for multiple language directories
+
+Example: Jot project
+- Has: go.mod, main.go (Go source)
+- Also has: packaging/npm/package.json (npm distribution)
+- Correct detection: Go (primary language)
+- npm is just a distribution wrapper`,
},
- ],
- };
- } else {
- return {
- description: 'No announcement needed',
- messages: [],
- };
- }
+ },
+ ],
+ };
}
- throw new Error(`Unknown prompt: ${request.params.name}`);
+ throw new Error(`Unknown prompt: ${name}`);
});
- }
- private buildContextPrompt(summary: any): string {
- const { project, recentDecisions } = summary;
+ // List available resources (AI documentation)
+ this.server.setRequestHandler(ListResourcesRequestSchema, async () => ({
+ resources: [
+ {
+ uri: 'context-sync://docs/usage-guide',
+ mimeType: 'text/markdown',
+ name: 'Context Sync Usage Guide',
+ description: 'Complete guide on how to use Context Sync effectively as an AI agent',
+ },
+ {
+ uri: 'context-sync://docs/debugging-guide',
+ mimeType: 'text/markdown',
+ name: 'Debugging Context Sync',
+ description: 'How to debug Context Sync when things go wrong',
+ },
+ {
+ uri: 'context-sync://docs/tool-flow',
+ mimeType: 'text/markdown',
+ name: 'Tool Flow Patterns',
+ description: 'Common tool usage patterns and workflows',
+ },
+ ],
+ }));
- let prompt = `[ACTIVE PROJECT CONTEXT - Auto-loaded]\n\n`;
- prompt += `๐ Project: ${project.name}\n`;
+ // Read resource content
+ this.server.setRequestHandler(ReadResourceRequestSchema, async (request: ResourceRequest) => {
+ const { uri } = request.params;
- if (project.architecture) {
- prompt += `๐๏ธ Architecture: ${project.architecture}\n`;
- }
+ if (uri === 'context-sync://docs/usage-guide') {
+ return {
+ contents: [
+ {
+ uri,
+ mimeType: 'text/markdown',
+ text: `# Context Sync - AI Agent Usage Guide
+
+## Core Philosophy
+Context Sync is YOUR memory system. Use it to understand projects deeply and maintain context across sessions.
+
+## Correct Tool Flow
+
+### 1 ALWAYS START: set_project
+**Before doing ANYTHING else in a new project:**
+\`\`\`javascript
+set_project({ path: "/absolute/path/to/project", purpose: "Brief description" })
+\`\`\`
+
+This initializes the project and captures:
+- Tech stack detection (Go, TypeScript, Python, etc.)
+- Dependencies with exact versions
+- Build system and commands
+- Test framework and coverage
+- Environment variables
+- Services and databases
+- Quality metrics
+
+** WRONG:** Calling structure() or search() before set_project
+** RIGHT:** set_project then structure/search/recall
+
+### 2 Explore with structure() and search()
+\`\`\`javascript
+structure({ depth: 3 }) // Get project file tree
+search({ query: "function name", type: "content" }) // Find code
+\`\`\`
+
+### 3 Save important context with remember()
+\`\`\`javascript
+remember({
+ type: "constraint",
+ content: "Always use TypeScript strict mode",
+ metadata: { files: ["tsconfig.json"] }
+})
+\`\`\`
+
+Types: active_work, constraint, problem, goal, decision, note
+
+### 4 Retrieve context with recall()
+\`\`\`javascript
+recall({ query: "what were we working on?" })
+\`\`\`
+
+Returns: active work, constraints, problems, goals, decisions, notes
+
+## Common Mistakes
+
+### Mistake 1: Calling tools before set_project
+\`\`\`
+ structure() Error: No project set
+ set_project() structure() Success
+\`\`\`
+
+### Mistake 2: Using wrong project path
+Context Sync tracks ONE project at a time. If you call set_project with a different path, it switches to that project.
+
+### Mistake 3: Not using recall() at session start
+When a user says "continue where we left off" or "good morning", ALWAYS call recall() first.
+
+## Pro Tips
+
+1. **set_project is SMART** - It detects multi-language projects (e.g., Go app distributed via npm)
+2. **Use remember() liberally** - Save architectural decisions, constraints, active work
+3. **structure() before read_file** - Understand layout first, then read specific files
+4. **git_status + git_context** - Perfect combo for understanding recent changes
+
+## Command Language (User-Facing)
+Users may use these natural commands:
+- "cs init" set_project
+- "cs remember X" remember(type: note, content: X)
+- "cs recall" or "cs status" recall()
+- "cs constraint X" remember(type: constraint, ...)`,
+ },
+ ],
+ };
+ }
- if (project.techStack && project.techStack.length > 0) {
- prompt += `โ๏ธ Tech Stack: ${project.techStack.join(', ')}\n`;
- }
+ if (uri === 'context-sync://docs/debugging-guide') {
+ return {
+ contents: [
+ {
+ uri,
+ mimeType: 'text/markdown',
+ text: `# Debugging Context Sync
+
+## Common Issues & Solutions
+
+### Issue 1: "No project set" error
+**Cause:** Trying to use tools before initializing
+**Solution:** Always call set_project() first
+
+### Issue 2: Wrong tech stack detected
+**Symptoms:** Go project shows as Node.js (or vice versa)
+**Cause:** Multi-language project (e.g., Go app with npm packaging)
+**Debug:**
+\`\`\`
+1. set_project({ path: "/path" })
+2. structure({ depth: 2 }) // Check for package.json, go.mod, etc.
+3. Check packaging/ folder for distribution wrappers
+\`\`\`
+
+**Context Sync prioritizes:**
+1. Node.js (if package.json exists)
+2. Python (if requirements.txt/pyproject.toml)
+3. Rust (if Cargo.toml)
+4. Go (if go.mod)
+
+### Issue 3: Missing dependencies/metrics
+**Cause:** Project not fully analyzed
+**Solution:** Re-run set_project, check for node_modules or equivalent
+
+### Issue 4: Database errors (e.g., "no such table")
+**Cause:** schema not migrated
+**Solution:** Delete ~/.context-sync/data.db and reinitialize
+
+### Issue 5: Tool returns empty results
+**Cause:** Wrong project path or not initialized
+**Debug:**
+\`\`\`
+1. Check current project: recall()
+2. Verify path exists: set_project with correct absolute path
+3. Confirm structure: structure({ depth: 1 })
+\`\`\`
+
+## Self-Testing Context Sync
+
+If you suspect Context Sync is broken:
+\`\`\`
+1. set_project({ path: "/known/working/project" })
+2. structure() // Should show file tree
+3. remember({ type: "note", content: "Test note" })
+4. recall() // Should show the test note
+\`\`\`
+
+## Understanding Detection Results
+
+When set_project shows unexpected results, USE structure() to understand WHY:
+- See what config files exist (package.json, go.mod, Cargo.toml)
+- Check for packaging/ folder (distribution wrappers)
+- Look for multiple language directories
+
+Example: Jot project
+- Has: go.mod, main.go (Go source)
+- Also has: packaging/npm/package.json (npm distribution)
+- Correct detection: Go (primary language)
+- npm is just a distribution wrapper`,
+ },
+ ],
+ };
+ }
- if (recentDecisions.length > 0) {
- prompt += `\n๐ Recent Decisions:\n`;
- recentDecisions.slice(0, 5).forEach((d: any, i: number) => {
- prompt += `${i + 1}. [${d.type}] ${d.description}`;
- if (d.reasoning) {
- prompt += ` - ${d.reasoning}`;
- }
- prompt += `\n`;
- });
- }
+ if (uri === 'context-sync://docs/tool-flow') {
+ return {
+ contents: [
+ {
+ uri,
+ mimeType: 'text/markdown',
+ text: `# Tool Flow Patterns
+
+## Pattern 1: New Project Investigation
+\`\`\`javascript
+1. set_project({ path: "/path/to/project" })
+2. structure({ depth: 2 })
+3. search({ query: "main entry point", type: "files" })
+4. read_file({ path: "src/index.ts" })
+5. remember({ type: "active_work", content: "Investigating project structure" })
+\`\`\`
+
+## Pattern 2: Debugging Session
+\`\`\`javascript
+1. set_project({ path: "/path/to/project" })
+2. recall() // What was I working on?
+3. git_status() // What changed?
+4. git_context({ staged: false }) // Show me the diff
+5. remember({ type: "problem", content: "Bug in user auth" })
+\`\`\`
+
+## Pattern 3: "Good Morning" Handoff
+\`\`\`javascript
+1. set_project({ path: "/path/to/project" })
+2. recall({ query: "active work and recent decisions" })
+3. git_status() // Any uncommitted changes?
+4. structure() // Refresh mental model
+\`\`\`
+
+## Pattern 4: Architecture Analysis
+\`\`\`javascript
+1. set_project({ path: "/path/to/project" })
+2. structure({ depth: 3 }) // Full tree
+3. search({ query: "class|interface|type", type: "content" })
+4. remember({ type: "constraint", content: "Layered architecture: UI Service Data" })
+\`\`\`
+
+## Pattern 5: Feature Implementation
+\`\`\`javascript
+1. recall() // Check existing constraints
+2. search({ query: "similar feature", type: "content" })
+3. read_file({ path: "examples/feature.ts" })
+4. remember({ type: "decision", content: "Using X pattern because Y" })
+5. git_status() // Track changes
+\`\`\`
+
+## Quick Reference
+
+**Always start with:** set_project()
+**For exploration:** structure() search() read_file()
+**For memory:** remember() and recall()
+**For changes:** git_status() git_context()`,
+ },
+ ],
+ };
+ }
- const lastUpdated = new Date(project.updatedAt).toLocaleString();
- prompt += `\n๐ Last Updated: ${lastUpdated}\n`;
+ throw new Error(`Unknown resource: ${uri}`);
+ });
- prompt += `\n---\n`;
- prompt += `FILE WRITING WORKFLOW (v0.3.0):\n\n`;
- prompt += `When user requests file creation/modification:\n`;
- prompt += `1. Call create_file/modify_file/delete_file โ Shows preview\n`;
- prompt += `2. Ask user: "Should I proceed?" or "Approve this change?"\n`;
- prompt += `3. If user says yes/approve/go ahead:\n`;
- prompt += ` โ Call apply_create_file/apply_modify_file/apply_delete_file\n`;
- prompt += `4. If user says no โ Don't call apply tools\n\n`;
- prompt += `IMPORTANT: Always wait for explicit user approval before calling apply_* tools!\n`;
+ // Handle tool calls
+ this.server.setRequestHandler(CallToolRequestSchema, async (request: ToolCallRequest) => {
+ const { name, arguments: args } = request.params;
- return prompt;
+ switch (name) {
+ case 'set_project':
+ return await this.handleSetProject(args as any);
+ case 'remember':
+ return await this.handleRemember(args as any);
+ case 'recall':
+ return await this.handleRecall(args as any);
+ case 'read_file':
+ return await this.handleReadFile(args as any);
+ case 'search':
+ return await this.handleSearch(args as any);
+ case 'structure':
+ return await this.handleStructure(args as any);
+ case 'git':
+ return await this.handleGit(args as any);
+ case 'notion':
+ return await this.handleNotion(args as any);
+ default:
+ throw new Error(`Unknown tool: ${name}`);
+ }
+ });
}
- private buildMigrationPrompt(migrationMessage: string): string {
- let prompt = `[CONTEXT SYNC DATABASE OPTIMIZATION REQUIRED]\n\n`;
-
- prompt += `โ ๏ธ **IMPORTANT**: Your Context Sync database needs optimization for better performance.\n\n`;
-
- // Add the migration message but format it for AI assistant context
- prompt += migrationMessage.replace(/๐ง \*\*Context Sync v1\.0\.0\+ Database Optimization Available\*\*/, '');
-
- prompt += `\n\n[IMMEDIATE ACTION REQUIRED]\n`;
- prompt += `Please inform the user about this database optimization opportunity and guide them through the migration process. `;
- prompt += `This is critical for optimal Context Sync performance.\n\n`;
-
- prompt += `**Recommended approach:**\n`;
- prompt += `1. Explain the benefits of database optimization\n`;
- prompt += `2. Show them how to preview changes: \`get_migration_stats\`\n`;
- prompt += `3. Guide them through safe migration: \`migrate_database dryRun:true\` then \`migrate_database\`\n`;
- prompt += `4. Emphasize data safety - all project data will be preserved\n\n`;
-
- prompt += `**This prompt will not appear again once migration is completed.**`;
-
- return prompt;
- }
+ // ========== CORE HANDLERS ==========
/**
- * Lazy getter for DependencyAnalyzer - only creates when needed
+ * Initialize a project - DEEP ANALYSIS (10x context quality)
+ * Goes far beyond basic detection:
+ * - Exact dependency versions
+ * - Build system commands
+ * - Test framework & coverage
+ * - Environment variables
+ * - Services & databases
+ * - Quality metrics & hotspots
*/
- private get dependencyAnalyzer(): DependencyAnalyzer | null {
- if (!this._dependencyAnalyzer) {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (workspace) {
- this._dependencyAnalyzer = new DependencyAnalyzer(workspace);
+ private async handleSetProject(args: { path: string; purpose?: string }) {
+ try {
+ const { path: projectPath, purpose } = args;
+
+ // Validate path
+ await fs.access(projectPath);
+ const stats = await fs.stat(projectPath);
+ if (!stats.isDirectory()) {
+ throw new Error('Path must be a directory');
}
- }
- return this._dependencyAnalyzer;
- }
- /**
- * Lazy getter for CallGraphAnalyzer - only creates when needed
- */
- private get callGraphAnalyzer(): CallGraphAnalyzer | null {
- if (!this._callGraphAnalyzer) {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (workspace) {
- this._callGraphAnalyzer = new CallGraphAnalyzer(workspace);
+ // Initialize workspace
+ await this.workspaceDetector.setWorkspace(projectPath);
+
+ let structurePreview = '';
+ try {
+ const structureEngine = new StructureEngine(projectPath);
+ const structure = await structureEngine.getStructure(2, {
+ includeMetadata: false,
+ analyzeComplexity: false,
+ detectHotspots: false
+ });
+ structurePreview = ` **Project Structure (depth 2)**\n\n\`\`\`\n${structure.tree}\`\`\`\n\n`;
+ } catch (error: any) {
+ structurePreview = '';
}
- }
- return this._callGraphAnalyzer;
- }
- /**
- * Lazy getter for TypeAnalyzer - only creates when needed
- */
- private get typeAnalyzer(): TypeAnalyzer | null {
- if (!this._typeAnalyzer) {
- const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (workspace) {
- this._typeAnalyzer = new TypeAnalyzer(workspace);
+ // Check if project already exists in database
+ // NORMALIZE paths to lowercase for case-insensitive comparison (Windows)
+ const normalizedPath = projectPath.toLowerCase();
+ const existingProjects = this.storage.getAllProjects();
+ const existing = existingProjects.find(p => p.path?.toLowerCase() === normalizedPath);
+
+ if (existing) {
+ // PROJECT EXISTS - Just set as current workspace, no re-analysis
+ this.currentProjectId = existing.id;
+
+ // Generate lightweight response
+ const db = this.storage.getDb();
+ const deps = db.prepare('SELECT COUNT(*) as count FROM project_dependencies WHERE project_id = ? AND critical = 1').get(existing.id) as any;
+ const envVars = db.prepare('SELECT COUNT(*) as count FROM project_env_vars WHERE project_id = ? AND required = 1').get(existing.id) as any;
+ const services = db.prepare('SELECT COUNT(*) as count FROM project_services WHERE project_id = ?').get(existing.id) as any;
+ const databases = db.prepare('SELECT COUNT(*) as count FROM project_databases WHERE project_id = ?').get(existing.id) as any;
+ const metrics = db.prepare('SELECT * FROM project_metrics WHERE project_id = ?').get(existing.id) as any;
+
+ let response = structurePreview;
+ response += ` **Workspace Set: ${existing.name}**\n\n`;
+ response += ` **Path:** ${projectPath}\n`;
+ response += ` **Tech Stack:** ${Array.isArray(existing.techStack) ? existing.techStack.join(', ') : existing.techStack}\n\n`;
+
+ if (deps?.count > 0) response += ` ${deps.count} dependencies tracked\n`;
+ if (envVars?.count > 0) response += ` ${envVars.count} required env vars\n`;
+ if (services?.count > 0) response += ` ${services.count} service(s)\n`;
+ if (databases?.count > 0) response += ` ${databases.count} database(s)\n`;
+ if (metrics) response += ` ${metrics.lines_of_code.toLocaleString()} LOC, ${metrics.file_count} files\n`;
+
+ response += `\n **Project context loaded. Use \`recall\` to see what you were working on.**`;
+
+ return {
+ content: [{ type: 'text', text: response }],
+ };
}
- }
- return this._typeAnalyzer;
- }
- private setupToolHandlers(): void {
- this.server.setRequestHandler(ListToolsRequestSchema, async () => ({
- tools: [
- // V0.2.0 tools
- ...this.getV02Tools(),
- // V0.3.0 tools (including apply_* tools)
- ...this.getV03Tools(),
- // V1.0.0 - Notion Integration tools
- ...this.getNotionTools(),
- ],
- }));
+ // NEW PROJECT - Run optimized deep analysis (3-layer architecture)
+ console.error(' Running optimized project detection (first time)...');
+ const analysis = await ProjectProfiler.analyze(projectPath);
- this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
- const { name, arguments: args } = request.params;
+ // Create new project
+ const project = this.storage.createProject(
+ path.basename(projectPath),
+ projectPath
+ );
+
+ this.storage.updateProject(project.id, {
+ architecture: analysis.architecture,
+ techStack: analysis.techStack,
+ updatedAt: new Date()
+ });
+
+ // Store enhanced identity data
+ const db = this.storage.getDb();
+ const timestamp = Date.now();
+
+ // Dependencies
+ for (const dep of analysis.dependencies) {
+ db.prepare(`
+ INSERT INTO project_dependencies (id, project_id, name, version, critical, dev)
+ VALUES (?, ?, ?, ?, ?, ?)
+ `).run(randomUUID(), project.id, dep.name, dep.version, dep.critical ? 1 : 0, dep.dev ? 1 : 0);
+ }
+
+ // Build system
+ db.prepare(`
+ INSERT OR REPLACE INTO project_build_system (project_id, type, commands, config_file)
+ VALUES (?, ?, ?, ?)
+ `).run(project.id, analysis.buildSystem.type, JSON.stringify(analysis.buildSystem.commands), analysis.buildSystem.configFile);
+
+ // Test framework
+ if (analysis.testFramework) {
+ db.prepare(`
+ INSERT OR REPLACE INTO project_test_framework (project_id, name, pattern, config_file, coverage)
+ VALUES (?, ?, ?, ?, ?)
+ `).run(
+ project.id,
+ analysis.testFramework.name,
+ analysis.testFramework.pattern,
+ analysis.testFramework.configFile || null,
+ analysis.testFramework.coverage
+ );
+ }
+
+ // Environment variables
+ for (const varName of analysis.envVars.required) {
+ db.prepare(`
+ INSERT INTO project_env_vars (id, project_id, var_name, required, example_value)
+ VALUES (?, ?, ?, 1, ?)
+ `).run(randomUUID(), project.id, varName, analysis.envVars.example[varName] || null);
+ }
+ for (const varName of analysis.envVars.optional) {
+ if (!analysis.envVars.required.includes(varName)) {
+ db.prepare(`
+ INSERT INTO project_env_vars (id, project_id, var_name, required, example_value)
+ VALUES (?, ?, ?, 0, ?)
+ `).run(randomUUID(), project.id, varName, analysis.envVars.example[varName] || null);
+ }
+ }
+
+ // Services
+ for (const service of analysis.services) {
+ db.prepare(`
+ INSERT INTO project_services (id, project_id, name, port, protocol, health_check)
+ VALUES (?, ?, ?, ?, ?, ?)
+ `).run(randomUUID(), project.id, service.name, service.port, service.protocol, service.healthCheck || null);
+ }
+
+ // Databases
+ for (const database of analysis.databases) {
+ db.prepare(`
+ INSERT INTO project_databases (id, project_id, type, connection_var, migrations, migrations_path)
+ VALUES (?, ?, ?, ?, ?, ?)
+ `).run(
+ randomUUID(),
+ project.id,
+ database.type,
+ database.connectionVar || null,
+ database.migrations ? 1 : 0,
+ database.migrationsPath || null
+ );
+ }
+
+ // Metrics
+ db.prepare(`
+ INSERT OR REPLACE INTO project_metrics (project_id, lines_of_code, file_count, last_commit, contributors, hotspots, complexity, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+ `).run(
+ project.id,
+ analysis.metrics.linesOfCode,
+ analysis.metrics.fileCount,
+ '', // lastCommit - empty string instead of null
+ 0, // contributors - 0 instead of null
+ '[]', // hotspots - empty array
+ analysis.metrics.complexity,
+ timestamp
+ );
+
+ // Set as current session project
+ this.currentProjectId = project.id;
- // Check if we should show Notion announcement (on any tool call)
- const announcement = this.announcementTracker.shouldShow();
+ // Generate comprehensive response
+ let response = structurePreview;
+ response += ` **Project Initialized: ${project.name}**\n\n`;
+ response += ` **Scan time:** ${analysis.scanTimeMs}ms\n\n`;
+
+ response += ` **Architecture:** ${analysis.architecture}\n`;
+ response += ` **Tech Stack:** ${analysis.techStack.join(', ')}\n\n`;
+
+ // Dependencies summary
+ const criticalDeps = analysis.dependencies.filter(d => d.critical && !d.dev);
+ if (criticalDeps.length > 0) {
+ response += ` **Core Dependencies** (${criticalDeps.length}):\n`;
+ criticalDeps.slice(0, 5).forEach(d => {
+ response += ` ${d.name}@${d.version}\n`;
+ });
+ if (criticalDeps.length > 5) {
+ response += ` ... and ${criticalDeps.length - 5} more\n`;
+ }
+ response += `\n`;
+ }
- // V0.2.0 handlers
- // Prepend announcement to the response of the first tool called
- if (name === 'get_project_context') {
- const result = await this.handleGetContext();
- if (announcement && result.content[0].type === 'text') {
- result.content[0].text = this.prependAnnouncement(result.content[0].text, announcement);
+ // Build system
+ if (analysis.buildSystem.type !== 'unknown') {
+ response += ` **Build System:** ${analysis.buildSystem.type}\n`;
+ if (analysis.buildSystem.commands.build) {
+ response += ` Build: \`${analysis.buildSystem.commands.build}\`\n`;
+ }
+ if (analysis.buildSystem.commands.test) {
+ response += ` Test: \`${analysis.buildSystem.commands.test}\`\n`;
}
- return result;
+ response += `\n`;
}
- if (name === 'save_decision') return this.handleSaveDecision(args as any);
- if (name === 'save_conversation') return this.handleSaveConversation(args as any);
-
- if (name === 'set_workspace') {
- const result = await this.handleSetWorkspace(args as any);
- if (announcement && result.content[0].type === 'text') {
- result.content[0].text = this.prependAnnouncement(result.content[0].text, announcement);
+ // Test framework
+ if (analysis.testFramework) {
+ response += ` **Testing:** ${analysis.testFramework.name}`;
+ if (analysis.testFramework.coverage !== null) {
+ response += ` (${analysis.testFramework.coverage}% coverage)`;
}
- return result;
+ response += `\n\n`;
}
- if (name === 'read_file') return this.handleReadFile(args as any);
- if (name === 'get_project_structure') return this.handleGetProjectStructure(args as any);
- if (name === 'scan_workspace') return this.handleScanWorkspace();
-
- // V0.3.0 - Preview tools
- if (name === 'create_file') return this.handleCreateFile(args as any);
- if (name === 'modify_file') return this.handleModifyFile(args as any);
- if (name === 'delete_file') return this.handleDeleteFile(args as any);
+ // Environment variables
+ if (analysis.envVars.required.length > 0) {
+ response += ` **Required Env Vars:** ${analysis.envVars.required.slice(0, 3).join(', ')}`;
+ if (analysis.envVars.required.length > 3) {
+ response += ` +${analysis.envVars.required.length - 3} more`;
+ }
+ response += `\n\n`;
+ }
- // V0.3.0 - Apply tools (NEW!)
- if (name === 'apply_create_file') return this.handleApplyCreateFile(args as any);
- if (name === 'apply_modify_file') return this.handleApplyModifyFile(args as any);
- if (name === 'apply_delete_file') return this.handleApplyDeleteFile(args as any);
+ // Services
+ if (analysis.services.length > 0) {
+ response += ` **Services:**\n`;
+ analysis.services.forEach(s => {
+ response += ` ${s.name}${s.port ? ` (port ${s.port})` : ''}\n`;
+ });
+ response += `\n`;
+ }
- // V0.3.0 - Other tools
- if (name === 'undo_file_change') return this.handleUndoFileChange(args as any);
- if (name === 'search_files') return await this.handleSearchFiles(args as any);
- if (name === 'search_content') return await this.handleSearchContent(args as any);
- if (name === 'git_status') return this.handleGitStatus();
- if (name === 'git_diff') return this.handleGitDiff(args as any);
- if (name === 'git_branch_info') return this.handleGitBranchInfo(args as any);
- if (name === 'suggest_commit_message') return this.handleSuggestCommitMessage(args as any);
-
- // V0.4.0 - Dependency Analysis
- if (name === 'analyze_dependencies') return this.handleAnalyzeDependencies(args as any);
- if (name === 'detect_circular_deps') return this.handleDetectCircularDeps(args as any);
-
- // V0.4.0 - Call Graph Analysis
- if (name === 'analyze_call_graph') return this.handleAnalyzeCallGraph(args as any);
-
- // V0.4.0 - Type Analysis
- if (name === 'find_type_definition') return await this.handleFindTypeDefinition(args as any);
- if (name === 'get_type_info') return await this.handleGetTypeInfo(args as any);
- if (name === 'find_type_usages') return await this.handleFindTypeUsages(args as any);
-
- if (name === 'switch_platform') return this.handleSwitchPlatform(args as any);
+ // Databases
+ if (analysis.databases.length > 0) {
+ response += ` **Databases:** ${analysis.databases.map(d => d.type).join(', ')}\n\n`;
+ }
- if (name === 'get_started') {
- const result = await this.handleGetStarted();
- if (announcement && result.content[0].type === 'text') {
- result.content[0].text = this.prependAnnouncement(result.content[0].text, announcement);
- }
- return result;
+ // Quality metrics
+ response += ` **Metrics:**\n`;
+ response += ` ${analysis.metrics.linesOfCode.toLocaleString()} lines of code\n`;
+ response += ` ${analysis.metrics.fileCount} files\n`;
+ if (analysis.metrics.complexity !== null) {
+ response += ` Complexity: ${analysis.metrics.complexity}\n`;
}
- if (name === 'get_performance_report') return this.handleGetPerformanceReport(args as any);
- // V0.4.0 - Todo Management Tools (with current project integration)
- if (name === 'todo_create') return this.handleTodoCreate(args as any);
- if (name === 'todo_get') return this.handleTodoGet(args as any);
- if (name === 'todo_list') return this.handleTodoList(args as any);
- if (name === 'todo_update') return this.handleTodoUpdate(args as any);
- if (name === 'todo_delete') return this.handleTodoDelete(args as any);
- if (name === 'todo_complete') return this.handleTodoComplete(args as any);
- if (name === 'todo_stats') return this.handleTodoStats(args as any);
- if (name === 'todo_tags') return this.handleTodoTags();
-
- // V1.0.0 - Database Migration Tools
- if (name === 'migrate_database') return this.handleMigrateDatabase(args as any);
- if (name === 'get_migration_stats') return this.handleGetMigrationStats();
- if (name === 'check_migration_suggestion') return this.handleCheckMigrationSuggestion();
+ // Install git hooks for automatic context capture
+ const GitHookManager = require('./git-hook-manager').GitHookManager;
+ const hookManager = new GitHookManager(projectPath, this.storage.getDbPath());
- // V1.0.0 - Notion Integration
- if (name === 'notion_search' && this.notionHandlers) return this.notionHandlers.handleNotionSearch(args as any);
- if (name === 'notion_read_page' && this.notionHandlers) return this.notionHandlers.handleNotionReadPage(args as any);
- if (name === 'notion_create_page' && this.notionHandlers) return this.notionHandlers.handleNotionCreatePage(args as any);
- if (name === 'notion_update_page' && this.notionHandlers) return this.notionHandlers.handleNotionUpdatePage(args as any);
- if (name === 'sync_decision_to_notion' && this.notionHandlers) return this.notionHandlers.handleSyncDecisionToNotion(args as any, this.storage);
- if (name === 'create_project_dashboard' && this.notionHandlers) return this.notionHandlers.handleCreateProjectDashboard(args as any, this.storage, this.currentProjectId);
-
- throw new Error(`Unknown tool: ${name}`);
- });
+ if (hookManager.isGitRepo()) {
+ const result = hookManager.installHooks();
+ if (result.success) {
+ response += `\n\n **Git Hooks:** Installed ${result.installed.length} hook(s) (${result.installed.join(', ')})`;
+ response += `\n Context Sync will now automatically track commits, pushes, merges, and branch switches!`;
+ } else {
+ response += `\n\n **Git Hooks:** Failed to install (${result.errors.join(', ')})`;
+ }
+ }
+
+ response += `\n\n **Deep context captured. Ready to work!**`;
+
+ return {
+ content: [{ type: 'text', text: response }],
+ };
+ } catch (error: any) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` Failed to initialize project: ${error.message}\n\nStack: ${error.stack}`,
+ }],
+ };
+ }
}
- private getV02Tools() {
- return [
- {
- name: 'get_project_context',
- description: 'Get the current project context including recent decisions and conversations',
- inputSchema: { type: 'object', properties: {} },
- },
- {
- name: 'save_decision',
- description: 'Save an important technical decision or architectural choice',
- inputSchema: {
- type: 'object',
- properties: {
- type: { type: 'string', enum: ['architecture', 'library', 'pattern', 'configuration', 'other'] },
- description: { type: 'string' },
- reasoning: { type: 'string' },
- },
- required: ['type', 'description'],
- },
- },
- {
- name: 'save_conversation',
- description: 'Save a conversation snippet for future reference',
- inputSchema: {
- type: 'object',
- properties: {
- content: { type: 'string' },
- role: { type: 'string', enum: ['user', 'assistant'] },
- },
- required: ['content', 'role'],
- },
- },
+ /**
+ * Remember - Store context intentionally
+ */
+ private async handleRemember(args: RememberInput) {
+ const project = this.getCurrentProject();
+ if (!project) {
+ return {
+ content: [{
+ type: 'text',
+ text: ' No project initialized. Run `set_project` first.',
+ }],
+ };
+ }
- {
- name: 'set_workspace',
- description: 'Set workspace directory and initialize project. Automatically detects project type, validates path, and offers to initialize Context Sync features with user consent. This is the primary command for starting work on any project.',
- inputSchema: {
- type: 'object',
- properties: {
- path: {
- type: 'string',
- description: 'Absolute path to the project directory (must exist and be accessible)'
- }
- },
- required: ['path'],
- },
- },
- {
- name: 'read_file',
- description: 'Read a file from the current workspace',
- inputSchema: {
- type: 'object',
- properties: { path: { type: 'string' } },
- required: ['path'],
- },
- },
- {
- name: 'get_project_structure',
- description: 'Get the file/folder structure of current workspace',
- inputSchema: {
- type: 'object',
- properties: { depth: { type: 'number' } },
- },
- },
- {
- name: 'scan_workspace',
- description: 'Scan workspace and get overview of important files',
- inputSchema: { type: 'object', properties: {} },
- },
- ];
- }
+ const { type, content, metadata } = args;
- private getV03Tools() {
- return [
- // Preview tools (show preview, don't apply)
- {
- name: 'create_file',
- description: 'Preview file creation (does NOT create the file yet)',
- inputSchema: {
- type: 'object',
- properties: {
- path: { type: 'string', description: 'Relative path for new file' },
- content: { type: 'string', description: 'File content' },
- overwrite: { type: 'boolean', description: 'Overwrite if exists (default: false)' },
- },
- required: ['path', 'content'],
- },
- },
- {
- name: 'modify_file',
- description: 'Preview file modification (does NOT modify the file yet)',
- inputSchema: {
- type: 'object',
- properties: {
- path: { type: 'string' },
- changes: {
- type: 'array',
- items: {
- type: 'object',
- properties: {
- type: { type: 'string', enum: ['replace', 'insert', 'delete'] },
- line: { type: 'number', description: 'Line number for the change' },
- oldText: { type: 'string', description: 'Text to be replaced or deleted' },
- newText: { type: 'string', description: 'New text to insert or replace with' },
- },
- required: ['type', 'newText']
- },
- description: 'Array of file changes'
- },
- },
- required: ['path', 'changes'],
- },
- },
- {
- name: 'delete_file',
- description: 'Preview file deletion (does NOT delete the file yet)',
- inputSchema: {
- type: 'object',
- properties: { path: { type: 'string' } },
- required: ['path'],
- },
- },
-
- // Apply tools (actually perform the action)
- {
- name: 'apply_create_file',
- description: 'Actually create the file after user approval',
- inputSchema: {
- type: 'object',
- properties: {
- path: { type: 'string' },
- content: { type: 'string' },
- },
- required: ['path', 'content'],
- },
- },
- {
- name: 'apply_modify_file',
- description: 'Actually modify the file after user approval',
- inputSchema: {
- type: 'object',
- properties: {
- path: { type: 'string' },
- changes: {
- type: 'array',
- items: {
- type: 'object',
- properties: {
- type: {
- type: 'string',
- enum: ['replace', 'insert', 'delete']
- },
- line: {
- type: 'number',
- description: 'Line number for the change'
- },
- oldText: {
- type: 'string',
- description: 'Text to be replaced or deleted'
- },
- newText: {
- type: 'string',
- description: 'New text to insert or replace with'
- }
- },
- required: ['type', 'newText']
- },
- description: 'Array of file changes to apply'
- },
- },
- required: ['path', 'changes'],
- },
- },
- {
- name: 'apply_delete_file',
- description: 'Actually delete the file after user approval',
- inputSchema: {
- type: 'object',
- properties: { path: { type: 'string' } },
- required: ['path'],
- },
- },
-
- // Other tools
- {
- name: 'undo_file_change',
- description: 'Undo the last modification to a file',
- inputSchema: {
- type: 'object',
- properties: {
- path: { type: 'string' },
- steps: { type: 'number', description: 'Number of changes to undo (default: 1)' },
- },
- required: ['path'],
- },
- },
+ try {
+ // Use optimized remember engine with git integration
+ const engine = new RememberEngine(
+ this.storage.getDb(),
+ project.id,
+ project.path || process.cwd()
+ );
+ const result = await engine.remember({ type, content, metadata });
+
+ // Format response based on action
+ let response = '';
- // Search tools
- {
- name: 'search_files',
- description: 'Search for files by name or pattern',
- inputSchema: {
- type: 'object',
- properties: {
- pattern: { type: 'string' },
- maxResults: { type: 'number' },
- ignoreCase: { type: 'boolean' },
- },
- required: ['pattern'],
- },
- },
- {
- name: 'search_content',
- description: 'Search file contents for text or regex',
- inputSchema: {
- type: 'object',
- properties: {
- query: { type: 'string' },
- regex: { type: 'boolean' },
- caseSensitive: { type: 'boolean' },
- filePattern: { type: 'string' },
- maxResults: { type: 'number' },
- },
- required: ['query'],
- },
- },
-
- // Git tools
- {
- name: 'git_status',
- description: 'Check git repository status',
- inputSchema: { type: 'object', properties: {} },
- },
- {
- name: 'git_diff',
- description: 'View git diff for file(s)',
- inputSchema: {
- type: 'object',
- properties: {
- path: { type: 'string' },
- staged: { type: 'boolean' },
- },
- },
- },
- {
- name: 'git_branch_info',
- description: 'Get git branch information',
- inputSchema: {
- type: 'object',
- properties: {
- action: { type: 'string', enum: ['current', 'list', 'recent'] },
- },
- },
- },
- {
- name: 'suggest_commit_message',
- description: 'Suggest a commit message based on changes',
- inputSchema: {
- type: 'object',
- properties: {
- files: { type: 'array', items: { type: 'string' } },
- convention: { type: 'string', enum: ['conventional', 'simple', 'descriptive'] },
- },
- },
- },
- // V0.4.0 - Dependency Analysis Tools (ADD THESE)
- {
- name: 'analyze_dependencies',
- description: 'Analyze import/export dependencies for a file. Returns all imports, exports, files that import this file, and circular dependencies.',
- inputSchema: {
- type: 'object',
- properties: {
- filePath: {
- type: 'string',
- description: 'Path to the file to analyze (relative to workspace)'
- },
- },
- required: ['filePath'],
- },
- },
- {
- name: 'detect_circular_deps',
- description: 'Detect circular dependencies starting from a file. Shows all circular dependency chains.',
- inputSchema: {
- type: 'object',
- properties: {
- filePath: {
- type: 'string',
- description: 'Path to the file (relative to workspace)'
- },
- },
- required: ['filePath'],
- },
- },
- // V0.4.0 - Call Graph Analysis Tools (ADD THESE)
- {
- name: 'analyze_call_graph',
- description: 'Analyze the call graph for a function. Shows what functions it calls (callees) and what functions call it (callers).',
- inputSchema: {
- type: 'object',
- properties: {
- functionName: {
- type: 'string',
- description: 'Name of the function to analyze'
- },
- },
- required: ['functionName'],
- },
- },
-
- // V0.4.0 - Type Analysis Tools
- {
- name: 'find_type_definition',
- description: 'Find where a type, interface, class, or enum is defined.',
- inputSchema: {
- type: 'object',
- properties: {
- typeName: {
- type: 'string',
- description: 'Name of the type to find'
- },
- },
- required: ['typeName'],
- },
- },
- {
- name: 'get_type_info',
- description: 'Get complete information about a type including properties, methods, and usage.',
- inputSchema: {
- type: 'object',
- properties: {
- typeName: {
- type: 'string',
- description: 'Name of the type'
- },
- },
- required: ['typeName'],
- },
- },
- {
- name: 'find_type_usages',
- description: 'Find all places where a type is used in the codebase.',
- inputSchema: {
- type: 'object',
- properties: {
- typeName: {
- type: 'string',
- description: 'Name of the type'
- },
- },
- required: ['typeName'],
- },
- },
- {
- name: 'switch_platform',
- description: 'Switch between AI platforms (Claude โ Cursor) with full context handoff',
- inputSchema: {
- type: 'object',
- properties: {
- fromPlatform: {
- type: 'string',
- enum: ['claude', 'cursor', 'copilot', 'other'],
- description: 'Platform you are switching from',
- },
- toPlatform: {
- type: 'string',
- enum: ['claude', 'cursor', 'copilot', 'other'],
- description: 'Platform you are switching to',
- },
- },
- required: ['fromPlatform', 'toPlatform'],
- },
- },
- {
- name: 'get_started',
- description: 'Get started with Context Sync - shows installation status, current state, and guided next steps',
- inputSchema: {
- type: 'object',
- properties: {},
- },
- },
- {
- name: 'get_performance_report',
- description: 'Get performance metrics and statistics for database operations and system performance',
- inputSchema: {
- type: 'object',
- properties: {
- operation: {
- type: 'string',
- description: 'Specific operation to get stats for (optional)',
- },
- reset: {
- type: 'boolean',
- description: 'Reset stats after reporting (default: false)',
- },
- },
- },
- },
- // V0.4.0 - Todo Management Tools (ADD THESE)
- ...todoToolDefinitions,
-
- // V1.0.0 - Database Migration Tools
- {
- name: 'migrate_database',
- description: 'Migrate and merge duplicate projects by normalized path. This tool helps clean up database duplicates caused by path variations (case differences, trailing slashes, package.json vs folder names). AI assistants can help users run this to clean up their Context Sync database.',
- inputSchema: {
- type: 'object',
- properties: {
- dryRun: {
- type: 'boolean',
- description: 'If true, show what would be migrated without making changes (default: false)'
- },
- },
- },
- },
- {
- name: 'get_migration_stats',
- description: 'Get statistics about duplicate projects without running migration. Shows how many duplicates exist and what would be merged.',
- inputSchema: {
- type: 'object',
- properties: {},
- },
- },
- {
- name: 'check_migration_suggestion',
- description: 'Check if the user should be prompted for database migration based on current version and duplicate detection. Provides smart migration recommendations.',
- inputSchema: {
- type: 'object',
- properties: {},
- },
- },
- ];
- }
-
- /**
- * V1.0.0 - Notion Integration Tools
- */
- private getNotionTools() {
- return [
- {
- name: 'notion_search',
- description: 'Search for pages in your Notion workspace. Returns pages that match the search query with their titles, IDs, URLs, and last edited times.',
- inputSchema: {
- type: 'object',
- properties: {
- query: {
- type: 'string',
- description: 'Search query to find pages in Notion',
- },
- },
- required: ['query'],
- },
- },
- {
- name: 'notion_read_page',
- description: 'Read the contents of a specific Notion page. Returns the page title, URL, and formatted content.',
- inputSchema: {
- type: 'object',
- properties: {
- pageId: {
- type: 'string',
- description: 'The ID of the Notion page to read',
- },
- },
- required: ['pageId'],
- },
- },
- {
- name: 'notion_create_page',
- description: 'Create a new page in Notion with the specified title and markdown content. Optionally specify a parent page, or use the configured default parent page.',
- inputSchema: {
- type: 'object',
- properties: {
- title: {
- type: 'string',
- description: 'Title of the new page',
- },
- content: {
- type: 'string',
- description: 'Markdown content for the page',
- },
- parentPageId: {
- type: 'string',
- description: 'Optional parent page ID. If not provided, uses default parent page from config.',
- },
- },
- required: ['title', 'content'],
- },
- },
- {
- name: 'notion_update_page',
- description: 'Update an existing Notion page by replacing its content. The new content will completely replace the existing page content.',
- inputSchema: {
- type: 'object',
- properties: {
- pageId: {
- type: 'string',
- description: 'The ID of the page to update',
- },
- content: {
- type: 'string',
- description: 'New markdown content to replace existing content',
- },
- },
- required: ['pageId', 'content'],
- },
- },
- {
- name: 'sync_decision_to_notion',
- description: 'Sync a saved architectural decision from Context Sync to Notion. Creates a formatted Architecture Decision Record (ADR) page in Notion.',
- inputSchema: {
- type: 'object',
- properties: {
- decisionId: {
- type: 'string',
- description: 'The ID of the decision to sync (from get_project_context)',
- },
- },
- required: ['decisionId'],
- },
- },
- {
- name: 'create_project_dashboard',
- description: 'Create a comprehensive project dashboard in Notion for the current project. Includes project overview, tech stack, architecture notes, and timestamps.',
- inputSchema: {
- type: 'object',
- properties: {
- projectId: {
- type: 'string',
- description: 'Optional project ID. If not provided, uses current active project.',
- },
- },
- },
- },
- ];
- }
-
- // ========== V0.2.0 HANDLERS ==========
-
- /**
- * Prepend announcement to response text if provided
- */
- private prependAnnouncement(text: string, announcement?: string | null): string {
- if (announcement) {
- return announcement + '\n\n---\n\n' + text;
- }
- return text;
- }
-
- private async handleGetContext(announcement?: string | null) {
- const project = this.getCurrentProject();
-
- if (!project) {
- return {
- content: [{
- type: 'text',
- text: 'No active project. Use set_workspace to create one.',
- }],
- };
- }
-
- const summary = this.storage.getContextSummary(project.id);
- const contextText = this.formatContextSummary(summary);
-
- return {
- content: [{
- type: 'text',
- text: this.prependAnnouncement(contextText, announcement),
- }],
- };
- }
-
- private handleSaveDecision(args: any) {
- const project = this.getCurrentProject();
-
- if (!project) {
- return {
- content: [{ type: 'text', text: 'No active project. Use set_workspace first.' }],
- };
- }
-
- const decision = this.storage.addDecision({
- projectId: project.id,
- type: args.type,
- description: args.description,
- reasoning: args.reasoning,
- });
-
- return {
- content: [{ type: 'text', text: `Decision saved: ${decision.description}` }],
- };
- }
-
- private handleSaveConversation(args: any) {
- const project = this.getCurrentProject();
-
- if (!project) {
- return {
- content: [{ type: 'text', text: 'No active project. Use set_workspace first.' }],
- };
- }
-
- this.storage.addConversation({
- projectId: project.id,
- tool: 'claude',
- role: args.role,
- content: args.content,
- });
-
- return {
- content: [{ type: 'text', text: 'Conversation saved to project context.' }],
- };
- }
-
-
-
- private async handleSetWorkspace(args: any) {
- try {
- // 0. NORMALIZE PATH for consistent handling
- const normalizedPath = PathNormalizer.normalize(args.path);
- const displayPath = PathNormalizer.getDisplayPath(args.path);
-
- // 1. STRICT PATH VALIDATION - Fail fast on invalid paths
- await this.validatePathStrict(normalizedPath);
-
- // 2. CHECK IF PROJECT ALREADY EXISTS IN DATABASE
- const existingProject = this.storage.findProjectByPath(normalizedPath);
- if (existingProject) {
- return await this.useExistingProject(normalizedPath, existingProject, displayPath);
- }
-
- // 3. DETECT PROJECT FROM FILESYSTEM - Thorough detection
- const detectedMetadata = await this.detectProjectFromPathStrict(normalizedPath);
- if (detectedMetadata) {
- // Auto-initialize immediately (no interactive confirmation)
- return await this.initializeProjectStrict(normalizedPath, detectedMetadata);
- }
-
- // 4. NO PROJECT DETECTED - Initialize a basic workspace project without prompting
- return await this.initializeProjectStrict(normalizedPath);
-
- } catch (error) {
- return this.createErrorResponse(error, args.path);
- }
- }
-
- // ========== STRICT INTERNAL FUNCTIONS WITH ROBUST VALIDATION ==========
-
- /**
- * Strict path validation - throws descriptive errors for any path issues
- */
- private async validatePathStrict(path: string): Promise {
- if (!path || typeof path !== 'string') {
- throw new Error('Path is required and must be a string');
- }
-
- const trimmedPath = path.trim();
- if (!trimmedPath) {
- throw new Error('Path cannot be empty or just whitespace');
- }
-
- // Check if path exists
- let stats;
- try {
- stats = await fs.promises.stat(trimmedPath);
- } catch (error: any) {
- if (error.code === 'ENOENT') {
- throw new Error(`Directory does not exist: ${trimmedPath}`);
- } else if (error.code === 'EACCES') {
- throw new Error(`Permission denied accessing: ${trimmedPath}`);
- } else if (error.code === 'ENOTDIR') {
- throw new Error(`Path exists but is not a directory: ${trimmedPath}`);
- } else {
- throw new Error(`Cannot access path "${trimmedPath}": ${error.message}`);
- }
- }
-
- // Verify it's actually a directory
- if (!stats.isDirectory()) {
- throw new Error(`Path exists but is not a directory: ${trimmedPath}`);
- }
-
- // Check read permissions by trying to list contents
- try {
- await fs.promises.readdir(trimmedPath);
- } catch (error: any) {
- throw new Error(`Cannot read directory contents: ${trimmedPath} (${error.message})`);
- }
- }
-
- /**
- * Strict project detection - only returns metadata for valid, detectable projects
- */
- private async detectProjectFromPathStrict(path: string): Promise {
- try {
- // Use existing project detector but with additional validation
- const metadata = await this.projectDetector.detectFromPath(path);
-
- if (!metadata) {
- return null; // No project detected - this is fine
- }
-
- // Validate detected metadata is actually valid
- if (!metadata.name || !metadata.type) {
- console.warn(`Invalid project metadata detected at ${path}:`, metadata);
- return null;
- }
-
- // Ensure tech stack is valid
- if (!Array.isArray(metadata.techStack)) {
- console.warn(`Invalid tech stack in project metadata at ${path}:`, metadata.techStack);
- metadata.techStack = [];
- }
-
- return metadata;
-
- } catch (error) {
- console.error(`Error detecting project at ${path}:`, error);
- return null; // Don't throw - just return null for undetectable projects
- }
- }
-
- /**
- * Use existing project with full workspace setup
- */
- private async useExistingProject(path: string, project: any, displayPath?: string) {
- try {
- // Set up workspace and analyzers
- await this.initializeWorkspaceStrict(path);
-
- // Set as current project in session
- this.currentProjectId = project.id;
-
- const structure = await this.workspaceDetector.getProjectStructure(2);
- const isGit = this.gitIntegration?.isGitRepo() ? ' (Git repo โ)' : '';
-
- return {
- content: [{
- type: 'text',
- text: `โ
**Workspace Connected**: ${path}${isGit}\n\n๐ **Project**: ${project.name}\nโ๏ธ **Tech Stack**: ${project.techStack.join(', ') || 'None'}\n๐๏ธ **Architecture**: ${project.architecture || 'Not specified'}\n\n๐ **Structure Preview**:\n${structure}\n\n๐ฏ **Ready!** All Context Sync features are active for this project.\n\n**Available**:\nโข Project-specific todos\nโข Decision tracking\nโข Git integration\nโข Code analysis tools\nโข ๐ **Notion Integration** - Save docs, pull specs (\`context-sync-setup\`)`
- }]
- };
- } catch (error) {
- throw new Error(`Failed to set up existing project: ${error instanceof Error ? error.message : 'Unknown error'}`);
- }
- }
-
- /**
- * Initialize workspace with all analyzers - strict validation
- */
- private async initializeWorkspaceStrict(path: string): Promise {
- try {
- // Set workspace detector
- this.workspaceDetector.setWorkspace(path);
-
- // Initialize analyzers with error handling
- try {
- this.gitIntegration = new GitIntegration(path);
- } catch (error) {
- console.warn(`Git integration failed for ${path}:`, error);
- this.gitIntegration = null;
- }
-
- // Invalidate lazy-loaded analyzers - they'll be recreated on next access
- this._dependencyAnalyzer = null;
- this._callGraphAnalyzer = null;
- this._typeAnalyzer = null;
-
- } catch (error) {
- throw new Error(`Failed to initialize workspace: ${error instanceof Error ? error.message : 'Unknown error'}`);
- }
- }
-
- /**
- * Initialize a new project with strict validation
- */
- private async initializeProjectStrict(path: string, metadata?: any, customName?: string): Promise {
- try {
- // Validate inputs
- if (!path) {
- throw new Error('Path is required for project initialization');
- }
-
- // Re-validate path still exists (safety check)
- await this.validatePathStrict(path);
-
- // Create project name
- const projectName = customName || metadata?.name || basename(path);
- if (!projectName || projectName.trim().length === 0) {
- throw new Error('Project name cannot be empty');
- }
-
- // Create project in database
- const project = this.storage.createProject(projectName.trim(), path);
- if (!project || !project.id) {
- throw new Error('Failed to create project in database');
- }
-
- // Update with metadata if available
- if (metadata) {
- try {
- this.storage.updateProject(project.id, {
- techStack: Array.isArray(metadata.techStack) ? metadata.techStack : [],
- architecture: metadata.architecture || undefined,
- });
- } catch (error) {
- console.warn('Failed to update project metadata:', error);
- // Continue - project creation succeeded even if metadata update failed
- }
- }
-
- // Set as current project in session
- this.currentProjectId = project.id;
-
- // Initialize workspace
- await this.initializeWorkspaceStrict(path);
-
- // Return success response
- return await this.createSuccessResponse(path, project);
-
- } catch (error) {
- throw new Error(`Project initialization failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
- }
- }
-
- /**
- * Create comprehensive success response
- */
- private async createSuccessResponse(path: string, project: any) {
- try {
- const structure = await this.workspaceDetector.getProjectStructure(2);
- const isGit = this.gitIntegration?.isGitRepo() ? ' (Git repo โ)' : '';
-
- // Check for migration prompt (lightweight)
- let migrationTip = '';
- try {
- const version = this.getVersion();
- const migrationCheck = await this.storage.checkMigrationPrompt(version);
- if (migrationCheck.shouldPrompt) {
- migrationTip = `\n\n๐ก **Performance Tip:** Your database has duplicate projects that can be cleaned up. Run \`get_migration_stats\` for details.`;
- }
- } catch {
- // Ignore migration check errors in success response
- }
-
- return {
- content: [{
- type: 'text',
- text: `๐ **Project Initialized Successfully!**\n\nโ
**Workspace**: ${path}${isGit}\n๐ **Project**: ${project.name}\nโ๏ธ **Tech Stack**: ${project.techStack?.join(', ') || 'None'}\n๐๏ธ **Architecture**: ${project.architecture || 'Generic'}\n\n๐ **Structure Preview**:\n${structure}\n\n๐ **Context Sync Active!**\n\n**Available Commands**:\nโข \`todo_create "task"\` - Add project todos\nโข \`save_decision "choice"\` - Record decisions\nโข \`git_status\` - Check repository status\nโข \`search_content "term"\` - Find code\nโข \`get_project_context\` - View project info\nโข ๐ **Notion tools** - \`notion_create_page\`, \`notion_search\`, etc.\n\n**Pro Tip**: All todos and decisions are now linked to "${project.name}" automatically!\n\n๐ก **Want Notion integration?** Run \`context-sync-setup\` to save docs and pull specs from Notion!${migrationTip}`
- }]
- };
- } catch (error) {
- // Fallback response if structure preview fails
- return {
- content: [{
- type: 'text',
- text: `๐ **Project Initialized**: ${project.name}\nโ
**Workspace**: ${path}\n\n๐ Context Sync is now active for this project!`
- }]
- };
- }
- }
-
- /**
- * Create comprehensive error response
- */
- private createErrorResponse(error: any, path?: string) {
- const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
-
- return {
- content: [{
- type: 'text',
- text: `โ **Workspace Setup Failed**\n\n**Error**: ${errorMessage}\n\n**Path**: ${path || 'Not provided'}\n\n**Common Solutions**:\nโข Verify the directory path exists\nโข Check you have read permissions\nโข Ensure path points to a directory (not a file)\nโข Try using an absolute path\n\n**Need Help?** Double-check the path and try again.`
- }],
- isError: true
- };
- }
-
- private async handleReadFile(args: any) {
- try {
- const file = await this.workspaceDetector.readFile(args.path);
-
- if (!file) {
- return {
- content: [{
- type: 'text',
- text: `File not found: ${args.path}\n\nMake sure:\n1. Workspace is set (use set_workspace)\n2. Path is relative to workspace root\n3. File exists`,
- }],
- };
- }
-
- const sizeKB = file.size / 1024;
- let sizeWarning = '';
- if (sizeKB > 100) {
- sizeWarning = `\nโ ๏ธ Large file (${sizeKB.toFixed(1)}KB) - showing full content\n`;
- }
-
- return {
- content: [{
- type: 'text',
- text: `๐ ${file.path} (${file.language})${sizeWarning}\n\`\`\`${file.language.toLowerCase()}\n${file.content}\n\`\`\``,
- }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error reading file: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
- }
-
- private async handleGetProjectStructure(args: any) {
- try {
- const depth = args.depth || 3;
- const structure = await this.workspaceDetector.getProjectStructure(depth);
-
- if (!structure || structure === 'No workspace open') {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- return {
- content: [{ type: 'text', text: `๐ Project Structure (depth: ${depth}):\n\n${structure}` }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error getting structure: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
- }
-
- private async handleScanWorkspace() {
- try {
- const snapshot = await this.workspaceDetector.createSnapshot();
-
- if (!snapshot.rootPath) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- let response = `๐ Workspace Scan Results\n\n`;
- response += `๐ Root: ${snapshot.rootPath}\n\n`;
- response += `${snapshot.summary}\n\n`;
- response += `๐ Structure:\n${snapshot.structure}\n\n`;
- response += `๐ Scanned ${snapshot.files.length} important files:\n`;
-
- snapshot.files.forEach(f => {
- const icon = f.language.includes('TypeScript') ? '๐' :
- f.language.includes('JavaScript') ? '๐' :
- f.language === 'JSON' ? '๐' : '๐';
- response += `${icon} ${f.path} (${f.language}, ${(f.size / 1024).toFixed(1)}KB)\n`;
- });
-
- response += `\nUse read_file to view any specific file!`;
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error scanning workspace: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
- }
-
- // ========== V0.3.0 HANDLERS - PREVIEW TOOLS ==========
-
- private async handleCreateFile(args: any) {
- const result = await this.fileWriter.createFile(
- args.path,
- args.content,
- args.overwrite || false
- );
-
- if (!result.success) {
- return {
- content: [{ type: 'text', text: result.message }],
- };
- }
-
- return {
- content: [{
- type: 'text',
- text: result.preview + '\n\nโ ๏ธ This is a PREVIEW only. To actually create this file, user must approve and you must call apply_create_file with the same parameters.',
- }],
- };
- }
-
- private async handleModifyFile(args: any) {
- const result = await this.fileWriter.modifyFile(args.path, args.changes);
-
- if (!result.success) {
- return {
- content: [{ type: 'text', text: result.message }],
- };
- }
-
- return {
- content: [{
- type: 'text',
- text: result.preview + '\n\nโ ๏ธ This is a PREVIEW only. To actually modify this file, user must approve and you must call apply_modify_file with the same parameters.',
- }],
- };
- }
-
- private async handleDeleteFile(args: any) {
- const result = await this.fileWriter.deleteFile(args.path);
-
- if (!result.success) {
- return {
- content: [{ type: 'text', text: result.message }],
- };
- }
-
- return {
- content: [{
- type: 'text',
- text: result.preview + '\n\nโ ๏ธ This is a PREVIEW only. To actually delete this file, user must approve and you must call apply_delete_file with the same path.',
- }],
- };
- }
-
- // ========== V0.3.0 HANDLERS - APPLY TOOLS (NEW!) ==========
-
- private async handleApplyCreateFile(args: any) {
- const result = await this.fileWriter.applyCreateFile(args.path, args.content);
- return {
- content: [{ type: 'text', text: result.message }],
- };
- }
-
- private async handleApplyModifyFile(args: any) {
- const result = await this.fileWriter.applyModifyFile(args.path, args.changes);
- return {
- content: [{ type: 'text', text: result.message }],
- };
- }
-
- private async handleApplyDeleteFile(args: any) {
- const result = await this.fileWriter.applyDeleteFile(args.path);
- return {
- content: [{ type: 'text', text: result.message }],
- };
- }
-
- // ========== V0.3.0 HANDLERS - OTHER TOOLS ==========
-
- private async handleUndoFileChange(args: any) {
- const result = await this.fileWriter.undoChange(
- args.path,
- args.steps || 1
- );
-
- return {
- content: [{ type: 'text', text: result.message }],
- };
- }
-
- private async handleSearchFiles(args: any) {
- const results = await this.fileSearcher.searchFilesAsync(args.pattern, {
- maxResults: args.maxResults,
- ignoreCase: args.ignoreCase,
- });
-
- if (results.length === 0) {
- return {
- content: [{
- type: 'text',
- text: `No files found matching pattern: "${args.pattern}"`,
- }],
- };
- }
-
- let response = `๐ Found ${results.length} files matching "${args.pattern}":\n\n`;
-
- results.forEach((file, i) => {
- const size = (file.size / 1024).toFixed(1);
- response += `${i + 1}. ${file.path} (${file.language}, ${size}KB)\n`;
- });
-
- response += `\nUse read_file to view any of these files.`;
-
- return {
- content: [{ type: 'text', text: response }],
- };
- }
-
- private async handleSearchContent(args: any) {
- const results = await this.fileSearcher.searchContentAsync(args.query, {
- regex: args.regex,
- caseSensitive: args.caseSensitive,
- filePattern: args.filePattern,
- maxResults: args.maxResults,
- });
-
- if (results.length === 0) {
- return {
- content: [{
- type: 'text',
- text: `No matches found for: "${args.query}"`,
- }],
- };
- }
-
- let response = `๐ Found ${results.length} matches for "${args.query}":\n\n`;
-
- results.slice(0, 20).forEach((match, i) => {
- response += `${i + 1}. ${match.path}:${match.line}\n`;
- response += ` ${match.content}\n\n`;
- });
-
- if (results.length > 20) {
- response += `... and ${results.length - 20} more matches`;
- }
-
- return {
- content: [{ type: 'text', text: response }],
- };
- }
-
- private handleFindSymbol(args: any) {
- const results = this.fileSearcher.findSymbol(args.symbol, args.type);
-
- if (results.length === 0) {
- return {
- content: [{
- type: 'text',
- text: `Symbol "${args.symbol}" not found`,
- }],
- };
- }
-
- let response = `๐ Found ${results.length} definition(s) of "${args.symbol}":\n\n`;
-
- results.forEach((match, i) => {
- response += `${i + 1}. ${match.path}:${match.line}\n`;
- response += ` ${match.content}\n\n`;
- });
-
- return {
- content: [{ type: 'text', text: response }],
- };
- }
-
- private handleGitStatus() {
- if (!this.gitIntegration) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- const status = this.gitIntegration.getStatus();
-
- if (!status) {
- return {
- content: [{ type: 'text', text: 'Not a git repository' }],
- };
- }
-
- let response = `๐ Git Status\n\n`;
- response += `๐ Branch: ${status.branch}`;
-
- if (status.ahead > 0) response += ` (ahead ${status.ahead})`;
- if (status.behind > 0) response += ` (behind ${status.behind})`;
- response += `\n\n`;
-
- if (status.clean) {
- response += `โ
Working tree clean`;
- } else {
- if (status.staged.length > 0) {
- response += `๐ Staged (${status.staged.length}):\n`;
- status.staged.slice(0, 10).forEach(f => response += ` โข ${f}\n`);
- if (status.staged.length > 10) {
- response += ` ... and ${status.staged.length - 10} more\n`;
- }
- response += `\n`;
- }
-
- if (status.modified.length > 0) {
- response += `โ๏ธ Modified (${status.modified.length}):\n`;
- status.modified.slice(0, 10).forEach(f => response += ` โข ${f}\n`);
- if (status.modified.length > 10) {
- response += ` ... and ${status.modified.length - 10} more\n`;
- }
- response += `\n`;
- }
-
- if (status.untracked.length > 0) {
- response += `โ Untracked (${status.untracked.length}):\n`;
- status.untracked.slice(0, 10).forEach(f => response += ` โข ${f}\n`);
- if (status.untracked.length > 10) {
- response += ` ... and ${status.untracked.length - 10} more\n`;
- }
- }
- }
-
- return {
- content: [{ type: 'text', text: response }],
- };
- }
-
- private handleGitDiff(args: any) {
- if (!this.gitIntegration) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- const diff = this.gitIntegration.getDiff(args.path, args.staged);
-
- if (!diff) {
- return {
- content: [{ type: 'text', text: 'Not a git repository or no changes' }],
- };
- }
-
- if (diff.trim().length === 0) {
- return {
- content: [{ type: 'text', text: 'No changes to show' }],
- };
- }
-
- return {
- content: [{
- type: 'text',
- text: `๐ Git Diff${args.staged ? ' (staged)' : ''}:\n\n\`\`\`diff\n${diff}\n\`\`\``,
- }],
- };
- }
-
- private handleGitBranchInfo(args: any) {
- if (!this.gitIntegration) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- const info = this.gitIntegration.getBranchInfo(args.action || 'current');
-
- if (!info) {
- return {
- content: [{ type: 'text', text: 'Not a git repository' }],
- };
- }
-
- if (typeof info === 'string') {
- return {
- content: [{ type: 'text', text: `๐ Current branch: ${info}` }],
- };
- }
-
- let response = `๐ Git Branches\n\n`;
- response += `Current: ${info.current}\n\n`;
-
- if (info.all.length > 0) {
- response += `All branches (${info.all.length}):\n`;
- info.all.slice(0, 20).forEach(b => {
- const marker = b === info.current ? 'โ ' : ' ';
- response += `${marker}${b}\n`;
- });
- }
-
- if (info.recent.length > 0) {
- response += `\nRecent branches:\n`;
- info.recent.forEach(b => {
- const marker = b.name === info.current ? 'โ ' : ' ';
- response += `${marker}${b.name} (${b.lastCommit})\n`;
- });
- }
-
- return {
- content: [{ type: 'text', text: response }],
- };
- }
-
- private handleSuggestCommitMessage(args: any) {
- if (!this.gitIntegration) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- const message = this.gitIntegration.suggestCommitMessage(
- args.files || [],
- args.convention || 'conventional'
- );
-
- if (!message) {
- return {
- content: [{ type: 'text', text: 'Not a git repository or no changes to commit' }],
- };
- }
-
- return {
- content: [{
- type: 'text',
- text: `๐ฌ Suggested Commit Message:\n\n\`\`\`\n${message}\n\`\`\``,
- }],
- };
- }
-
- private formatContextSummary(summary: any): string {
- const { project, recentDecisions, keyPoints } = summary;
-
- let text = `# Project: ${project.name}\n\n`;
-
- if (project.architecture) {
- text += `**Architecture:** ${project.architecture}\n\n`;
- }
-
- if (project.techStack.length > 0) {
- text += `**Tech Stack:** ${project.techStack.join(', ')}\n\n`;
- }
-
- if (recentDecisions.length > 0) {
- text += `## Recent Decisions\n\n`;
- recentDecisions.forEach((d: any) => {
- text += `- **${d.type}**: ${d.description}\n`;
- if (d.reasoning) {
- text += ` *Reasoning: ${d.reasoning}*\n`;
+ if (result.action === 'created') {
+ response = ` **Remembered as ${type}**\n\n`;
+ response += `"${content}"\n\n`;
+
+ // Show auto-extracted metadata
+ if (metadata?.files && metadata.files.length > 0) {
+ response += ` **Files:** ${metadata.files.join(', ')}\n`;
}
- });
- text += '\n';
- }
-
- if (keyPoints.length > 0) {
- text += `## Key Context Points\n\n`;
- keyPoints.slice(0, 10).forEach((point: string) => {
- text += `- ${point}\n`;
- });
- }
-
- return text;
- }
-
- // ========== V0.4.0 HANDLERS - DEPENDENCY ANALYSIS ==========
- private handleAnalyzeDependencies(args: any) {
- if (!this.dependencyAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- try {
- const graph = this.dependencyAnalyzer.analyzeDependencies(args.filePath);
-
- let response = `๐ Dependency Analysis: ${graph.filePath}\n\n`;
-
- // Imports
- if (graph.imports.length > 0) {
- response += `๐ฅ Imports (${graph.imports.length}):\n`;
- graph.imports.forEach(imp => {
- const type = imp.isExternal ? '๐ฆ [external]' : '๐ [local]';
- const names = imp.importedNames.length > 0 ? `{ ${imp.importedNames.join(', ')} }` :
- imp.defaultImport ? imp.defaultImport :
- imp.namespaceImport ? `* as ${imp.namespaceImport}` : '';
- response += ` ${type} ${imp.source}${names ? ` - ${names}` : ''} (line ${imp.line})\n`;
- });
- response += '\n';
- }
-
- // Exports
- if (graph.exports.length > 0) {
- response += `๐ค Exports (${graph.exports.length}):\n`;
- graph.exports.forEach(exp => {
- if (exp.hasDefaultExport) {
- response += ` โข default export (line ${exp.line})\n`;
+
+ // Show file context from auto-enrichment
+ if (result.fileContext && result.fileContext.files.length > 0) {
+ response += `\n **File Context:**\n`;
+ for (const file of result.fileContext.files) {
+ const complexityEmoji = file.complexity === 'low' ? '' :
+ file.complexity === 'medium' ? '' :
+ file.complexity === 'high' ? '' : '';
+ response += ` ${file.path.split(/[/\\]/).pop()} ${complexityEmoji} ${file.complexity} (${file.linesOfCode} LOC`;
+ if (file.imports.length > 0) {
+ response += `, imports: ${file.imports.slice(0, 3).join(', ')}`;
+ }
+ response += `)\n`;
+ }
+ response += `\n`;
}
- if (exp.exportedNames.length > 0) {
- response += ` โข ${exp.exportedNames.join(', ')} (line ${exp.line})\n`;
+
+ if (result.gitContext) {
+ response += ` **Branch:** ${result.gitContext.branch}\n`;
+ if (result.gitContext.uncommittedFiles.length > 0) {
+ response += ` **Uncommitted:** ${result.gitContext.uncommittedFiles.length} file(s)\n`;
+ }
+ } else if (metadata?.branch) {
+ response += ` **Branch:** ${metadata.branch}\n`;
}
- });
- response += '\n';
- }
-
- // Importers
- if (graph.importers.length > 0) {
- response += `๐ฅ Imported by (${graph.importers.length} files):\n`;
- graph.importers.slice(0, 10).forEach(file => {
- const relativePath = file.replace(this.dependencyAnalyzer!['workspacePath'], '').replace(/^[\\\/]/, '');
- response += ` โข ${relativePath}\n`;
- });
- if (graph.importers.length > 10) {
- response += ` ... and ${graph.importers.length - 10} more files\n`;
- }
- response += '\n';
- }
-
- // Circular dependencies
- if (graph.circularDeps.length > 0) {
- response += `โ ๏ธ Circular Dependencies (${graph.circularDeps.length}):\n`;
- graph.circularDeps.forEach(cycle => {
- response += ` โข ${cycle.description}\n`;
- });
- } else {
- response += `โ
No circular dependencies detected\n`;
- }
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error analyzing dependencies: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
-}
-
-private handleGetDependencyTree(args: any) {
- if (!this.dependencyAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- try {
- const depth = Math.min(args.depth || 3, 10);
- const tree = this.dependencyAnalyzer.getDependencyTree(args.filePath, depth);
-
- const formatTree = (node: any, indent: string = '', isLast: boolean = true): string => {
- const prefix = isLast ? 'โโโ ' : 'โโโ ';
- const icon = node.isExternal ? '๐ฆ' : node.isCyclic ? '๐' : '๐';
- let result = `${indent}${prefix}${icon} ${node.file}${node.isCyclic ? ' (circular)' : ''}\n`;
-
- if (node.imports && node.imports.length > 0) {
- const newIndent = indent + (isLast ? ' ' : 'โ ');
- node.imports.forEach((child: any, i: number) => {
- result += formatTree(child, newIndent, i === node.imports.length - 1);
- });
- }
-
- return result;
- };
-
- let response = `๐ฒ Dependency Tree (depth: ${depth})\n\n`;
- response += formatTree(tree);
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error getting dependency tree: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
-}
-
-private handleFindImporters(args: any) {
- if (!this.dependencyAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- try {
- const importers = this.dependencyAnalyzer.findImporters(args.filePath);
-
- if (importers.length === 0) {
- return {
- content: [{
- type: 'text',
- text: `No files import ${args.filePath}\n\nThis file is either:\n- Not imported anywhere (unused)\n- An entry point\n- Only imported by external packages`,
- }],
- };
- }
-
- let response = `๐ฅ Files that import ${args.filePath} (${importers.length}):\n\n`;
-
- importers.forEach((file, i) => {
- const relativePath = file.replace(this.dependencyAnalyzer!['workspacePath'], '').replace(/^[\\\/]/, '');
- response += `${i + 1}. ${relativePath}\n`;
- });
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error finding importers: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
-}
-
-private handleDetectCircularDeps(args: any) {
- if (!this.dependencyAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- try {
- const cycles = this.dependencyAnalyzer.detectCircularDependencies(args.filePath);
-
- if (cycles.length === 0) {
- return {
- content: [{
- type: 'text',
- text: `โ
No circular dependencies detected for ${args.filePath}`,
- }],
- };
- }
-
- let response = `โ ๏ธ Circular Dependencies Detected (${cycles.length}):\n\n`;
-
- cycles.forEach((cycle, i) => {
- response += `${i + 1}. ${cycle.description}\n`;
- response += ` Path: ${cycle.cycle.map(f => {
- return f.replace(this.dependencyAnalyzer!['workspacePath'], '').replace(/^[\\\/]/, '');
- }).join(' โ ')}\n\n`;
- });
-
- response += `\n๐ก Tip: Circular dependencies can cause:\n`;
- response += `- Module initialization issues\n`;
- response += `- Bundler problems\n`;
- response += `- Harder to understand code\n`;
- response += `\nConsider refactoring by extracting shared code to a separate module.`;
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error detecting circular dependencies: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
-}
-
-// ========== V0.4.0 HANDLERS - CALL GRAPH ANALYSIS ==========
-
-private handleAnalyzeCallGraph(args: any) {
- if (!this.callGraphAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- try {
- const graph = this.callGraphAnalyzer.analyzeCallGraph(args.functionName);
-
- if (!graph) {
- return {
- content: [{
- type: 'text',
- text: `Function "${args.functionName}" not found in workspace.`,
- }],
- };
- }
-
- let response = `๐ Call Graph Analysis: ${graph.function.name}\n\n`;
-
- // Function info
- response += `๐ Location: ${this.getRelativePath(graph.function.filePath)}:${graph.function.line}\n`;
- response += `๐ง Type: ${graph.function.type}`;
- if (graph.function.className) {
- response += ` (in class ${graph.function.className})`;
- }
- response += `\n`;
- response += `๐ Call depth: ${graph.callDepth}\n`;
- if (graph.isRecursive) {
- response += `๐ Recursive: Yes\n`;
- }
- response += `\n`;
-
- // Callers (who calls this function)
- if (graph.callers.length > 0) {
- response += `๐ฅ Called by (${graph.callers.length} functions):\n`;
- graph.callers.slice(0, 10).forEach(caller => {
- const file = this.getRelativePath(caller.filePath);
- const asyncMark = caller.isAsync ? ' (async)' : '';
- response += ` โข ${caller.caller}${asyncMark} - ${file}:${caller.line}\n`;
- });
- if (graph.callers.length > 10) {
- response += ` ... and ${graph.callers.length - 10} more\n`;
- }
- response += `\n`;
- } else {
- response += `๐ฅ Not called by any function (entry point or unused)\n\n`;
- }
-
- // Callees (what this function calls)
- if (graph.callees.length > 0) {
- response += `๐ Calls (${graph.callees.length} functions):\n`;
- graph.callees.slice(0, 10).forEach(callee => {
- const file = this.getRelativePath(callee.filePath);
- const asyncMark = callee.isAsync ? ' (await)' : '';
- response += ` โข ${callee.callee}${asyncMark} - ${file}:${callee.line}\n`;
- });
- if (graph.callees.length > 10) {
- response += ` ... and ${graph.callees.length - 10} more\n`;
- }
- } else {
- response += `๐ Doesn't call any functions (leaf function)\n`;
- }
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error analyzing call graph: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
-}
-
-private handleFindCallers(args: any) {
- if (!this.callGraphAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- try {
- const callers = this.callGraphAnalyzer.findCallers(args.functionName);
-
- if (callers.length === 0) {
- return {
- content: [{
- type: 'text',
- text: `No functions call "${args.functionName}".\n\nThis function might be:\n- An entry point\n- Unused code\n- Only called externally`,
- }],
- };
- }
-
- let response = `๐ฅ Functions that call "${args.functionName}" (${callers.length}):\n\n`;
-
- callers.forEach((caller, i) => {
- const file = this.getRelativePath(caller.filePath);
- const asyncMark = caller.isAsync ? 'โณ' : ' ';
- response += `${i + 1}. ${asyncMark} ${caller.caller}\n`;
- response += ` ๐ ${file}:${caller.line}\n`;
- response += ` ๐ฌ ${caller.callExpression}\n\n`;
- });
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error finding callers: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
-}
-
-private handleTraceExecutionPath(args: any) {
- if (!this.callGraphAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- try {
- const paths = this.callGraphAnalyzer.traceExecutionPath(
- args.startFunction,
- args.endFunction,
- args.maxDepth || 10
- );
-
- if (paths.length === 0) {
- return {
- content: [{
- type: 'text',
- text: `No execution path found from "${args.startFunction}" to "${args.endFunction}".\n\nPossible reasons:\n- Functions are not connected\n- Path exceeds max depth\n- One or both functions don't exist`,
- }],
- };
- }
-
- let response = `๐ค๏ธ Execution Paths: ${args.startFunction} โ ${args.endFunction}\n\n`;
- response += `Found ${paths.length} possible path(s):\n\n`;
-
- paths.forEach((path, i) => {
- const asyncMark = path.isAsync ? ' โณ (async)' : '';
- response += `Path ${i + 1} (depth: ${path.depth})${asyncMark}:\n`;
- response += ` ${path.description}\n\n`;
- });
-
- if (paths.length > 1) {
- response += `๐ก Multiple paths exist. Consider:\n`;
- response += `- Which path is most commonly used?\n`;
- response += `- Are all paths intentional?\n`;
- response += `- Could the code be simplified?\n`;
- }
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error tracing execution path: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
-}
-
-private handleGetCallTree(args: any) {
- if (!this.callGraphAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
-
- try {
- const depth = Math.min(args.depth || 3, 5);
- const tree = this.callGraphAnalyzer.getCallTree(args.functionName, depth);
-
- if (!tree) {
- return {
- content: [{
- type: 'text',
- text: `Function "${args.functionName}" not found in workspace.`,
- }],
- };
- }
-
- const formatTree = (node: any, indent: string = '', isLast: boolean = true): string => {
- const prefix = isLast ? 'โโโ ' : 'โโโ ';
- const asyncMark = node.isAsync ? 'โณ ' : '';
- const recursiveMark = node.isRecursive ? '๐ ' : '';
- let result = `${indent}${prefix}${asyncMark}${recursiveMark}${node.function} (${node.file}:${node.line})\n`;
-
- if (node.calls && node.calls.length > 0 && !node.isRecursive) {
- const newIndent = indent + (isLast ? ' ' : 'โ ');
- node.calls.forEach((child: any, i: number) => {
- result += formatTree(child, newIndent, i === node.calls.length - 1);
- });
- }
-
- return result;
- };
-
- let response = `๐ฒ Call Tree (depth: ${depth})\n\n`;
- response += formatTree(tree);
- response += `\n`;
- response += `Legend:\n`;
- response += `โณ = async function\n`;
- response += `๐ = recursive call\n`;
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error getting call tree: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
-}
-
-// ========== V0.4.0 HANDLERS - TYPE ANALYSIS ==========
-
-private async handleFindTypeDefinition(args: any) {
- if (!this.typeAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
+ if (metadata?.target_date) {
+ response += ` **Target:** ${metadata.target_date}\n`;
+ }
+
+ // Show Notion suggestions if detected
+ if (metadata?.notionPages && metadata.notionPages.length > 0) {
+ response += `\n **Notion References Detected:**\n`;
+ for (const page of metadata.notionPages) {
+ response += ` ${page}\n`;
+ }
+ response += `\n Tip: Use \`notion action=read pageId=\` to view content\n`;
+ } else if (metadata?.suggestNotionSearch && metadata?.notionSearchSuggestion) {
+ response += `\n **Documentation Mentioned!**\n`;
+ response += ` Search Notion: \`notion action=search query="${metadata.notionSearchSuggestion}"\`\n`;
+ }
+
+ response += `\n This will be available in future sessions via \`recall\`.`;
+ } else if (result.action === 'updated') {
+ response = ` **Updated existing ${type}**\n\n`;
+ response += `"${content}"\n\n`;
+
+ // Show file context from auto-enrichment
+ if (result.fileContext && result.fileContext.files.length > 0) {
+ response += ` **File Context:**\n`;
+ for (const file of result.fileContext.files) {
+ const complexityEmoji = file.complexity === 'low' ? '' :
+ file.complexity === 'medium' ? '' :
+ file.complexity === 'high' ? '' : '';
+ response += ` ${file.path.split(/[/\\]/).pop()} ${complexityEmoji} ${file.complexity} (${file.linesOfCode} LOC)\n`;
+ }
+ response += `\n`;
+ }
+
+ if (result.gitContext) {
+ response += ` **Branch:** ${result.gitContext.branch}\n`;
+ }
+ response += ` Found similar context and updated it instead of creating duplicate.`;
+ } else {
+ response = ` **Skipped ${type}**\n\n`;
+ response += `Reason: ${result.reason}`;
+ }
- try {
- const definition = await this.typeAnalyzer.findTypeDefinition(args.typeName);
-
- if (!definition) {
return {
content: [{
type: 'text',
- text: `Type "${args.typeName}" not found in workspace.\n\nMake sure:\n- The type is defined in a .ts or .tsx file\n- The type name is spelled correctly\n- The file is in the workspace`,
+ text: response,
+ }],
+ };
+ } catch (error: any) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` Failed to remember: ${error.message}`,
}],
};
}
-
- const file = this.getRelativePath(definition.filePath);
- let response = `๐ Type Definition: ${definition.name}\n\n`;
- response += `๐ท๏ธ Kind: ${definition.kind}\n`;
- response += `๐ Location: ${file}:${definition.line}\n`;
- response += `๐ค Exported: ${definition.isExported ? 'Yes' : 'No'}\n\n`;
- response += `Raw definition:\n\`\`\`typescript\n${definition.raw}\n\`\`\``;
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error finding type definition: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
-}
-
-private async handleGetTypeInfo(args: any) {
- if (!this.typeAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
}
- try {
- const info = await this.typeAnalyzer.getTypeInfo(args.typeName);
-
- if (!info) {
+ /**
+ * Recall - Retrieve layered context
+ */
+ private async handleRecall(args?: { query?: string; limit?: number }) {
+ const project = this.getCurrentProject();
+ if (!project) {
return {
content: [{
type: 'text',
- text: `Type "${args.typeName}" not found in workspace.`,
+ text: ' No project initialized. Run `set_project` first.',
}],
};
}
- const file = this.getRelativePath(info.definition.filePath);
- let response = `๐ Complete Type Information: ${info.definition.name}\n\n`;
- response += `๐ ${file}:${info.definition.line}\n`;
- response += `๐ท๏ธ ${info.definition.kind}\n\n`;
+ const limit = args?.limit || 10;
+ const query = args?.query;
+ const db = this.storage.getDb();
- // Type-specific details
- const details = info.details;
+ try {
+ // Use optimized recall engine
+ const engine = new RecallEngine(db, project.id);
+ const synthesis = await engine.recall(query, limit);
+
+ // Format intelligent response
+ let response = ` **Context Recall: ${project.name}**\n\n`;
+
+ // 1. Smart Summary (2 paragraphs)
+ response += ` **Where You Left Off**\n\n`;
+ response += synthesis.summary;
+ response += `\n\n`;
+
+ // 2. Critical Path (ordered next steps)
+ if (synthesis.criticalPath.length > 0) {
+ response += ` **Critical Path** (in order):\n`;
+ synthesis.criticalPath.forEach((step, i) => {
+ response += ` ${i + 1}. ${step}\n`;
+ });
+ response += `\n`;
+ }
- if (details.kind === 'interface') {
- if (details.extends && details.extends.length > 0) {
- response += `๐ Extends: ${details.extends.join(', ')}\n\n`;
+ // 3. Freshness indicator
+ const { fresh, recent, stale, expired } = synthesis.freshness;
+ const total = fresh + recent + stale + expired;
+ if (total > 0) {
+ response += ` **Context Freshness**: `;
+ const parts = [];
+ if (fresh > 0) parts.push(`${fresh} fresh`);
+ if (recent > 0) parts.push(`${recent} recent`);
+ if (stale > 0) parts.push(`${stale} stale`);
+ if (expired > 0) parts.push(`${expired} expired`);
+ response += parts.join(', ');
+ response += `\n\n`;
}
- if (details.properties.length > 0) {
- response += `๐ฆ Properties (${details.properties.length}):\n`;
- details.properties.forEach(prop => {
- const optional = prop.optional ? '?' : '';
- const readonly = prop.readonly ? 'readonly ' : '';
- response += ` โข ${readonly}${prop.name}${optional}: ${prop.type}\n`;
+ // 4. Active Work
+ if (synthesis.activeWork.length > 0) {
+ response += ` **Active Work**\n`;
+ synthesis.activeWork.forEach((work: any) => {
+ const freshness = work.staleness === 'fresh' ? '' : work.staleness === 'recent' ? '' : '';
+ response += `${freshness} ${work.content}\n`;
+ if (work.metadata?.files && work.metadata.files.length > 0) {
+ response += ` Files: ${work.metadata.files.join(', ')}\n`;
+ }
});
response += `\n`;
}
- if (details.methods.length > 0) {
- response += `โ๏ธ Methods (${details.methods.length}):\n`;
- details.methods.forEach(method => {
- const params = method.params.map(p => `${p.name}: ${p.type || 'any'}`).join(', ');
- response += ` โข ${method.name}(${params}): ${method.returnType || 'void'}\n`;
+ // 4.5. Caveats (AI mistakes, tech debt, unverified changes) - HIGH PRIORITY!
+ if (synthesis.caveats.length > 0) {
+ response += ` **Tech Debt & Unresolved Issues** (${synthesis.caveats.length})\n`;
+ synthesis.caveats.forEach((cav: any) => {
+ // Severity icons
+ const severityIcon = cav.metadata?.severity === 'critical' ? '' :
+ cav.metadata?.severity === 'high' ? '' :
+ cav.metadata?.severity === 'medium' ? '' : '';
+
+ // Category badges
+ const categoryBadge = cav.metadata?.category === 'mistake' ? '[MISTAKE]' :
+ cav.metadata?.category === 'shortcut' ? '[SHORTCUT]' :
+ cav.metadata?.category === 'unverified' ? '[UNVERIFIED]' :
+ cav.metadata?.category === 'assumption' ? '[ASSUMPTION]' : '[WORKAROUND]';
+
+ response += `${severityIcon} ${categoryBadge} ${cav.content}\n`;
+
+ if (cav.metadata?.attempted) {
+ response += ` Attempted: ${cav.metadata.attempted}\n`;
+ }
+ if (cav.metadata?.recovery) {
+ response += ` Recovery: ${cav.metadata.recovery}\n`;
+ }
+ if (cav.metadata?.action_required) {
+ response += ` Action Required: ${cav.metadata.action_required}\n`;
+ }
+ if (cav.metadata?.affects_production) {
+ response += ` Affects Production: YES\n`;
+ }
});
response += `\n`;
}
- } else if (details.kind === 'type') {
- response += `๐ Definition:\n ${details.definition}\n\n`;
- } else if (details.kind === 'class') {
- if (details.extends) {
- response += `๐ Extends: ${details.extends}\n`;
- }
- if (details.implements && details.implements.length > 0) {
- response += `๐ Implements: ${details.implements.join(', ')}\n`;
- }
- response += `\n`;
- if (details.constructor) {
- const params = details.constructor.params.map(p => `${p.name}: ${p.type || 'any'}`).join(', ');
- response += `๐๏ธ Constructor(${params})\n\n`;
+ // 5. Open Problems
+ if (synthesis.problems.length > 0) {
+ response += ` **Open Problems**\n`;
+ synthesis.problems.slice(0, 3).forEach((p: any) => {
+ response += ` ${p.content}\n`;
+ });
+ if (synthesis.problems.length > 3) {
+ response += ` ... and ${synthesis.problems.length - 3} more\n`;
+ }
+ response += `\n`;
}
- if (details.properties.length > 0) {
- response += `๐ฆ Properties (${details.properties.length}):\n`;
- details.properties.forEach(prop => {
- const optional = prop.optional ? '?' : '';
- const readonly = prop.readonly ? 'readonly ' : '';
- response += ` โข ${readonly}${prop.name}${optional}: ${prop.type}\n`;
+ // 6. Constraints
+ if (synthesis.constraints.length > 0) {
+ response += ` **Constraints**\n`;
+ synthesis.constraints.slice(0, 3).forEach((c: any) => {
+ response += ` ${c.content}\n`;
});
response += `\n`;
}
- if (details.methods.length > 0) {
- response += `โ๏ธ Methods (${details.methods.length}):\n`;
- details.methods.forEach(method => {
- const vis = method.visibility || 'public';
- const stat = method.isStatic ? 'static ' : '';
- const async = method.isAsync ? 'async ' : '';
- response += ` โข ${vis} ${stat}${async}${method.name}()\n`;
+ // 7. Goals
+ if (synthesis.goals.length > 0) {
+ response += ` **Goals**\n`;
+ synthesis.goals.slice(0, 3).forEach((g: any) => {
+ response += ` ${g.content}`;
+ if (g.metadata?.status) {
+ response += ` [${g.metadata.status}]`;
+ }
+ response += `\n`;
});
response += `\n`;
}
- } else if (details.kind === 'enum') {
- response += `๐ Members (${details.members.length}):\n`;
- details.members.forEach(member => {
- const value = member.value !== undefined ? ` = ${member.value}` : '';
- response += ` โข ${member.name}${value}\n`;
- });
- response += `\n`;
- }
- // Related types
- if (info.relatedTypes.length > 0) {
- response += `๐ Related Types: ${info.relatedTypes.join(', ')}\n\n`;
- }
+ // 8. Relationships (decision files)
+ if (synthesis.relationships.size > 0) {
+ response += ` **Relationships**\n`;
+ let count = 0;
+ for (const [decision, files] of synthesis.relationships) {
+ if (count >= 2) break;
+ response += ` "${decision}" affects: ${files.join(', ')}\n`;
+ count++;
+ }
+ response += `\n`;
+ }
- // Usage count
- response += `๐ Used in ${info.usages.length} location(s)\n`;
+ // 9. Gaps (missing context)
+ if (synthesis.gaps.length > 0) {
+ response += ` **Context Gaps**\n`;
+ synthesis.gaps.forEach(gap => {
+ response += `${gap}\n`;
+ });
+ response += `\n`;
+ }
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error getting type info: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
- }
-}
+ // 10. Suggestions (actionable next steps)
+ if (synthesis.suggestions.length > 0) {
+ response += ` **Suggestions**\n`;
+ synthesis.suggestions.forEach(suggestion => {
+ response += ` ${suggestion}\n`;
+ });
+ response += `\n`;
+ }
-private async handleFindTypeUsages(args: any) {
- if (!this.typeAnalyzer) {
- return {
- content: [{ type: 'text', text: 'No workspace set. Use set_workspace first.' }],
- };
- }
+ // Empty state
+ if (total === 0) {
+ response += `\n_No context stored yet. Use \`remember\` to add important information._`;
+ }
- try {
- const usages = await this.typeAnalyzer.findTypeUsages(args.typeName);
-
- if (usages.length === 0) {
+ return {
+ content: [{ type: 'text', text: response }],
+ };
+ } catch (error: any) {
return {
content: [{
type: 'text',
- text: `Type "${args.typeName}" is not used anywhere.\n\nThis type might be:\n- Newly defined\n- Exported but not used\n- Dead code (consider removing)`,
+ text: ` Failed to recall: ${error.message}\n\nStack: ${error.stack}`,
}],
};
}
-
- let response = `๐ Usage of type "${args.typeName}" (${usages.length} locations):\n\n`;
-
- // Group by file
- const byFile = new Map();
- usages.forEach(usage => {
- const file = this.getRelativePath(usage.filePath);
- if (!byFile.has(file)) {
- byFile.set(file, []);
- }
- byFile.get(file)!.push(usage);
- });
-
- byFile.forEach((fileUsages, file) => {
- response += `๐ ${file} (${fileUsages.length} usages):\n`;
- fileUsages.slice(0, 5).forEach(usage => {
- const icon = usage.usageType === 'variable' ? '๐ฆ' :
- usage.usageType === 'parameter' ? 'โ๏ธ' :
- usage.usageType === 'return' ? 'โฉ๏ธ' :
- usage.usageType === 'generic' ? '<>' :
- usage.usageType === 'implements' ? '๐' :
- usage.usageType === 'extends' ? '๐' : 'โข';
- response += ` ${icon} Line ${usage.line}: ${usage.context}\n`;
- });
- if (fileUsages.length > 5) {
- response += ` ... and ${fileUsages.length - 5} more\n`;
- }
- response += `\n`;
- });
-
- return {
- content: [{ type: 'text', text: response }],
- };
- } catch (error) {
- return {
- content: [{
- type: 'text',
- text: `Error finding type usages: ${error instanceof Error ? error.message : 'Unknown error'}`,
- }],
- };
}
-}
-
-private getRelativePath(filePath: string): string {
- if (!this.dependencyAnalyzer) return filePath;
- return filePath.replace(this.dependencyAnalyzer['workspacePath'], '').replace(/^[\\\/]/, '');
-}
-
- // ========== V0.5.0 HANDLERS - PLATFORM SYNC ==========
-
- private handleSwitchPlatform(args: { fromPlatform: AIPlatform; toPlatform: AIPlatform }) {
- const handoff = this.platformSync.createHandoff(args.fromPlatform, args.toPlatform);
-
- if (!handoff) {
- return {
- content: [{
- type: 'text',
- text: 'No active project. Initialize a project first to enable platform handoff.',
- }],
- };
- }
-
- this.platformSync.setPlatform(args.toPlatform);
+ /**
+ * Read file from workspace
+ */
+ private async handleReadFile(args: { path: string }) {
+ const workspace = this.workspaceDetector.getCurrentWorkspace();
+ if (!workspace) {
return {
content: [{
type: 'text',
- text: handoff.summary,
+ text: ' No workspace set. Run `set_project` first.',
}],
};
}
- private handleGetPlatformStatus() {
- const status = PlatformSync.getPlatformStatus();
- const current = this.platformSync.getPlatform();
-
- let response = `๏ฟฝ **Context Sync Platform Status**\n\n`;
- response += `**Current Platform:** ${current}\n\n`;
-
- // Core platforms (fully supported)
- response += `## ๐ฏ **Core Platforms**\n`;
- response += `${status.claude ? 'โ
' : 'โ'} **Claude Desktop** - Advanced reasoning and analysis\n`;
- response += `${status.cursor ? 'โ
' : 'โ'} **Cursor IDE** - AI-powered coding environment\n`;
- response += `${status.copilot ? 'โ
' : 'โ'} **GitHub Copilot** - VS Code integration\n\n`;
-
- // Extended platforms
- response += `## ๐ง **Extended Platforms**\n`;
- response += `${status.continue ? 'โ
' : 'โ'} **Continue.dev** - Open source AI coding assistant\n`;
- response += `${status.zed ? 'โ
' : 'โ'} **Zed Editor** - Fast collaborative editor\n`;
- response += `${status.windsurf ? 'โ
' : 'โ'} **Windsurf** - Codeium's AI IDE\n`;
- response += `${status.tabnine ? 'โ
' : 'โ'} **TabNine** - Enterprise AI completion\n\n`;
+ try {
+ // Use optimized read file engine
+ const engine = new ReadFileEngine(workspace);
+ const fileContext = await engine.read(args.path);
+ // Format rich response
+ let response = ` **${fileContext.path}**\n\n`;
- // Count active platforms
- const activePlatforms = Object.values(status).filter(Boolean).length;
- response += `**Active Platforms:** ${activePlatforms}/13\n\n`;
-
- if (activePlatforms === 0) {
- response += `โ ๏ธ **No platforms configured yet**\n`;
- response += `Get started with: "help me get started with context-sync"\n`;
- } else if (activePlatforms < 3) {
- response += `๐ก **Want to add more platforms?**\n`;
- response += `Use: "switch platform to [platform-name]" for setup instructions\n`;
+ // Metadata section
+ response += ` **Metadata**\n`;
+ response += ` Language: ${fileContext.metadata.language}\n`;
+ response += ` Size: ${(fileContext.metadata.size / 1024).toFixed(1)} KB\n`;
+ response += ` Lines: ${fileContext.metadata.linesOfCode} LOC\n`;
+ response += ` Last Modified: ${fileContext.metadata.lastModified.toLocaleDateString()}\n`;
+ if (fileContext.metadata.author) {
+ response += ` Last Author: ${fileContext.metadata.author}\n`;
}
+ if (fileContext.metadata.changeFrequency > 0) {
+ response += ` Change Frequency: ${fileContext.metadata.changeFrequency} commit(s) in last 30 days\n`;
+ }
+ response += `\n`;
- return {
- content: [{ type: 'text', text: response }],
+ // Complexity section
+ const complexityEmoji = {
+ 'low': '',
+ 'medium': '',
+ 'high': '',
+ 'very-high': ''
};
- }
+ response += `${complexityEmoji[fileContext.complexity.level]} **Complexity: ${fileContext.complexity.level}** (score: ${fileContext.complexity.score})\n`;
+ if (fileContext.complexity.reasons.length > 0) {
+ response += ` ${fileContext.complexity.reasons.join(', ')}\n`;
+ }
+ response += `\n`;
- private handleGetPlatformContext(args: { platform?: AIPlatform }) {
- const platform = args.platform || this.platformSync.getPlatform();
- const context = this.platformSync.getPlatformContext(platform);
+ // Relationships section
+ if (fileContext.relationships.imports.length > 0) {
+ response += ` **Imports** (${fileContext.relationships.imports.length}):\n`;
+ fileContext.relationships.imports.slice(0, 5).forEach(imp => {
+ response += ` ${imp}\n`;
+ });
+ if (fileContext.relationships.imports.length > 5) {
+ response += ` ... and ${fileContext.relationships.imports.length - 5} more\n`;
+ }
+ response += `\n`;
+ }
- return {
- content: [{ type: 'text', text: context }],
- };
- }
+ if (fileContext.relationships.relatedTests.length > 0) {
+ response += ` **Related Tests**:\n`;
+ fileContext.relationships.relatedTests.forEach(test => {
+ response += ` ${test}\n`;
+ });
+ response += `\n`;
+ }
- private handleSetupCursor() {
- const paths = PlatformSync.getConfigPaths();
- const cursorPath = paths.cursor;
- const instructions = PlatformSync.getInstallInstructions('cursor');
+ if (fileContext.relationships.relatedConfigs.length > 0) {
+ response += ` **Related Configs**:\n`;
+ fileContext.relationships.relatedConfigs.forEach(config => {
+ response += ` ${config}\n`;
+ });
+ response += `\n`;
+ }
- let response = `๐ Cursor Setup Instructions\n\n`;
- response += instructions;
- response += `\n\n๐ Configuration File: ${cursorPath}\n\n`;
- response += `โ ๏ธ Note: You'll need to manually edit the configuration file and restart Cursor.`;
+ // Content section
+ response += ` **Content**\n\n\`\`\`${fileContext.metadata.language.toLowerCase()}\n${fileContext.content}\n\`\`\``;
return {
- content: [{ type: 'text', text: response }],
+ content: [{
+ type: 'text',
+ text: response,
+ }],
+ };
+ } catch (error: any) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` Failed to read file: ${error.message}\n\nStack: ${error.stack}`,
+ }],
};
}
+ }
- private async handleGetStarted() {
- // Check installation status - if we're here, Context Sync is working
- const version = this.getVersion();
-
- // Get current state using session-based approach (NEW)
- const currentProject = this.getCurrentProject();
+ /**
+ * Search workspace (unified search for files and content)
+ */
+ private async handleSearch(args: { query: string; type: 'files' | 'content'; options?: any }) {
const workspace = this.workspaceDetector.getCurrentWorkspace();
- const detectedPlatform = PlatformSync.detectPlatform(); // Real-time detection
- const platformStatus = PlatformSync.getPlatformStatus();
-
- // Build response
- let response = `๐ **Context Sync v${version} is working!**\n\n`;
-
- // Show integrated AI platforms with counts
- const activePlatforms = Object.values(platformStatus).filter(Boolean).length;
- response += `๐ **Universal AI Platform Support (${activePlatforms}/13 active):**\n\n`;
-
- response += `**๐ฏ Core Platforms:**\n`;
- response += `${platformStatus.claude ? 'โ
' : 'โช'} Claude Desktop โข ${platformStatus.cursor ? 'โ
' : 'โช'} Cursor IDE โข ${platformStatus.copilot ? 'โ
' : 'โช'} VS Code + Copilot\n\n`;
-
- response += `**๐ง Extended Support:**\n`;
- response += `${platformStatus.continue ? 'โ
' : 'โช'} Continue.dev โข ${platformStatus.zed ? 'โ
' : 'โช'} Zed Editor โข ${platformStatus.windsurf ? 'โ
' : 'โช'} Windsurf\n`;
- response += `${platformStatus.tabnine ? 'โ
' : 'โช'} TabNine\n\n`;
-
- if (activePlatforms > 1) {
- response += `๐ **Multi-platform setup detected!** Your context syncs across ${activePlatforms} platforms.\n\n`;
- } else if (activePlatforms === 1) {
- response += `๐ก **Single platform detected.** Add more with "get platform status"\n\n`;
- }
-
- // Current status - simplified and useful
- response += `๐ **Current Status:**\n`;
- if (currentProject) {
- response += `โข Active Project: ${currentProject.name}\n`;
- }
- if (workspace) {
- response += `โข Workspace: Set\n`;
- }
- response += `\n`;
-
- // Next steps based on current state
- response += `๐ **Quick Start Options:**\n\n`;
-
- if (!currentProject) {
- response += `1๏ธโฃ **Set up your workspace**\n`;
- response += ` โ "Set workspace to /path/to/your/project"\n\n`;
- } else {
- response += `1๏ธโฃ **Explore your project**\n`;
- response += ` โ "Scan workspace" or "Get project structure"\n\n`;
- }
-
- response += `2๏ธโฃ **Try key features**\n`;
- response += ` โ "Show me what Context Sync can do"\n\n`;
-
- // Universal platform guidance
- response += `๐ก **Getting Started:**\n\n`;
-
if (!workspace) {
- response += `๐ฏ **First, set your workspace:**\n`;
- response += `โข Try: "Set workspace to /path/to/your/project"\n`;
- response += `โข This enables all Context Sync features\n\n`;
- } else {
- response += `๐ฏ **Your workspace is ready! Try these:**\n`;
- response += `โข "Scan workspace" - Get project overview\n`;
- response += `โข "Search content for TODO" - Find todos in code\n`;
- response += `โข "Create todo: Fix authentication bug" - Add todos\n`;
- response += `โข "Get project structure" - See file organization\n\n`;
- }
-
- // Show what each platform offers
- response += `**All Platforms Support:**\n`;
- response += `โข ๐ Project workspace management\n`;
- response += `โข ๐ Code search and analysis\n`;
- response += `โข ๐ Todo management with auto-linking\n`;
- response += `โข ๐ Cross-platform context sync\n`;
- response += `โข ๐ **Notion Integration** - Save docs, pull specs, export ADRs\n`;
- response += `โข โก Performance monitoring\n`;
- response += `โข ๐ง Intelligent file skimming for large codebases\n\n`;
-
- response += `๐ง **Advanced Commands:**\n`;
- response += `โข "Setup cursor" - Get Cursor IDE setup instructions\n`;
- response += `โข "Check platform status" - Verify platform configurations\n`;
- response += `โข "Get performance report" - View system metrics\n`;
- response += `โข "Show features" - See all available tools\n\n`;
-
- response += `๐ **Notion Integration** (Optional):\n`;
- response += `โข Generate and save documentation to Notion\n`;
- response += `โข Pull project specs from Notion for implementation\n`;
- response += `โข Export architecture decisions as ADRs\n`;
- response += `โข Create project dashboards automatically\n`;
- response += `โข Run \`context-sync-setup\` to enable (2 min setup)\n\n`;
-
- response += `**Ready to get started?** Choose an option above! ๐`;
-
- return {
- content: [
- {
+ return {
+ content: [{
type: 'text',
- text: response
- }
- ]
- };
- }
-
- private async handleDebugSession() {
- const version = this.getVersion();
- const platform = this.platformSync.getPlatform();
-
- // Session-based current project (NEW)
- const sessionProject = this.getCurrentProject();
-
- // Database-based current project (OLD - should be deprecated)
- const dbProject = this.storage.getCurrentProject();
-
- // Workspace information
- const workspace = this.workspaceDetector.getCurrentWorkspace();
-
- // All projects in database
- const allProjects = this.storage.getAllProjects();
-
- // Build response
- let response = `๐ **Context Sync Session Debug v${version}**\n\n`;
-
- // Session information
- response += `๐ฑ **Session State:**\n`;
- response += `โข Platform: ${platform}\n`;
- response += `โข Session Project ID: ${this.currentProjectId || 'null'}\n`;
- response += `โข Session Project: ${sessionProject ? sessionProject.name : 'None'}\n\n`;
-
- // Database state
- response += `๐พ **Database State:**\n`;
- response += `โข DB Current Project: ${dbProject ? dbProject.name : 'None'}\n`;
- response += `โข Total Projects: ${allProjects.length}\n`;
- response += `โข Workspace Set: ${workspace ? 'Yes' : 'No'}\n`;
- if (workspace) {
- response += `โข Workspace Path: ${workspace}\n`;
- }
- response += `\n`;
-
- // Project list
- if (allProjects.length > 0) {
- response += `๐ **All Projects:**\n`;
- allProjects.forEach((project: any, index: number) => {
- const isSession = sessionProject && sessionProject.id === project.id;
- const isDB = dbProject && dbProject.id === project.id;
- const markers = [];
- if (isSession) markers.push('SESSION');
- if (isDB) markers.push('DB');
- const markerText = markers.length > 0 ? ` [${markers.join(', ')}]` : '';
-
- response += `${index + 1}. ${project.name}${markerText}\n`;
- response += ` Path: ${project.path}\n`;
- response += ` ID: ${project.id}\n`;
- });
- response += `\n`;
+ text: ' No workspace set. Run `set_project` first.',
+ }],
+ };
}
-
- // Architecture validation
- response += `โ๏ธ **Architecture Validation:**\n`;
- response += `โข Session-based: ${sessionProject ? 'โ
' : 'โ'}\n`;
- response += `โข DB deprecated: ${!dbProject || sessionProject ? 'โ
' : 'โ ๏ธ'}\n`;
- response += `โข Consistency: ${(!sessionProject && !dbProject) || (sessionProject && sessionProject.id === dbProject?.id) ? 'โ
' : 'โ ๏ธ Mismatch'}\n\n`;
-
- // Multi-project testing instructions
- response += `๐งช **Multi-Project Testing:**\n`;
- response += `1. Test different MCP clients with different projects\n`;
- response += `2. Verify each maintains separate session state\n`;
- response += `3. Check todo auto-linking per session\n\n`;
-
- // Notion integration status
- response += `๏ฟฝ **Notion Integration:**\n`;
- const notionConfigPath = join(os.homedir(), '.context-sync', 'config.json');
- let notionConfigured = false;
+
try {
- if (fs.existsSync(notionConfigPath)) {
- const config = JSON.parse(fs.readFileSync(notionConfigPath, 'utf-8'));
- notionConfigured = !!(config.notion?.token);
- }
- } catch {
- // Ignore config read errors
- }
- response += `โข Status: ${notionConfigured ? 'โ
Configured' : 'โช Not configured'}\n`;
- if (!notionConfigured) {
- response += `โข Setup: Run \`context-sync-setup\` to enable Notion features\n`;
- }
- response += `โข Tools: notion_create_page, notion_search, notion_read_page, etc.\n\n`;
-
- response += `๏ฟฝ๐ก **Usage:** Use this tool to debug session isolation and project state consistency.\n`;
- response += `๐ก **Notion Issues?** Re-run \`context-sync-setup\` to reconfigure or test connection.`;
-
- return {
- content: [
- {
- type: 'text',
- text: response
- }
- ]
- };
- }
+ const { query, type, options = {} } = args;
+
+ // Use optimized search engine
+ const engine = new SearchEngine(workspace);
- private async handleGetPerformanceReport(args: { operation?: string; reset?: boolean }) {
- const { operation, reset = false } = args;
-
- let response = `๐ **Context Sync Performance Report**\n\n`;
-
- if (operation) {
- // Get stats for specific operation
- const stats = PerformanceMonitor.getStats(operation);
- if (stats.count > 0) {
- response += `๐ **Operation: ${operation}**\n`;
- response += `โข Calls: ${stats.count}\n`;
- response += `โข Total Time: ${stats.totalDuration.toFixed(2)}ms\n`;
- response += `โข Average Time: ${stats.averageDuration.toFixed(2)}ms\n`;
- response += `โข Min Time: ${stats.minDuration.toFixed(2)}ms\n`;
- response += `โข Max Time: ${stats.maxDuration.toFixed(2)}ms\n\n`;
- } else {
- response += `โ No data found for operation: ${operation}\n\n`;
- }
- } else {
- // Get all operation stats
- const allStats = PerformanceMonitor.getAllOperationStats();
- if (Object.keys(allStats).length === 0) {
- response += `โน๏ธ No performance data collected yet.\n`;
- response += `Performance monitoring tracks database operations like:\n`;
- response += `โข findProjectByPath\n`;
- response += `โข createProject\n`;
- response += `โข getAllProjects\n\n`;
- } else {
- response += `๐ **All Operations:**\n\n`;
- Object.entries(allStats).forEach(([opName, stats]) => {
- response += `**${opName}:**\n`;
- response += `โข Calls: ${stats.count}\n`;
- response += `โข Avg Time: ${stats.averageDuration.toFixed(2)}ms\n`;
- response += `โข Total Time: ${stats.totalDuration.toFixed(2)}ms\n`;
- response += `โข Range: ${stats.minDuration.toFixed(2)}ms - ${stats.maxDuration.toFixed(2)}ms\n\n`;
+ if (type === 'files') {
+ const result = await engine.searchFiles(query, {
+ maxResults: options.maxResults || 50,
+ enrichContext: true,
+ caseSensitive: options.caseSensitive || false
});
- }
-
- // Use the formatted report from PerformanceMonitor
- const detailedReport = PerformanceMonitor.getReport();
- response += `๐ **Detailed Report:**\n${detailedReport}\n\n`;
- }
-
- if (reset) {
- PerformanceMonitor.clearMetrics();
- response += `๐ **Performance data has been reset.**\n\n`;
- }
-
- response += `๐ก **Usage:** Monitor database operation performance to identify optimization opportunities.`;
-
- return {
- content: [
- {
- type: 'text',
- text: response
+
+ if (result.totalMatches === 0) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` No files found matching "${query}"`,
+ }],
+ };
}
- ]
- };
- }
-
- private handleDiscoverAIPlatforms(args: { category?: 'all' | 'core' | 'extended' | 'api'; includeSetupInstructions?: boolean }) {
- const { category = 'all', includeSetupInstructions = false } = args;
-
- // Filter platforms by category
- const platforms = Object.entries(PLATFORM_REGISTRY)
- .filter(([_, metadata]) => {
- if (category === 'all') return true;
- return metadata.category === category;
- })
- .sort(([_, a], [__, b]) => {
- // Sort by category priority: core > extended > api
- const priority = { core: 0, extended: 1, api: 2 };
- return priority[a.category] - priority[b.category];
- });
-
- let response = `๐ **AI Platform Discovery** (${platforms.length} platforms)\n\n`;
-
- // Add category-specific intro
- if (category === 'core') {
- response += `๐ฏ **Core Platforms** - Fully integrated with rich MCP support:\n\n`;
- } else if (category === 'extended') {
- response += `๐ง **Extended Platforms** - Advanced integrations with growing support:\n\n`;
- } else if (category === 'api') {
- response += `๐ **API Integrations** - Direct API connections for programmatic access:\n\n`;
- } else {
- response += `**All 14 supported AI platforms categorized by integration level:**\n\n`;
- }
-
- // Group by category for display
- const categorized = platforms.reduce((acc, [platformId, metadata]) => {
- if (!acc[metadata.category]) acc[metadata.category] = [];
- acc[metadata.category].push([platformId, metadata]);
- return acc;
- }, {} as Record>);
-
- // Display each category
- const categoryTitles = {
- core: '๐ฏ **Core Platforms**',
- extended: '๐ง **Extended Platforms**',
- api: '๐ **API Integrations**'
- };
- const categoryDescriptions = {
- core: 'Full MCP integration with rich context sharing',
- extended: 'Advanced integrations with growing feature support',
- api: 'Direct API access for programmatic AI interactions'
- };
-
- for (const [cat, title] of Object.entries(categoryTitles)) {
- if (categorized[cat] && (category === 'all' || category === cat)) {
- response += `${title} - ${categoryDescriptions[cat as keyof typeof categoryDescriptions]}\n`;
+ let response = ` **Found ${result.totalMatches} files**\n\n`;
- for (const [platformId, metadata] of categorized[cat]) {
- const status = PlatformSync.getPlatformStatus();
- const isActive = status[platformId as keyof typeof status];
- const statusIcon = isActive ? 'โ
' : 'โช';
+ // Show top matches with context
+ result.matches.slice(0, 20).forEach((match, i) => {
+ const score = Math.round(match.relevanceScore);
+ const matchTypeEmoji = match.matchType === 'exact' ? '' :
+ match.matchType === 'prefix' ? '' : '';
- response += `${statusIcon} **${metadata.name}**\n`;
- response += ` ${metadata.description}\n`;
- response += ` Complexity: ${metadata.setupComplexity} โข MCP: ${metadata.mcpSupport} โข Status: ${metadata.status}\n`;
-
- if (metadata.features.length > 0) {
- response += ` Features: ${metadata.features.join(', ')}\n`;
- }
+ response += `${i + 1}. ${matchTypeEmoji} ${match.relativePath}`;
- if (includeSetupInstructions) {
- response += ` Website: ${metadata.website}\n`;
+ // Show file context if available
+ if (match.context) {
+ const complexityEmoji = match.context.complexity === 'low' ? '' :
+ match.context.complexity === 'medium' ? '' :
+ match.context.complexity === 'high' ? '' : '';
+ response += ` (${complexityEmoji} ${match.context.complexity}`;
+ if (match.context.linesOfCode) {
+ response += `, ${match.context.linesOfCode} LOC`;
+ }
+ response += `)`;
}
-
response += `\n`;
- }
- response += `\n`;
- }
- }
-
- // Add platform statistics
- const currentStatus = PlatformSync.getPlatformStatus();
- const activeCount = Object.values(currentStatus).filter(Boolean).length;
- const totalCount = Object.keys(PLATFORM_REGISTRY).length;
-
- response += `๐ **Platform Status:**\n`;
- response += `โข Active Platforms: ${activeCount}/${totalCount}\n`;
- response += `โข Current Platform: ${this.platformSync.getPlatform()}\n\n`;
-
- // Add quick actions
- response += `๐ **Quick Actions:**\n`;
- response += `โข \`get platform status\` - See detailed platform configuration\n`;
- response += `โข \`switch platform to [name]\` - Switch to a different platform\n`;
- response += `โข \`discover ai platforms core\` - View only core platforms\n`;
- response += `โข \`discover ai platforms extended\` - View extended platforms\n`;
- response += `โข \`discover ai platforms api\` - View API integrations\n\n`;
-
- // Add setup instructions if requested
- if (includeSetupInstructions) {
- response += `๐ **Setup Instructions:**\n`;
- response += `Each platform has specific setup requirements. Use the platform-specific setup commands or visit the Context Sync documentation for detailed configuration guides.\n\n`;
- }
+ });
- response += `**Universal Memory Infrastructure** - Context Sync provides consistent memory and context sharing across all supported platforms, making it truly platform-agnostic AI infrastructure.`;
+ if (result.totalMatches > 20) {
+ response += `\n... and ${result.totalMatches - 20} more matches`;
+ }
- return {
- content: [{
- type: 'text',
- text: response,
- }],
- };
- }
+ // Show suggestions
+ if (result.suggestions && result.suggestions.length > 0) {
+ response += `\n\n **Suggestions:** ${result.suggestions.join(', ')}`;
+ }
- private handleGetPlatformRecommendations(args: { useCase?: string; priority?: string }) {
- const { useCase = 'coding', priority = 'ease_of_use' } = args;
-
- let response = `๐ฏ **AI Platform Recommendations**\n\n`;
- response += `**Your Profile:** ${useCase} focused, prioritizing ${priority.replace('_', ' ')}\n\n`;
-
- // Get current status for personalization
- const currentStatus = PlatformSync.getPlatformStatus();
- const currentPlatform = this.platformSync.getPlatform();
- const activeCount = Object.values(currentStatus).filter(Boolean).length;
-
- // Define recommendation logic based on use case and priority
- const recommendations: Array<{
- platform: string;
- metadata: PlatformMetadata;
- score: number;
- reasons: string[];
- }> = [];
-
- // Score each platform based on criteria
- Object.entries(PLATFORM_REGISTRY).forEach(([platformId, metadata]) => {
- let score = 0;
- const reasons: string[] = [];
-
- // Use case scoring
- switch (useCase) {
- case 'coding':
- if (['cursor', 'copilot', 'continue'].includes(platformId)) {
- score += 3;
- reasons.push('Excellent for coding workflows');
- }
- if (metadata.features.includes('Real-time coding') ||
- metadata.features.includes('Code completion') ||
- metadata.features.includes('AI editing')) {
- score += 2;
- reasons.push('Strong coding features');
- }
- break;
-
- case 'research':
- if (['claude', 'gemini', 'openai'].includes(platformId)) {
- score += 3;
- reasons.push('Excellent for research and analysis');
- }
- if (metadata.features.includes('Advanced reasoning') ||
- metadata.features.includes('Large context')) {
- score += 2;
- reasons.push('Strong analytical capabilities');
- }
- break;
-
- case 'local':
- if (['ollama'].includes(platformId)) {
- score += 4;
- reasons.push('Runs entirely on your machine');
- }
- if (metadata.features.includes('Privacy focused') ||
- metadata.features.includes('Local models')) {
- score += 3;
- reasons.push('Privacy-first approach');
- }
- break;
+ // Show clusters
+ if (result.clusters && Object.keys(result.clusters).length > 1) {
+ response += `\n\n **Clustered by directory:**\n`;
+ Object.entries(result.clusters).slice(0, 5).forEach(([dir, matches]) => {
+ response += ` ${dir}: ${matches.length} file(s)\n`;
+ });
+ }
- case 'enterprise':
- if (['copilot', 'tabnine', 'codewisperer'].includes(platformId)) {
- score += 3;
- reasons.push('Enterprise-grade features');
- }
- if (metadata.features.includes('Enterprise') ||
- metadata.features.includes('Security')) {
- score += 2;
- reasons.push('Enterprise support available');
- }
- break;
+ return {
+ content: [{ type: 'text', text: response }],
+ };
+ } else {
+ const result = await engine.searchContent(query, {
+ maxResults: options.maxResults || 100,
+ filePattern: options.filePattern,
+ caseSensitive: options.caseSensitive || false,
+ regex: options.regex || false,
+ enrichContext: false // Skip for performance on content search
+ });
- case 'beginner':
- if (metadata.setupComplexity === 'easy') {
- score += 3;
- reasons.push('Easy to set up');
- }
- if (['claude', 'cursor'].includes(platformId)) {
- score += 2;
- reasons.push('Beginner-friendly interface');
- }
- break;
- }
+ if (result.totalMatches === 0) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` No content found matching "${query}"`,
+ }],
+ };
+ }
- // Priority scoring
- switch (priority) {
- case 'ease_of_use':
- if (metadata.setupComplexity === 'easy') {
- score += 2;
- reasons.push('Simple setup process');
- }
- if (metadata.mcpSupport === 'native') {
- score += 2;
- reasons.push('Native MCP integration');
- }
- break;
-
- case 'privacy':
- if (['ollama', 'continue'].includes(platformId)) {
- score += 3;
- reasons.push('Privacy-focused design');
- }
- if (metadata.features.includes('Local') ||
- metadata.features.includes('Self-hosted')) {
- score += 2;
- reasons.push('Local processing available');
- }
- break;
-
- case 'features':
- if (metadata.features.length >= 4) {
- score += 2;
- reasons.push('Rich feature set');
- }
- if (metadata.category === 'core') {
- score += 2;
- reasons.push('Full Context Sync integration');
- }
- break;
-
- case 'cost':
- if (['continue', 'codeium', 'ollama'].includes(platformId)) {
- score += 3;
- reasons.push('Free or open source');
- }
- if (metadata.features.includes('Free tier')) {
- score += 2;
- reasons.push('Free tier available');
- }
- break;
+ let response = ` **Found ${result.totalMatches} matches**\n\n`;
- case 'performance':
- if (['cursor', 'zed'].includes(platformId)) {
- score += 2;
- reasons.push('Optimized for speed');
- }
- if (metadata.features.includes('Fast')) {
- score += 1;
- reasons.push('Fast performance');
+ // Group by file for better readability
+ if (result.clusters) {
+ const files = Object.keys(result.clusters).slice(0, 10);
+ files.forEach(file => {
+ const matches = result.clusters![file];
+ response += ` **${file}** (${matches.length} match${matches.length > 1 ? 'es' : ''})\n`;
+ matches.slice(0, 3).forEach(match => {
+ response += ` Line ${match.line}: ${match.text?.trim().substring(0, 100)}\n`;
+ });
+ if (matches.length > 3) {
+ response += ` ... and ${matches.length - 3} more\n`;
+ }
+ response += `\n`;
+ });
+
+ if (Object.keys(result.clusters).length > 10) {
+ response += `... and ${Object.keys(result.clusters).length - 10} more files\n`;
}
- break;
- }
+ }
- // Category bonus
- if (metadata.category === 'core') score += 1;
-
- // Current platform bonus/penalty
- if (platformId === currentPlatform) {
- score += 1;
- reasons.push('Currently active');
+ return {
+ content: [{ type: 'text', text: response }],
+ };
}
+ } catch (error: any) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` Search failed: ${error.message}`,
+ }],
+ };
+ }
+ }
- recommendations.push({
- platform: platformId,
- metadata,
- score,
- reasons: reasons.slice(0, 3) // Limit to top 3 reasons
- });
- });
-
- // Sort by score and get top 5
- const topRecommendations = recommendations
- .sort((a, b) => b.score - a.score)
- .slice(0, 5);
+ /**
+ * Get project structure with complexity analysis
+ */
+ private async handleStructure(args?: { depth?: number }) {
+ const workspace = this.workspaceDetector.getCurrentWorkspace();
+ if (!workspace) {
+ return {
+ content: [{
+ type: 'text',
+ text: ' No workspace set. Run `set_project` first.',
+ }],
+ };
+ }
- response += `๐ **Top Recommendations for You:**\n\n`;
+ try {
+ const depth = args?.depth || 3;
+
+ // Use optimized structure engine
+ const engine = new StructureEngine(workspace);
+ const result = await engine.getStructure(depth, {
+ includeMetadata: true,
+ analyzeComplexity: true,
+ detectHotspots: true
+ });
- topRecommendations.forEach((rec, index) => {
- const isActive = currentStatus[rec.platform as keyof typeof currentStatus];
- const statusIcon = isActive ? 'โ
' : 'โญ';
- const position = index === 0 ? '๐ฅ' : index === 1 ? '๐ฅ' : index === 2 ? '๐ฅ' : `${index + 1}.`;
+ let response = ` **Project Structure**\n\n`;
+ response += `\`\`\`\n${result.tree}\`\`\`\n\n`;
- response += `${position} ${statusIcon} **${rec.metadata.name}** (Score: ${rec.score})\n`;
- response += ` ${rec.metadata.description}\n`;
- response += ` Why recommended: ${rec.reasons.join(', ')}\n`;
- response += ` Setup: ${rec.metadata.setupComplexity} โข Category: ${rec.metadata.category}\n\n`;
- });
-
- // Add setup suggestions
- response += `๐ **Next Steps:**\n\n`;
-
- if (activeCount === 0) {
- response += `1๏ธโฃ **Get Started:** Try "${topRecommendations[0].metadata.name}" - ${topRecommendations[0].reasons[0]}\n`;
- response += `2๏ธโฃ **Easy Alternative:** Consider "${topRecommendations[1].metadata.name}" as backup\n`;
- } else if (activeCount < 3) {
- const notActive = topRecommendations.filter(r =>
- !currentStatus[r.platform as keyof typeof currentStatus]
- );
- if (notActive.length > 0) {
- response += `1๏ธโฃ **Expand Your Setup:** Add "${notActive[0].metadata.name}"\n`;
- response += `2๏ธโฃ **Current Platform:** Keep using "${currentPlatform}" for familiar workflows\n`;
+ // Summary statistics
+ response += ` **Summary**\n`;
+ response += ` ${result.summary.totalFiles} files, ${result.summary.totalDirectories} directories\n`;
+ if (result.summary.totalLOC > 0) {
+ response += ` ${result.summary.totalLOC.toLocaleString()} lines of code\n`;
}
- } else {
- response += `โ
**You're all set!** With ${activeCount} platforms active, you have great coverage.\n`;
- response += `๐ก **Pro Tip:** Use "switch platform" to move contexts between platforms seamlessly.\n`;
- }
-
- response += `\n๐ง **Quick Actions:**\n`;
- response += `โข \`discover ai platforms ${topRecommendations[0].metadata.category}\` - See similar platforms\n`;
- response += `โข \`switch platform to ${topRecommendations[0].platform}\` - Try top recommendation\n`;
- response += `โข \`get platform status\` - Check current setup\n\n`;
-
- // Add use case specific tips
- switch (useCase) {
- case 'coding':
- response += `๐ป **Coding Pro Tips:**\n`;
- response += `โข Use Cursor for real-time AI assistance while coding\n`;
- response += `โข Claude excels at explaining complex code and architecture\n`;
- response += `โข Continue.dev offers the most customization for power users\n`;
- break;
+ response += ` ${(result.summary.totalSize / (1024 * 1024)).toFixed(2)} MB total size\n`;
- case 'local':
- response += `๐ **Privacy-First Setup:**\n`;
- response += `โข Ollama keeps everything on your machine\n`;
- response += `โข Continue.dev supports local models and custom endpoints\n`;
- response += `โข Consider hardware requirements for local model performance\n`;
- break;
+ if (Object.keys(result.summary.languages).length > 0) {
+ const languages = Object.entries(result.summary.languages)
+ .sort(([, a], [, b]) => b - a)
+ .map(([lang]) => lang)
+ .slice(0, 3)
+ .join(', ');
+ response += ` Languages: ${languages}\n`;
+ }
- case 'enterprise':
- response += `๐ข **Enterprise Considerations:**\n`;
- response += `โข GitHub Copilot offers the strongest enterprise policies\n`;
- response += `โข TabNine provides on-premise deployment options\n`;
- response += `โข All core platforms support team collaboration features\n`;
- break;
- }
-
- response += `\n**Context Sync makes it easy to try multiple platforms** - your memory and context seamlessly transfers between all supported AI tools!`;
-
- return {
- content: [{
- type: 'text',
- text: response,
- }],
- };
- }
-
- // ========== TODO HANDLERS WITH CURRENT PROJECT INTEGRATION ==========
+ // Architecture pattern
+ if (result.summary.architecturePattern) {
+ response += `\n **Architecture:** ${result.summary.architecturePattern}\n`;
+ }
- private async handleTodoCreate(args: any) {
- // Auto-link to current project if no projectId provided
- if (!args.projectId) {
- const currentProject = this.getCurrentProject();
- if (currentProject) {
- args.projectId = currentProject.id;
+ // Hotspots
+ if (result.summary.hotspots && result.summary.hotspots.length > 0) {
+ response += `\n **Hotspots** (high complexity areas):\n`;
+ result.summary.hotspots.forEach((hotspot, i) => {
+ const complexityEmoji = hotspot.complexity >= 60 ? '' : '';
+ response += `${i + 1}. ${complexityEmoji} ${hotspot.path} - ${hotspot.reason} (${hotspot.loc.toLocaleString()} LOC)\n`;
+ });
}
- }
- const todo = this.todoManager.createTodo(args);
- const projectInfo = args.projectId ? ` (linked to current project)` : '';
-
- return {
- content: [{
- type: 'text',
- text: `โ
Todo created: "${todo.title}"${projectInfo}\n\nID: ${todo.id}\nPriority: ${todo.priority}\nStatus: ${todo.status}${todo.dueDate ? `\nDue: ${todo.dueDate}` : ''}${todo.tags.length > 0 ? `\nTags: ${todo.tags.join(', ')}` : ''}`,
- }],
- };
- }
+ // Insights
+ if (result.insights && result.insights.length > 0) {
+ response += `\n **Insights**\n`;
+ result.insights.forEach(insight => {
+ response += ` ${insight}\n`;
+ });
+ }
- private async handleTodoGet(args: { id: string }) {
- const todo = this.todoManager.getTodo(args.id);
-
- if (!todo) {
return {
content: [{
type: 'text',
- text: `โ Todo not found: ${args.id}`,
+ text: response,
+ }],
+ };
+ } catch (error: any) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` Failed to get structure: ${error.message}`,
}],
- isError: true,
};
}
-
- return {
- content: [{
- type: 'text',
- text: `๐ **${todo.title}**\n\nStatus: ${todo.status}\nPriority: ${todo.priority}\n${todo.description ? `Description: ${todo.description}\n` : ''}${todo.dueDate ? `Due: ${todo.dueDate}\n` : ''}${todo.tags.length > 0 ? `Tags: ${todo.tags.join(', ')}\n` : ''}Created: ${todo.createdAt}\nUpdated: ${todo.updatedAt}${todo.completedAt ? `\nCompleted: ${todo.completedAt}` : ''}`,
- }],
- };
}
- private async handleTodoList(args?: any) {
- // If no projectId specified and there's a current project, filter by current project
- if (!args?.projectId) {
- const currentProject = this.getCurrentProject();
- if (currentProject) {
- args = { ...args, projectId: currentProject.id };
- }
+ /**
+ * Git operations dispatcher (namespaced tool)
+ */
+ private async handleGit(args: {
+ action: 'status' | 'context' | 'hotspots' | 'coupling' | 'blame' | 'analysis';
+ staged?: boolean;
+ files?: string[];
+ path?: string;
+ limit?: number;
+ minCoupling?: number;
+ }) {
+ const { action, ...restArgs } = args;
+
+ switch (action) {
+ case 'status':
+ return await this.handleGitStatus();
+ case 'context':
+ return await this.handleGitContext(restArgs);
+ case 'hotspots':
+ return await this.handleGitHotspots(restArgs.limit);
+ case 'coupling':
+ return await this.handleGitCoupling(restArgs.minCoupling);
+ case 'blame':
+ if (!restArgs.path) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` Missing required parameter 'path' for git blame action.`,
+ }],
+ };
+ }
+ return await this.handleGitBlame(restArgs.path);
+ case 'analysis':
+ return await this.handleGitAnalysis();
+ default:
+ return {
+ content: [{
+ type: 'text',
+ text: ` Unknown git action: ${action}. Use 'status', 'context', 'hotspots', 'coupling', 'blame', or 'analysis'.`,
+ }],
+ };
}
+ }
- const todos = this.todoManager.listTodos(args);
-
- if (todos.length === 0) {
- const currentProject = this.getCurrentProject();
- const projectContext = currentProject ? ` for project "${currentProject.name}"` : '';
+ /**
+ * Git status with impact analysis
+ */
+ private async handleGitStatus() {
+ const workspace = this.workspaceDetector.getCurrentWorkspace();
+ if (!workspace) {
return {
content: [{
type: 'text',
- text: `๐ No todos found${projectContext}`,
+ text: ' No workspace set. Run `set_project` first.',
}],
};
}
- const grouped = {
- urgent: todos.filter(t => t.priority === 'urgent' && t.status !== 'completed'),
- high: todos.filter(t => t.priority === 'high' && t.status !== 'completed'),
- medium: todos.filter(t => t.priority === 'medium' && t.status !== 'completed'),
- low: todos.filter(t => t.priority === 'low' && t.status !== 'completed'),
- completed: todos.filter(t => t.status === 'completed')
- };
+ try {
+ // Use optimized git status engine
+ const engine = new GitStatusEngine(workspace);
+ const result = await engine.getStatus({
+ analyzeImpact: true,
+ enrichContext: true
+ });
- const currentProject = this.getCurrentProject();
- const projectContext = currentProject ? ` for project "${currentProject.name}"` : '';
- let output = `๐ Found ${todos.length} todo(s)${projectContext}\n\n`;
+ let response = ` **Git Status**\n\n`;
+ response += ` Branch: ${result.branch}\n`;
+
+ if (result.ahead > 0) response += ` Ahead: ${result.ahead} commit(s)\n`;
+ if (result.behind > 0) response += ` Behind: ${result.behind} commit(s)\n`;
+
+ response += `\n`;
- const formatTodo = (todo: any) => {
- const statusEmoji: { [key: string]: string } = {
- pending: 'โณ',
- in_progress: '๐',
- completed: 'โ
',
- cancelled: 'โ'
- };
- return `${statusEmoji[todo.status] || '๐'} ${todo.title}${todo.dueDate ? ` (Due: ${todo.dueDate})` : ''}\n ID: ${todo.id}`;
- };
+ if (result.clean) {
+ response += ` Working tree clean`;
+ } else {
+ // Staged files with context
+ if (result.changes.staged.length > 0) {
+ response += ` **Staged** (${result.changes.staged.length}):\n`;
+ result.changes.staged.forEach(change => {
+ const impactEmoji = change.impact === 'high' ? '' :
+ change.impact === 'medium' ? '' : '';
+ const complexityEmoji = change.complexity === 'low' ? '' :
+ change.complexity === 'medium' ? '' :
+ change.complexity === 'high' ? '' :
+ change.complexity === 'very-high' ? '' : '';
+
+ response += ` ${impactEmoji} ${change.path}`;
+ if (change.category) response += ` [${change.category}]`;
+ if (complexityEmoji) response += ` ${complexityEmoji}`;
+ response += `\n`;
+ });
+ }
- if (grouped.urgent.length > 0) {
- output += `๐ด **URGENT** (${grouped.urgent.length})\n`;
- grouped.urgent.forEach((todo: any) => output += formatTodo(todo) + '\n');
- output += '\n';
- }
+ // Modified files
+ if (result.changes.modified.length > 0) {
+ response += `\n **Modified** (${result.changes.modified.length}):\n`;
+ result.changes.modified.slice(0, 10).forEach(change => {
+ const impactEmoji = change.impact === 'high' ? '' :
+ change.impact === 'medium' ? '' : '';
+ response += ` ${impactEmoji} ${change.path}`;
+ if (change.category) response += ` [${change.category}]`;
+ response += `\n`;
+ });
+ if (result.changes.modified.length > 10) {
+ response += ` ... and ${result.changes.modified.length - 10} more\n`;
+ }
+ }
- if (grouped.high.length > 0) {
- output += `๐ **HIGH** (${grouped.high.length})\n`;
- grouped.high.forEach((todo: any) => output += formatTodo(todo) + '\n');
- output += '\n';
- }
+ // Untracked files
+ if (result.changes.untracked.length > 0) {
+ response += `\n **Untracked** (${result.changes.untracked.length}):\n`;
+ result.changes.untracked.slice(0, 5).forEach(change => {
+ response += ` ${change.path}`;
+ if (change.category) response += ` [${change.category}]`;
+ response += `\n`;
+ });
+ if (result.changes.untracked.length > 5) {
+ response += ` ... and ${result.changes.untracked.length - 5} more\n`;
+ }
+ }
- if (grouped.medium.length > 0) {
- output += `๐ก **MEDIUM** (${grouped.medium.length})\n`;
- grouped.medium.forEach((todo: any) => output += formatTodo(todo) + '\n');
- output += '\n';
- }
+ // Deleted files
+ if (result.changes.deleted.length > 0) {
+ response += `\n **Deleted** (${result.changes.deleted.length}):\n`;
+ result.changes.deleted.forEach(change => {
+ response += ` ${change.path}\n`;
+ });
+ }
+ }
- if (grouped.low.length > 0) {
- output += `๐ข **LOW** (${grouped.low.length})\n`;
- grouped.low.forEach((todo: any) => output += formatTodo(todo) + '\n');
- output += '\n';
- }
+ // Summary
+ if (result.summary.totalChanges > 0) {
+ response += `\n **Summary:**\n`;
+ response += ` ${result.summary.totalChanges} total change(s)`;
+ if (result.summary.highImpact > 0) {
+ response += ` (${result.summary.highImpact} high-impact)`;
+ }
+ response += `\n`;
+
+ if (Object.keys(result.summary.categories).length > 0) {
+ const categories = Object.entries(result.summary.categories)
+ .map(([cat, count]) => `${count} ${cat}`)
+ .join(', ');
+ response += ` Categories: ${categories}\n`;
+ }
- if (grouped.completed.length > 0 && !args?.status) {
- output += `โ
**COMPLETED** (${grouped.completed.length})\n`;
- grouped.completed.slice(0, 5).forEach((todo: any) => output += formatTodo(todo) + '\n');
- if (grouped.completed.length > 5) {
- output += ` ... and ${grouped.completed.length - 5} more\n`;
+ if (result.summary.complexity.high > 0) {
+ response += ` ${result.summary.complexity.high} complex file(s) changed\n`;
+ }
}
- }
- return {
- content: [{
- type: 'text',
- text: output,
- }],
- };
- }
+ // Commit readiness
+ if (result.changes.staged.length > 0) {
+ response += `\n **Commit Readiness:** ${result.commitReadiness.ready ? 'Ready' : 'Review needed'}\n`;
+
+ if (result.commitReadiness.warnings.length > 0) {
+ response += `\n **Warnings:**\n`;
+ result.commitReadiness.warnings.forEach(w => response += ` ${w}\n`);
+ }
+
+ if (result.commitReadiness.suggestions.length > 0) {
+ response += `\n **Suggestions:**\n`;
+ result.commitReadiness.suggestions.forEach(s => response += ` ${s}\n`);
+ }
+ }
- private async handleTodoUpdate(args: any) {
- const todo = this.todoManager.updateTodo(args);
-
- if (!todo) {
return {
- content: [{
- type: 'text',
- text: `โ Todo not found: ${args.id}`,
- }],
- isError: true,
+ content: [{ type: 'text', text: response }],
};
- }
-
- return {
- content: [{
- type: 'text',
- text: `โ
Todo updated: "${todo.title}"\n\nStatus: ${todo.status}\nPriority: ${todo.priority}\nUpdated: ${todo.updatedAt}`,
- }],
- };
- }
-
- private async handleTodoDelete(args: { id: string }) {
- const success = this.todoManager.deleteTodo(args.id);
-
- if (!success) {
+ } catch (error: any) {
return {
content: [{
type: 'text',
- text: `โ Todo not found: ${args.id}`,
+ text: ` Git status failed: ${error.message}`,
}],
- isError: true,
};
}
-
- return {
- content: [{
- type: 'text',
- text: `โ
Todo deleted: ${args.id}`,
- }],
- };
}
- private async handleTodoComplete(args: { id: string }) {
- const todo = this.todoManager.completeTodo(args.id);
-
- if (!todo) {
+ /**
+ * Git context with smart commit message generation
+ */
+ private async handleGitContext(args?: { staged?: boolean; files?: string[] }) {
+ const workspace = this.workspaceDetector.getCurrentWorkspace();
+ if (!workspace) {
return {
content: [{
type: 'text',
- text: `โ Todo not found: ${args.id}`,
+ text: ' No workspace set. Run `set_project` first.',
}],
- isError: true,
};
}
- return {
- content: [{
- type: 'text',
- text: `โ
Todo completed: "${todo.title}"\n\nCompleted at: ${todo.completedAt}`,
- }],
- };
- }
+ try {
+ // Use optimized git context engine
+ const engine = new GitContextEngine(workspace);
+ const context = await engine.getContext({
+ generateCommitMessage: true,
+ analyzeChanges: true
+ });
+
+ let response = ` **Git Context**\n\n`;
+
+ // Branch info
+ response += ` **Current Branch**: ${context.branch}\n`;
+ if (context.ahead > 0) response += ` Ahead: ${context.ahead} commit(s)\n`;
+ if (context.behind > 0) response += ` Behind: ${context.behind} commit(s)\n`;
+ response += `\n`;
- private async handleTodoStats(args?: { projectId?: string }) {
- // Use current project if no projectId specified
- let projectId = args?.projectId;
- if (!projectId) {
- const currentProject = this.getCurrentProject();
- if (currentProject) {
- projectId = currentProject.id;
+ // Last commit
+ if (context.lastCommit) {
+ response += ` **Last Commit**:\n`;
+ response += ` Hash: ${context.lastCommit.hash}\n`;
+ response += ` Author: ${context.lastCommit.author}\n`;
+ response += ` Date: ${context.lastCommit.date.toLocaleDateString()}\n`;
+ response += ` Message: ${context.lastCommit.message}\n\n`;
+ }
+
+ // Changes summary
+ const totalChanges = context.stagedFiles.length + context.uncommittedFiles.length;
+ if (totalChanges > 0) {
+ response += ` **Changes**: ${totalChanges} file(s)\n`;
+ if (context.stagedFiles.length > 0) {
+ response += ` Staged: ${context.stagedFiles.length}\n`;
+ }
+ if (context.uncommittedFiles.length > 0) {
+ response += ` Uncommitted: ${context.uncommittedFiles.length}\n`;
+ }
+ response += `\n`;
}
- }
- const stats = this.todoManager.getStats(projectId);
- const currentProject = this.getCurrentProject();
- const projectContext = projectId && currentProject ? ` for project "${currentProject.name}"` : '';
-
- let output = `๐ Todo Statistics${projectContext}\n\n`;
- output += `**Total:** ${stats.total} todos\n\n`;
-
- output += `**By Status:**\n`;
- output += `โณ Pending: ${stats.byStatus.pending}\n`;
- output += `๐ In Progress: ${stats.byStatus.in_progress}\n`;
- output += `โ
Completed: ${stats.byStatus.completed}\n`;
- output += `โ Cancelled: ${stats.byStatus.cancelled}\n\n`;
-
- output += `**By Priority:**\n`;
- output += `๐ด Urgent: ${stats.byPriority.urgent}\n`;
- output += `๐ High: ${stats.byPriority.high}\n`;
- output += `๐ก Medium: ${stats.byPriority.medium}\n`;
- output += `๐ข Low: ${stats.byPriority.low}\n\n`;
-
- if (stats.overdue > 0) {
- output += `โ ๏ธ **${stats.overdue} overdue** todo(s)\n`;
- }
-
- if (stats.dueSoon > 0) {
- output += `โฐ **${stats.dueSoon} due soon** (within 24 hours)\n`;
+ // Change analysis
+ if (context.changeAnalysis) {
+ const analysis = context.changeAnalysis;
+ response += ` **Change Analysis**:\n`;
+ response += ` Files changed: ${analysis.filesChanged}\n`;
+ response += ` Insertions: +${analysis.insertions}\n`;
+ response += ` Deletions: -${analysis.deletions}\n`;
+
+ if (analysis.primaryCategory) {
+ response += ` Primary category: ${analysis.primaryCategory}\n`;
+ }
+ if (analysis.scope) {
+ response += ` Scope: ${analysis.scope}\n`;
+ }
+
+ if (Object.keys(analysis.categories).length > 0) {
+ const categories = Object.entries(analysis.categories)
+ .map(([cat, count]) => `${count} ${cat}`)
+ .join(', ');
+ response += ` Categories: ${categories}\n`;
+ }
+ response += `\n`;
+ }
+
+ // Suggested commit message
+ if (context.suggestedCommitMessage) {
+ response += ` **Suggested Commit Message**:\n\`\`\`\n${context.suggestedCommitMessage}\n\`\`\`\n\n`;
+ response += ` This follows conventional commits format. Edit as needed.`;
+ } else if (context.stagedFiles.length === 0) {
+ response += ` Stage files to get a suggested commit message.`;
+ }
+
+ return {
+ content: [{ type: 'text', text: response }],
+ };
+ } catch (error: any) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` Git context failed: ${error.message}`,
+ }],
+ };
}
-
- return {
- content: [{
- type: 'text',
- text: output,
- }],
- };
}
- private async handleTodoTags() {
- const tags = this.todoManager.getAllTags();
+ /**
+ * Git hotspots - files with high change frequency (risk analysis)
+ */
+ private async handleGitHotspots(limit: number = 10) {
+ const workspace = this.workspaceDetector.getCurrentWorkspace();
- if (tags.length === 0) {
+ if (!workspace) {
return {
content: [{
type: 'text',
- text: '๐ท๏ธ No tags found',
+ text: ' No workspace set. Run `set_project` first.',
}],
};
}
- return {
- content: [{
- type: 'text',
- text: `๐ท๏ธ Available tags (${tags.length}):\n\n${tags.join(', ')}`,
- }],
- };
- }
-
- // ========== V1.0.0 HANDLERS - DATABASE MIGRATION ==========
-
- private async handleCheckMigrationSuggestion() {
try {
- const version = this.getVersion();
- const migrationCheck = await this.storage.checkMigrationPrompt(version);
+ const git = new GitIntegration(workspace);
- if (!migrationCheck.shouldPrompt) {
+ if (!git.isGitRepo()) {
return {
content: [{
type: 'text',
- text: `โ
**No Migration Needed**\n\nYour Context Sync database is already optimized!\n\n๐ **Status:**\nโข No duplicate projects detected\nโข Database is clean and performant\nโข All systems running optimally\n\n๐ You're all set to use Context Sync at peak performance!`,
+ text: ' Not a git repository',
}],
};
}
+
+ const hotspots = git.getHotspots(limit);
+ if (!hotspots || hotspots.length === 0) {
+ return {
+ content: [{
+ type: 'text',
+ text: ' No hotspots found. Repository may be too new or have limited history.',
+ }],
+ };
+ }
+
+ let response = ` **Git Hotspots - Risk Analysis**\n\n`;
+ response += `Files with high change frequency (last 6 months):\n\n`;
+
+ for (const spot of hotspots) {
+ const riskIcon = spot.risk === 'critical' ? '' :
+ spot.risk === 'high' ? '' :
+ spot.risk === 'medium' ? '' : '';
+
+ response += `${riskIcon} **${spot.file}** (${spot.risk} risk)\n`;
+ response += ` ${spot.changes} changes\n`;
+ response += ` Last changed: ${spot.lastChanged}\n\n`;
+ }
+
+ response += `\n **Why This Matters:**\n`;
+ response += ` High churn = complexity or instability\n`;
+ response += ` Critical/high risk files need extra testing\n`;
+ response += ` Consider refactoring frequently changed files\n`;
+
+ return {
+ content: [{ type: 'text', text: response }],
+ };
+ } catch (error: any) {
return {
content: [{
type: 'text',
- text: migrationCheck.message,
+ text: ` Git hotspots failed: ${error.message}`,
}],
};
-
- } catch (error) {
+ }
+ }
+
+ /**
+ * Git coupling - files that change together (hidden dependencies)
+ */
+ private async handleGitCoupling(minCoupling: number = 3) {
+ const workspace = this.workspaceDetector.getCurrentWorkspace();
+
+ if (!workspace) {
return {
content: [{
type: 'text',
- text: `โ ๏ธ **Migration Check Failed**\n\nError: ${error instanceof Error ? error.message : 'Unknown error'}\n\nYou can still try running migration tools manually:\nโข \`get_migration_stats\` - Check for duplicates\nโข \`migrate_database dryRun:true\` - Preview migration`,
+ text: ' No workspace set. Run `set_project` first.',
}],
- isError: true,
};
}
- }
- private async handleAnalyzeConversationContext(args: { conversationText: string; autoSave?: boolean }) {
try {
- const { conversationText, autoSave = false } = args;
- const analysis = ContextAnalyzer.analyzeConversation(conversationText);
+ const git = new GitIntegration(workspace);
- let response = `๐ง **Conversation Context Analysis**\n\n`;
- response += `${analysis.summary}\n\n`;
-
- if (analysis.decisions.length === 0 && analysis.todos.length === 0 && analysis.insights.length === 0) {
- response += `โ
**No significant context detected** in this conversation.\n\n`;
- response += `The conversation appears to be general discussion without specific:\n`;
- response += `โข Technical decisions\n`;
- response += `โข Action items or todos\n`;
- response += `โข Key insights or breakthroughs\n\n`;
- response += `๐ก **Tip**: Context Sync automatically detects technical discussions, architecture decisions, and action items.`;
-
+ if (!git.isGitRepo()) {
return {
- content: [{ type: 'text', text: response }],
+ content: [{
+ type: 'text',
+ text: ' Not a git repository',
+ }],
};
}
- // Show analysis results
- if (analysis.decisions.length > 0) {
- response += `๐ **Technical Decisions Detected (${analysis.decisions.length}):**\n`;
- analysis.decisions.forEach((decision, i) => {
- const priorityIcon = decision.priority === 'high' ? '๐ด' : decision.priority === 'medium' ? '๐ก' : '๐ข';
- response += `${i + 1}. ${priorityIcon} ${decision.content}\n`;
- response += ` *${decision.reasoning}*\n\n`;
- });
- }
-
- if (analysis.todos.length > 0) {
- response += `โ
**Action Items Detected (${analysis.todos.length}):**\n`;
- analysis.todos.forEach((todo, i) => {
- const priorityIcon = todo.priority === 'high' ? '๐ด' : todo.priority === 'medium' ? '๐ก' : '๐ข';
- response += `${i + 1}. ${priorityIcon} ${todo.content}\n`;
- response += ` *${todo.reasoning}*\n\n`;
- });
- }
-
- if (analysis.insights.length > 0) {
- response += `๐ก **Key Insights Detected (${analysis.insights.length}):**\n`;
- analysis.insights.forEach((insight, i) => {
- const priorityIcon = insight.priority === 'high' ? '๐ด' : insight.priority === 'medium' ? '๐ก' : '๐ข';
- response += `${i + 1}. ${priorityIcon} ${insight.content}\n`;
- response += ` *${insight.reasoning}*\n\n`;
- });
+ const couplings = git.getFileCoupling(minCoupling);
+
+ if (!couplings || couplings.length === 0) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` No strong file couplings found (minimum ${minCoupling} co-changes).`,
+ }],
+ };
}
- if (autoSave) {
- response += `๐ค **Auto-saving detected context...**\n\n`;
- let savedCount = 0;
-
- // Auto-save decisions
- for (const decision of analysis.decisions.filter(d => d.priority !== 'low')) {
- try {
- const decisionData = ContextAnalyzer.extractDecision(decision.content);
- if (decisionData) {
- await this.handleSaveDecision({
- type: decisionData.type,
- description: decisionData.description,
- reasoning: decisionData.reasoning
- });
- savedCount++;
- }
- } catch (error) {
- console.warn('Failed to auto-save decision:', error);
- }
- }
-
- // Auto-save todos
- for (const todo of analysis.todos.filter(t => t.priority !== 'low')) {
- try {
- const todoData = ContextAnalyzer.extractTodo(todo.content);
- if (todoData) {
- await this.handleTodoCreate({
- title: todoData.title,
- description: todoData.description,
- priority: todoData.priority
- });
- savedCount++;
- }
- } catch (error) {
- console.warn('Failed to auto-save todo:', error);
- }
- }
-
- // Auto-save insights
- for (const insight of analysis.insights.filter(i => i.priority === 'high')) {
- try {
- await this.handleSaveConversation({
- content: insight.content,
- role: 'assistant'
- });
- savedCount++;
- } catch (error) {
- console.warn('Failed to auto-save insight:', error);
- }
- }
+ let response = ` **Git Coupling - Hidden Dependencies**\n\n`;
+ response += `Files that frequently change together (last 6 months):\n\n`;
- response += `โ
**Auto-saved ${savedCount} context items**\n\n`;
- } else {
- response += `๐ **Recommended Actions:**\n`;
- response += `โข Use \`save_decision\` for technical decisions\n`;
- response += `โข Use \`todo_create\` for action items\n`;
- response += `โข Use \`save_conversation\` for key insights\n`;
- response += `โข Or re-run with \`autoSave: true\` to save automatically\n\n`;
+ for (const coupling of couplings) {
+ const strengthIcon = coupling.coupling === 'strong' ? '' :
+ coupling.coupling === 'medium' ? '' : '';
+
+ response += `${strengthIcon} **${coupling.coupling.toUpperCase()} coupling** (${coupling.timesChanged} together)\n`;
+ response += ` ${coupling.fileA}\n`;
+ response += ` ${coupling.fileB}\n\n`;
}
- response += `๐ก **Pro Tip**: Enable auto-context saving in your AI assistant prompt for seamless context preservation!`;
+ response += `\n **Why This Matters:**\n`;
+ response += ` Strong coupling = hidden dependencies\n`;
+ response += ` Files that change together should maybe be merged\n`;
+ response += ` Or they need better abstraction/interfaces\n`;
+ response += ` Use this to find refactoring opportunities\n`;
return {
content: [{ type: 'text', text: response }],
};
-
- } catch (error) {
+ } catch (error: any) {
return {
content: [{
type: 'text',
- text: `โ **Context Analysis Failed**\n\nError: ${error instanceof Error ? error.message : 'Unknown error'}\n\nMake sure to provide conversation text for analysis.`,
+ text: ` Git coupling failed: ${error.message}`,
}],
- isError: true,
};
}
}
- private async handleSuggestMissingContext(args: { includeFileAnalysis?: boolean }) {
+ /**
+ * Git blame - code ownership analysis
+ */
+ private async handleGitBlame(filepath: string) {
+ const workspace = this.workspaceDetector.getCurrentWorkspace();
+
+ if (!workspace) {
+ return {
+ content: [{
+ type: 'text',
+ text: ' No workspace set. Run `set_project` first.',
+ }],
+ };
+ }
+
try {
- const { includeFileAnalysis = true } = args;
- const currentProject = this.getCurrentProject();
+ const git = new GitIntegration(workspace);
- if (!currentProject) {
+ if (!git.isGitRepo()) {
return {
content: [{
type: 'text',
- text: `โ **No Active Project**\n\nUse \`set_workspace\` to set up a project first before analyzing missing context.`,
+ text: ' Not a git repository',
}],
- isError: true,
};
}
- let response = `๐ **Missing Context Analysis for "${currentProject.name}"**\n\n`;
+ const ownership = git.getBlame(filepath);
- // Get current context
- const summary = this.storage.getContextSummary(currentProject.id);
- const decisions = summary.recentDecisions || [];
- const conversations = summary.recentConversations || [];
-
- response += `๐ **Current Context State:**\n`;
- response += `โข Decisions: ${decisions.length}\n`;
- response += `โข Conversations: ${conversations.length}\n`;
- response += `โข Tech Stack: ${currentProject.techStack?.length || 0} items\n`;
- response += `โข Architecture: ${currentProject.architecture || 'Not specified'}\n\n`;
-
- // Analyze missing context
- const suggestions: string[] = [];
-
- // Check for missing architecture
- if (!currentProject.architecture || currentProject.architecture === 'Not specified') {
- suggestions.push(`๐๏ธ **Architecture Decision Missing**: Document the overall architecture pattern (microservices, monolith, serverless, etc.)`);
- }
-
- // Check for missing tech stack decisions
- if (!currentProject.techStack || currentProject.techStack.length === 0) {
- suggestions.push(`โ๏ธ **Technology Stack Missing**: Document key technologies, frameworks, and libraries used`);
- }
-
- // Check for recent decisions
- if (decisions.length === 0) {
- suggestions.push(`๐ **No Technical Decisions Recorded**: Start documenting architectural choices, library selections, and design decisions`);
- } else if (decisions.length < 3) {
- suggestions.push(`๐ **Limited Decision History**: Most projects have 5-10+ key decisions documented`);
- }
-
- // Check for configuration decisions
- const hasConfigDecisions = decisions.some((d: any) =>
- d.type === 'configuration' || d.description.toLowerCase().includes('config')
- );
- if (!hasConfigDecisions) {
- suggestions.push(`โ๏ธ **Configuration Decisions Missing**: Document environment setup, deployment configs, and key settings`);
- }
-
- // Check for security decisions
- const hasSecurityDecisions = decisions.some((d: any) =>
- d.description.toLowerCase().includes('security') ||
- d.description.toLowerCase().includes('auth')
- );
- if (!hasSecurityDecisions) {
- suggestions.push(`๐ **Security Context Missing**: Document authentication, authorization, and security patterns`);
- }
-
- // Check for performance decisions
- const hasPerformanceDecisions = decisions.some((d: any) =>
- d.description.toLowerCase().includes('performance') ||
- d.description.toLowerCase().includes('optimize')
- );
- if (!hasPerformanceDecisions) {
- suggestions.push(`โก **Performance Context Missing**: Document optimization decisions and performance considerations`);
+ if (!ownership || ownership.length === 0) {
+ return {
+ content: [{
+ type: 'text',
+ text: ` Could not get blame info for ${filepath}. File may not exist or not be tracked.`,
+ }],
+ };
}
-
- if (suggestions.length === 0) {
- response += `โ
**Well-Documented Project!**\n\n`;
- response += `Your project has comprehensive context coverage:\n`;
- response += `โข Architecture documented\n`;
- response += `โข Technology stack defined\n`;
- response += `โข Multiple decisions recorded\n`;
- response += `โข Various decision types covered\n\n`;
- response += `๐ก **Keep up the good work!** Continue documenting new decisions as the project evolves.`;
- } else {
- response += `โ ๏ธ **Missing Context Areas (${suggestions.length}):**\n\n`;
- suggestions.forEach((suggestion, i) => {
- response += `${i + 1}. ${suggestion}\n\n`;
- });
-
- response += `๐ **Quick Actions:**\n`;
- response += `โข Use \`save_decision\` to document technical choices\n`;
- response += `โข Update project info with \`update_project\` (if available)\n`;
- response += `โข Use \`analyze_conversation_context\` on recent discussions\n`;
- response += `โข Document key patterns and conventions as decisions\n\n`;
+
+ let response = ` **Code Ownership - ${filepath}**\n\n`;
+
+ for (const owner of ownership) {
+ const barLength = Math.floor(owner.percentage / 5);
+ const bar = ''.repeat(barLength) + ''.repeat(20 - barLength);
- response += `๐ก **Pro Tip**: Well-documented projects help AI assistants provide more relevant and context-aware assistance!`;
+ response += `**${owner.author}** - ${owner.percentage}%\n`;
+ response += `${bar}\n`;
+ response += ` ${owner.lines} lines\n`;
+ response += ` Last edit: ${owner.lastEdit}\n\n`;
}
+ const primaryOwner = ownership[0];
+ response += `\n **Primary Expert:** ${primaryOwner.author} (${primaryOwner.percentage}% ownership)\n`;
+ response += `Ask them about this file's architecture and design decisions.\n`;
+
return {
content: [{ type: 'text', text: response }],
};
-
- } catch (error) {
+ } catch (error: any) {
return {
content: [{
type: 'text',
- text: `โ **Missing Context Analysis Failed**\n\nError: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ text: ` Git blame failed: ${error.message}`,
}],
- isError: true,
};
}
}
- private async handleMigrateDatabase(args: { dryRun?: boolean }) {
- const { dryRun = false } = args;
+ /**
+ * Git analysis - comprehensive overview
+ */
+ private async handleGitAnalysis() {
+ const workspace = this.workspaceDetector.getCurrentWorkspace();
+ if (!workspace) {
+ return {
+ content: [{
+ type: 'text',
+ text: ' No workspace set. Run `set_project` first.',
+ }],
+ };
+ }
+
try {
- // Use null to let DatabaseMigrator use default path (same as Storage)
- const migrator = new DatabaseMigrator();
+ const git = new GitIntegration(workspace);
- if (dryRun) {
- // For dry run, just get the stats
- const stats = await migrator.getMigrationStats();
-
- let response = `๏ฟฝ **Database Migration Preview (Dry Run)**\n\n`;
-
- if (stats.duplicateGroups === 0) {
- response += `โ
No duplicate projects found! Your database is clean.\n\n`;
- response += `๐ **Summary:**\n`;
- response += `โข Total projects: ${stats.totalProjects}\n`;
- response += `โข Duplicates: 0\n`;
- response += `โข No migration needed\n`;
-
- migrator.close();
- return {
- content: [{ type: 'text', text: response }],
- };
- }
-
- response += `๐ **Would be migrated:**\n`;
- response += `โข Total projects: ${stats.totalProjects}\n`;
- response += `โข Duplicate groups: ${stats.duplicateGroups}\n`;
- response += `โข Total duplicates: ${stats.totalDuplicates}\n`;
- response += `โข Projects after cleanup: ${stats.totalProjects - stats.totalDuplicates}\n\n`;
-
- response += `๐ **Duplicate groups found:**\n\n`;
-
- stats.duplicateDetails.forEach((group, i) => {
- response += `${i + 1}. **${group.path}**\n`;
- response += ` โ ${group.count} projects: ${group.names.slice(0, 3).join(', ')}`;
- if (group.names.length > 3) {
- response += ` +${group.names.length - 3} more`;
- }
- response += `\n\n`;
- });
-
- response += `๐ก **Next Steps:**\n`;
- response += `โข Review the changes above\n`;
- response += `โข Run "migrate database" (without dryRun) to apply changes\n`;
- response += `โข Backup recommended before running actual migration\n`;
-
- migrator.close();
+ if (!git.isGitRepo()) {
return {
- content: [{ type: 'text', text: response }],
+ content: [{
+ type: 'text',
+ text: ' Not a git repository',
+ }],
};
}
+
+ const analysis = git.getAnalysis();
- // Actual migration
- const result = await migrator.migrateDuplicateProjects();
-
- let response = `๐ **Database Migration Complete**\n\n`;
+ if (!analysis) {
+ return {
+ content: [{
+ type: 'text',
+ text: ' Could not analyze repository',
+ }],
+ };
+ }
+
+ let response = ` **Git Repository Analysis**\n\n`;
+
+ // Branch health
+ response += ` **Branch Health**\n`;
+ response += ` Current: ${analysis.branchHealth.current}\n`;
- if (result.duplicatesFound === 0) {
- response += `โ
No duplicate projects found! Your database was already clean.\n\n`;
- response += `๐ **Summary:**\n`;
- response += `โข No duplicates found\n`;
- response += `โข No changes made\n`;
- } else {
- response += `โ
**Migration successful!**\n\n`;
- response += `๏ฟฝ **Summary:**\n`;
- response += `โข Duplicates found: ${result.duplicatesFound}\n`;
- response += `โข Duplicates removed: ${result.duplicatesRemoved}\n`;
- response += `โข Projects merged: ${result.projectsMerged}\n\n`;
-
- if (result.details.length > 0) {
- response += `๐ **Details:**\n`;
- result.details.forEach(detail => {
- response += `โข ${detail}\n`;
- });
+ if (analysis.branchHealth.ahead > 0) {
+ response += ` Ahead: ${analysis.branchHealth.ahead} commits\n`;
+ }
+ if (analysis.branchHealth.behind > 0) {
+ response += ` Behind: ${analysis.branchHealth.behind} commits`;
+ if (analysis.branchHealth.stale) {
+ response += ` STALE - merge main!\n`;
+ } else {
response += `\n`;
}
-
- response += `๐ **Migration Complete!**\n`;
- response += `โข Database has been cleaned up\n`;
- response += `โข All related data (conversations, decisions, todos) preserved\n`;
- response += `โข You should see fewer duplicate projects now\n`;
}
+ response += `\n`;
+
+ // Top contributors
+ if (analysis.contributors.length > 0) {
+ response += ` **Top Contributors** (last 6 months)\n`;
+ for (const contributor of analysis.contributors.slice(0, 5)) {
+ response += ` ${contributor.name} - ${contributor.commits} commits (last: ${contributor.lastCommit})\n`;
+ }
+ response += `\n`;
+ }
+
+ // Top hotspots
+ if (analysis.hotspots.length > 0) {
+ response += ` **Top 5 Hotspots** (high-risk files)\n`;
+ for (const spot of analysis.hotspots.slice(0, 5)) {
+ const riskIcon = spot.risk === 'critical' ? '' :
+ spot.risk === 'high' ? '' :
+ spot.risk === 'medium' ? '' : '';
+ response += ` ${riskIcon} ${spot.file} - ${spot.changes} changes\n`;
+ }
+ response += `\n`;
+ }
+
+ // Strongest couplings
+ if (analysis.coupling.length > 0) {
+ response += ` **Strongest Couplings** (hidden dependencies)\n`;
+ for (const coupling of analysis.coupling.slice(0, 5)) {
+ response += ` ${coupling.fileA} ${coupling.fileB} (${coupling.timesChanged} together)\n`;
+ }
+ response += `\n`;
+ }
+
+ // Recommendations
+ response += ` **Recommendations:**\n`;
- if (result.errors.length > 0) {
- response += `\nโ ๏ธ **Warnings:**\n`;
- result.errors.forEach(error => {
- response += `โข ${error}\n`;
- });
+ if (analysis.branchHealth.stale) {
+ response += ` Merge main branch - you're ${analysis.branchHealth.behind} commits behind\n`;
+ }
+
+ if (analysis.hotspots.some((h: any) => h.risk === 'critical')) {
+ const criticalFiles = analysis.hotspots.filter((h: any) => h.risk === 'critical');
+ response += ` Review ${criticalFiles.length} critical-risk file(s) - consider refactoring\n`;
}
- migrator.close();
+ if (analysis.coupling.some((c: any) => c.coupling === 'strong')) {
+ const strongCouplings = analysis.coupling.filter((c: any) => c.coupling === 'strong');
+ response += ` ${strongCouplings.length} strong coupling(s) detected - refactor to reduce dependencies\n`;
+ }
+
return {
content: [{ type: 'text', text: response }],
};
-
- } catch (error) {
+ } catch (error: any) {
return {
content: [{
type: 'text',
- text: `โ **Migration Failed**\n\nError: ${error instanceof Error ? error.message : 'Unknown error'}\n\nThe database was not modified. Please try again or check the logs.`,
+ text: ` Git analysis failed: ${error.message}`,
}],
- isError: true,
};
}
}
-
- private async handleGetMigrationStats() {
- try {
- const migrator = new DatabaseMigrator();
- const stats = await migrator.getMigrationStats();
-
- let response = `๐ **Database Migration Statistics**\n\n`;
-
- response += `๐ **Current State:**\n`;
- response += `โข Total projects: ${stats.totalProjects}\n`;
- response += `โข Projects with paths: ${stats.projectsWithPaths}\n`;
- response += `โข Duplicate groups: ${stats.duplicateGroups}\n`;
- response += `โข Total duplicates: ${stats.totalDuplicates}\n\n`;
-
- if (stats.duplicateGroups === 0) {
- response += `โ
**No duplicates found!** Your database is clean.\n\n`;
- response += `๐ฏ **Recommendations:**\n`;
- response += `โข Your Context Sync database is optimized\n`;
- response += `โข No migration needed\n`;
- response += `โข All projects have unique paths\n`;
-
- migrator.close();
- return {
- content: [{ type: 'text', text: response }],
- };
- }
-
- response += `โ ๏ธ **Duplicates Detected:**\n\n`;
-
- stats.duplicateDetails.forEach((group, i) => {
- response += `${i + 1}. **${group.path}**\n`;
- response += ` Projects: ${group.count}\n`;
- response += ` Names: ${group.names.slice(0, 3).join(', ')}`;
- if (group.names.length > 3) {
- response += ` +${group.names.length - 3} more`;
- }
- response += `\n\n`;
- });
-
- response += `๐ ๏ธ **Next Steps:**\n`;
- response += `โข Run "migrate database dryRun:true" to preview changes\n`;
- response += `โข Run "migrate database" to clean up duplicates\n`;
- response += `โข This will preserve all your data while removing duplicates\n\n`;
-
- response += `๐ก **Migration Benefits:**\n`;
- response += `โข Cleaner project list\n`;
- response += `โข Improved performance\n`;
- response += `โข Consolidated project context\n`;
- response += `โข Better AI tool integration\n`;
-
- migrator.close();
- return {
- content: [{ type: 'text', text: response }],
- };
-
- } catch (error) {
+
+ private async handleNotion(args: { action: 'search' | 'read'; query?: string; pageId?: string }) {
+ // Validate action parameter
+ if (!args.action) {
return {
content: [{
type: 'text',
- text: `โ **Failed to get migration stats**\n\nError: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ text: ' Missing required parameter: action (must be "search" or "read")',
}],
isError: true,
};
}
+
+ // Handle search action
+ if (args.action === 'search') {
+ if (!args.query) {
+ return {
+ content: [{
+ type: 'text',
+ text: ' Missing required parameter: query (required for search action)',
+ }],
+ isError: true,
+ };
+ }
+ return await this.notionHandlers.handleNotionSearch({ query: args.query });
+ }
+
+ // Handle read action
+ if (args.action === 'read') {
+ if (!args.pageId) {
+ return {
+ content: [{
+ type: 'text',
+ text: ' Missing required parameter: pageId (required for read action)',
+ }],
+ isError: true,
+ };
+ }
+ return await this.notionHandlers.handleNotionReadPage({ pageId: args.pageId });
+ }
+
+ // Unknown action
+ return {
+ content: [{
+ type: 'text',
+ text: ` Unknown action: "${args.action}". Use "search" or "read".`,
+ }],
+ isError: true,
+ };
+ }
+
+ // ========== HELPERS ==========
+
+ private getCurrentProject() {
+ if (!this.currentProjectId) return null;
+ return this.storage.getProject(this.currentProjectId);
+ }
+
+ private parseKeyValue(content: string): { key: string; value: string } {
+ // Try to parse "Key: Value" or "Key = Value" format
+ const match = content.match(/^(.+?)[:=](.+)$/);
+ if (match) {
+ return {
+ key: match[1].trim(),
+ value: match[2].trim()
+ };
+ }
+ // Fallback: use content as key
+ return {
+ key: content,
+ value: ''
+ };
}
async run(): Promise {
const transport = new StdioServerTransport();
await this.server.connect(transport);
-
- console.error('Context Sync MCP server v1.0.3 running on stdio');
}
close(): void {
this.storage.close();
}
-}
\ No newline at end of file
+}
+
+
diff --git a/src/storage.ts b/src/storage.ts
index 9da0248..13cdadf 100644
--- a/src/storage.ts
+++ b/src/storage.ts
@@ -1,4 +1,4 @@
-import Database from 'better-sqlite3';
+๏ปฟimport Database from 'better-sqlite3';
import { randomUUID } from 'crypto';
import * as crypto from 'crypto';
import * as path from 'path';
@@ -11,13 +11,12 @@ import type {
ContextSummary,
StorageInterface,
} from './types.js';
-import {createTodoTable} from './todo-schema.js';
import { PathNormalizer } from './path-normalizer.js';
-import { PerformanceMonitor } from './performance-monitor.js';
-import { MigrationPrompter } from './migration-prompter.js';
+import { migrateSchema, isSchemaCurrent } from './schema.js';
export class Storage implements StorageInterface {
private db: Database.Database;
+ private dbPath: string;
// Prepared statement cache for 2-5x faster queries with LRU eviction
private preparedStatements: Map = new Map();
@@ -26,17 +25,20 @@ export class Storage implements StorageInterface {
constructor(dbPath?: string) {
// Default to user's home directory
const defaultPath = path.join(os.homedir(), '.context-sync', 'data.db');
- const actualPath = dbPath || defaultPath;
+ this.dbPath = dbPath || defaultPath;
// Ensure directory exists
- const dir = path.dirname(actualPath);
+ const dir = path.dirname(this.dbPath);
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
- this.db = new Database(actualPath);
+ this.db = new Database(this.dbPath);
this.initDatabase();
- createTodoTable(this.db);
+ // Migrate schema if needed
+ if (!isSchemaCurrent(this.db)) {
+ migrateSchema(this.db);
+ }
}
private initDatabase(): void {
@@ -113,9 +115,7 @@ export class Storage implements StorageInterface {
return statement;
}
- createProject(name: string, projectPath?: string): ProjectContext {
- const timer = PerformanceMonitor.startTimer('createProject');
-
+ createProject(name: string, projectPath?: string): ProjectContext {
// Normalize the path before storing if provided
const normalizedPath = projectPath ? PathNormalizer.normalize(projectPath) : undefined;
@@ -123,7 +123,6 @@ export class Storage implements StorageInterface {
if (normalizedPath) {
const existing = this.findProjectByPath(normalizedPath);
if (existing) {
- timer();
return existing;
}
}
@@ -136,8 +135,6 @@ export class Storage implements StorageInterface {
VALUES (?, ?, ?, ?, ?)
`).run(id, name, normalizedPath, now, now);
- timer();
-
return {
id,
name,
@@ -226,8 +223,6 @@ export class Storage implements StorageInterface {
}
findProjectByPath(projectPath: string): ProjectContext | null {
- const timer = PerformanceMonitor.startTimer('findProjectByPath');
-
// Normalize the input path for consistent lookup
const normalizedPath = PathNormalizer.normalize(projectPath);
@@ -235,8 +230,6 @@ export class Storage implements StorageInterface {
SELECT * FROM projects WHERE path = ?
`).get(normalizedPath) as any;
- timer();
-
if (!row) return null;
return this.rowToProject(row);
@@ -301,85 +294,6 @@ export class Storage implements StorageInterface {
}));
}
- /**
- * Stream conversations one at a time for memory efficiency
- * Use when processing large result sets (>100 rows)
- *
- * Usage examples:
- *
- * // Process all conversations lazily (low memory)
- * for (const conv of storage.streamConversations(projectId)) {
- * console.log(conv.content);
- * }
- *
- * // Take first 10 efficiently
- * const first10 = Storage.take(storage.streamConversations(projectId), 10);
- *
- * // Filter + map (lazy, no intermediate arrays)
- * const userMessages = Storage.filter(
- * storage.streamConversations(projectId),
- * c => c.role === 'user'
- * );
- */
- *streamConversations(projectId: string, limit?: number): Generator {
- const sql = limit
- ? `SELECT * FROM conversations WHERE project_id = ? ORDER BY timestamp DESC LIMIT ?`
- : `SELECT * FROM conversations WHERE project_id = ? ORDER BY timestamp DESC`;
-
- const stmt = this.getStatement(sql);
- const iterator = limit ? stmt.iterate(projectId, limit) : stmt.iterate(projectId);
-
- for (const row of iterator as any) {
- yield {
- id: row.id,
- projectId: row.project_id,
- tool: row.tool,
- role: row.role,
- content: row.content,
- timestamp: new Date(row.timestamp),
- metadata: row.metadata ? JSON.parse(row.metadata) : undefined,
- };
- }
- }
-
- /**
- * Stream decisions one at a time for memory efficiency
- * Use when processing large result sets (>100 rows)
- */
- *streamDecisions(projectId: string, limit?: number): Generator {
- const sql = limit
- ? `SELECT * FROM decisions WHERE project_id = ? ORDER BY timestamp DESC LIMIT ?`
- : `SELECT * FROM decisions WHERE project_id = ? ORDER BY timestamp DESC`;
-
- const stmt = this.getStatement(sql);
- const iterator = limit ? stmt.iterate(projectId, limit) : stmt.iterate(projectId);
-
- for (const row of iterator as any) {
- yield {
- id: row.id,
- projectId: row.project_id,
- type: row.type,
- description: row.description,
- reasoning: row.reasoning,
- timestamp: new Date(row.timestamp),
- };
- }
- }
-
- /**
- * Stream all projects one at a time for memory efficiency
- * Useful for bulk operations across many projects
- */
- *streamAllProjects(): Generator {
- const stmt = this.getStatement(`
- SELECT * FROM projects ORDER BY updated_at DESC
- `);
-
- for (const row of stmt.iterate() as any) {
- yield this.rowToProject(row);
- }
- }
-
getContextSummary(projectId: string): ContextSummary {
const project = this.getProject(projectId);
if (!project) {
@@ -417,14 +331,11 @@ export class Storage implements StorageInterface {
}
getAllProjects(): ProjectContext[] {
- const timer = PerformanceMonitor.startTimer('getAllProjects');
-
const rows = this.getStatement(`
SELECT * FROM projects ORDER BY updated_at DESC
`).all();
const projects = rows.map((row: any) => this.rowToProject(row));
- timer();
return projects;
}
@@ -434,70 +345,13 @@ export class Storage implements StorageInterface {
}
/**
- * Check if user should be prompted for database migration
- * This is called by the server to provide migration suggestions
+ * Get the database file path
*/
- async checkMigrationPrompt(currentVersion: string): Promise<{ shouldPrompt: boolean; message: string }> {
- try {
- const result = await MigrationPrompter.shouldPromptForMigration(currentVersion, this.db.name);
- return {
- shouldPrompt: result.shouldPrompt,
- message: result.message
- };
- } catch (error) {
- console.warn('Migration prompt check failed:', error);
- return { shouldPrompt: false, message: '' };
- }
- }
-
- // ========== STREAMING UTILITY METHODS ==========
-
- /**
- * Helper: Take first N items from a generator (efficient limit)
- * Example: const first10 = Storage.take(storage.streamDecisions(projectId), 10);
- */
- static take(generator: Generator, count: number): T[] {
- const results: T[] = [];
- let i = 0;
- for (const item of generator) {
- if (i >= count) break;
- results.push(item);
- i++;
- }
- return results;
- }
-
- /**
- * Helper: Convert generator to array (use sparingly with limits!)
- * Example: const allDecisions = Storage.toArray(storage.streamDecisions(projectId, 100));
- */
- static toArray(generator: Generator): T[] {
- return Array.from(generator);
- }
-
- /**
- * Helper: Filter generator items (lazy evaluation)
- * Example: const filtered = Storage.filter(storage.streamDecisions(projectId), d => d.type === 'architecture');
- */
- static *filter(generator: Generator, predicate: (item: T) => boolean): Generator {
- for (const item of generator) {
- if (predicate(item)) {
- yield item;
- }
- }
- }
-
- /**
- * Helper: Map generator items (lazy evaluation)
- * Example: const descriptions = Storage.map(storage.streamDecisions(projectId), d => d.description);
- */
- static *map(generator: Generator, transform: (item: T) => U): Generator {
- for (const item of generator) {
- yield transform(item);
- }
+ getDbPath(): string {
+ return this.dbPath;
}
close(): void {
this.db.close();
}
-}
\ No newline at end of file
+}
diff --git a/src/structure-engine.ts b/src/structure-engine.ts
new file mode 100644
index 0000000..c57d31a
--- /dev/null
+++ b/src/structure-engine.ts
@@ -0,0 +1,555 @@
+๏ปฟ/**
+ * Structure Engine
+ *
+ * Layer 1: Fast directory tree generation
+ * Layer 2: Complexity analysis per directory
+ * Layer 3: Architecture insights and hotspots
+ *
+ * Features:
+ * - Directory complexity scoring
+ * - Architecture pattern detection
+ * - Hotspot identification (most complex areas)
+ * - Size and LOC per directory
+ * - Test coverage indicators
+ * - Configuration detection
+ */
+
+import * as fs from 'fs/promises';
+import * as path from 'path';
+import { ReadFileEngine } from './read-file-engine.js';
+
+interface DirectoryNode {
+ name: string;
+ path: string;
+ type: 'file' | 'directory';
+ size?: number;
+ children?: DirectoryNode[];
+ metadata?: {
+ fileCount?: number;
+ totalSize?: number;
+ totalLOC?: number;
+ avgComplexity?: string;
+ complexityScore?: number;
+ languages?: string[];
+ hasTests?: boolean;
+ hasConfig?: boolean;
+ };
+}
+
+interface StructureResult {
+ tree: string;
+ summary: {
+ totalFiles: number;
+ totalDirectories: number;
+ totalSize: number;
+ totalLOC: number;
+ languages: { [key: string]: number };
+ architecturePattern?: string;
+ hotspots: Array<{
+ path: string;
+ reason: string;
+ complexity: number;
+ loc: number;
+ }>;
+ };
+ insights: string[];
+}
+
+export class StructureEngine {
+ private workspacePath: string;
+ private readFileEngine: ReadFileEngine;
+ private analysisCache: Map;
+
+ constructor(workspacePath: string) {
+ this.workspacePath = workspacePath;
+ this.readFileEngine = new ReadFileEngine(workspacePath);
+ this.analysisCache = new Map();
+ }
+
+ /**
+ * Layer 1 + 2 + 3: Get enriched project structure
+ */
+ async getStructure(
+ depth: number = 3,
+ options: {
+ includeMetadata?: boolean;
+ analyzeComplexity?: boolean;
+ detectHotspots?: boolean;
+ } = {}
+ ): Promise {
+ const {
+ includeMetadata = true,
+ analyzeComplexity = true,
+ detectHotspots = true
+ } = options;
+
+ // Layer 1: Build directory tree
+ const rootNode = await this.buildTree(this.workspacePath, depth);
+
+ // Layer 2: Analyze directories if requested
+ if (includeMetadata && analyzeComplexity) {
+ await this.analyzeDirectories(rootNode);
+ }
+
+ // Generate tree string
+ const tree = this.renderTree(rootNode, 0, depth);
+
+ // Layer 3: Generate insights and detect hotspots
+ const summary = await this.generateSummary(rootNode);
+ const insights = this.generateInsights(rootNode, summary);
+
+ if (detectHotspots) {
+ summary.hotspots = await this.detectHotspots(rootNode);
+ }
+
+ return {
+ tree,
+ summary,
+ insights
+ };
+ }
+
+ /**
+ * Layer 1: Build directory tree efficiently
+ */
+ private async buildTree(
+ dirPath: string,
+ maxDepth: number,
+ currentDepth: number = 0
+ ): Promise {
+ const name = path.basename(dirPath);
+ const relativePath = path.relative(this.workspacePath, dirPath);
+
+ const node: DirectoryNode = {
+ name: name || 'root',
+ path: relativePath || '.',
+ type: 'directory',
+ children: []
+ };
+
+ if (currentDepth >= maxDepth) {
+ return node;
+ }
+
+ try {
+ const entries = await fs.readdir(dirPath, { withFileTypes: true });
+
+ for (const entry of entries) {
+ // Skip common ignore patterns
+ if (this.shouldIgnore(entry.name)) {
+ continue;
+ }
+
+ const fullPath = path.join(dirPath, entry.name);
+ const relPath = path.relative(this.workspacePath, fullPath);
+
+ if (entry.isDirectory()) {
+ const childNode = await this.buildTree(fullPath, maxDepth, currentDepth + 1);
+ node.children!.push(childNode);
+ } else if (entry.isFile()) {
+ const stats = await fs.stat(fullPath);
+ node.children!.push({
+ name: entry.name,
+ path: relPath,
+ type: 'file',
+ size: stats.size
+ });
+ }
+ }
+
+ // Sort: directories first, then files, alphabetically
+ node.children!.sort((a, b) => {
+ if (a.type !== b.type) {
+ return a.type === 'directory' ? -1 : 1;
+ }
+ return a.name.localeCompare(b.name);
+ });
+ } catch (err) {
+ // Skip inaccessible directories
+ }
+
+ return node;
+ }
+
+ /**
+ * Layer 2: Analyze directories with complexity scoring
+ */
+ private async analyzeDirectories(node: DirectoryNode): Promise {
+ if (node.type === 'directory' && node.children) {
+ let totalSize = 0;
+ let totalLOC = 0;
+ let fileCount = 0;
+ let complexitySum = 0;
+ let complexityCount = 0;
+ const languages = new Set();
+ let hasTests = false;
+ let hasConfig = false;
+
+ for (const child of node.children) {
+ if (child.type === 'file') {
+ fileCount++;
+ totalSize += child.size || 0;
+
+ // Check for tests and configs
+ if (child.name.includes('test') || child.name.includes('spec')) {
+ hasTests = true;
+ }
+ if (child.name.includes('config') || child.name === 'package.json' || child.name === 'tsconfig.json') {
+ hasConfig = true;
+ }
+
+ // Analyze code files for complexity (limit to avoid slowdown)
+ if (fileCount <= 20 && this.isCodeFile(child.name)) {
+ try {
+ const fullPath = path.join(this.workspacePath, child.path);
+ const fileCtx = await this.readFileEngine.read(fullPath);
+
+ totalLOC += fileCtx.metadata.linesOfCode;
+ languages.add(fileCtx.metadata.language);
+
+ // Convert complexity level to score
+ const complexityScore = this.complexityToScore(fileCtx.complexity.level);
+ complexitySum += complexityScore;
+ complexityCount++;
+ } catch (err) {
+ // Skip files that can't be analyzed
+ }
+ }
+ } else if (child.type === 'directory') {
+ // Recursively analyze child directories
+ await this.analyzeDirectories(child);
+
+ // Aggregate child metadata
+ if (child.metadata) {
+ totalSize += child.metadata.totalSize || 0;
+ totalLOC += child.metadata.totalLOC || 0;
+ fileCount += child.metadata.fileCount || 0;
+ if (child.metadata.hasTests) hasTests = true;
+ if (child.metadata.hasConfig) hasConfig = true;
+ if (child.metadata.languages) {
+ child.metadata.languages.forEach(l => languages.add(l));
+ }
+ if (child.metadata.complexityScore) {
+ complexitySum += child.metadata.complexityScore;
+ complexityCount++;
+ }
+ }
+ }
+ }
+
+ // Calculate average complexity
+ const avgComplexityScore = complexityCount > 0 ? complexitySum / complexityCount : 0;
+ const avgComplexity = this.scoreToComplexity(avgComplexityScore);
+
+ node.metadata = {
+ fileCount,
+ totalSize,
+ totalLOC,
+ avgComplexity,
+ complexityScore: avgComplexityScore,
+ languages: Array.from(languages),
+ hasTests,
+ hasConfig
+ };
+ }
+ }
+
+ /**
+ * Layer 1: Render tree as string
+ */
+ private renderTree(
+ node: DirectoryNode,
+ depth: number,
+ maxDepth: number,
+ prefix: string = '',
+ isLast: boolean = true
+ ): string {
+ let result = '';
+
+ if (depth === 0) {
+ result += `${node.name}/\n`;
+ } else {
+ const connector = isLast ? ' ' : ' ';
+ const icon = node.type === 'directory' ? ' ' : ' ';
+
+ let line = `${prefix}${connector}${icon}${node.name}`;
+
+ // Add metadata for directories
+ if (node.type === 'directory' && node.metadata) {
+ const meta = node.metadata;
+ if (meta.fileCount) {
+ line += ` (${meta.fileCount} files`;
+ if (meta.totalLOC && meta.totalLOC > 0) {
+ line += `, ${meta.totalLOC.toLocaleString()} LOC`;
+ }
+ if (meta.avgComplexity) {
+ const emoji = meta.avgComplexity === 'low' ? '' :
+ meta.avgComplexity === 'medium' ? '' :
+ meta.avgComplexity === 'high' ? '' : '';
+ line += `, ${emoji} ${meta.avgComplexity}`;
+ }
+ line += ')';
+ }
+ }
+ // Add size for files
+ else if (node.type === 'file' && node.size) {
+ line += ` (${this.formatBytes(node.size)})`;
+ }
+
+ result += line + '\n';
+ }
+
+ if (depth < maxDepth && node.children) {
+ const newPrefix = depth === 0 ? '' : prefix + (isLast ? ' ' : ' ');
+
+ for (let i = 0; i < node.children.length; i++) {
+ const child = node.children[i];
+ const childIsLast = i === node.children.length - 1;
+ result += this.renderTree(child, depth + 1, maxDepth, newPrefix, childIsLast);
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Layer 3: Generate summary statistics
+ */
+ private async generateSummary(node: DirectoryNode): Promise {
+ const summary: StructureResult['summary'] = {
+ totalFiles: 0,
+ totalDirectories: 0,
+ totalSize: 0,
+ totalLOC: 0,
+ languages: {} as { [key: string]: number },
+ architecturePattern: undefined,
+ hotspots: []
+ };
+
+ this.collectStats(node, summary);
+
+ // Detect architecture pattern
+ summary.architecturePattern = this.detectArchitecture(node);
+
+ return summary;
+ }
+
+ /**
+ * Collect statistics recursively
+ */
+ private collectStats(node: DirectoryNode, summary: StructureResult['summary']): void {
+ if (node.type === 'directory') {
+ summary.totalDirectories++;
+
+ if (node.metadata) {
+ summary.totalSize += node.metadata.totalSize || 0;
+ summary.totalLOC += node.metadata.totalLOC || 0;
+
+ if (node.metadata.languages) {
+ node.metadata.languages.forEach(lang => {
+ summary.languages[lang] = (summary.languages[lang] || 0) + 1;
+ });
+ }
+ }
+
+ if (node.children) {
+ node.children.forEach(child => this.collectStats(child, summary));
+ }
+ } else {
+ summary.totalFiles++;
+ summary.totalSize += node.size || 0;
+ }
+ }
+
+ /**
+ * Layer 3: Detect architecture pattern
+ */
+ private detectArchitecture(node: DirectoryNode): string {
+ const dirNames = new Set();
+
+ const collectDirNames = (n: DirectoryNode) => {
+ if (n.type === 'directory') {
+ dirNames.add(n.name.toLowerCase());
+ if (n.children) {
+ n.children.forEach(child => collectDirNames(child));
+ }
+ }
+ };
+
+ collectDirNames(node);
+
+ // Detect patterns
+ if (dirNames.has('src') && dirNames.has('components') && dirNames.has('pages')) {
+ return 'Next.js / React Framework';
+ }
+ if (dirNames.has('src') && dirNames.has('routes') && dirNames.has('views')) {
+ return 'MVC / Web Framework';
+ }
+ if (dirNames.has('src') && dirNames.has('lib') && dirNames.has('bin')) {
+ return 'Library / CLI Tool';
+ }
+ if (dirNames.has('src') && dirNames.has('test')) {
+ return 'Standard Project Structure';
+ }
+ if (dirNames.has('server') && dirNames.has('client')) {
+ return 'Full-Stack Application';
+ }
+
+ return 'Custom Structure';
+ }
+
+ /**
+ * Layer 3: Detect hotspots (most complex areas)
+ */
+ private async detectHotspots(node: DirectoryNode): Promise {
+ const hotspots: StructureResult['summary']['hotspots'] = [];
+
+ const traverse = (n: DirectoryNode) => {
+ if (n.type === 'directory' && n.metadata) {
+ const score = n.metadata.complexityScore || 0;
+ const loc = n.metadata.totalLOC || 0;
+
+ // Hotspot criteria: high complexity OR large LOC
+ if ((score >= 40 && loc > 100) || loc > 5000) {
+ let reason = '';
+ if (score >= 60) {
+ reason = 'Very high complexity';
+ } else if (score >= 40) {
+ reason = 'High complexity';
+ } else if (loc > 5000) {
+ reason = 'Large codebase';
+ }
+
+ hotspots.push({
+ path: n.path,
+ reason,
+ complexity: Math.round(score),
+ loc
+ });
+ }
+
+ if (n.children) {
+ n.children.forEach(child => traverse(child));
+ }
+ }
+ };
+
+ traverse(node);
+
+ // Sort by complexity + LOC
+ hotspots.sort((a, b) => (b.complexity + b.loc / 100) - (a.complexity + a.loc / 100));
+
+ return hotspots.slice(0, 5); // Top 5 hotspots
+ }
+
+ /**
+ * Layer 3: Generate insights
+ */
+ private generateInsights(node: DirectoryNode, summary: StructureResult['summary']): string[] {
+ const insights: string[] = [];
+
+ // Language insights
+ const languages = Object.keys(summary.languages);
+ if (languages.length > 0) {
+ const primary = languages[0];
+ insights.push(`Primary language: ${primary}`);
+ }
+
+ // Size insights
+ if (summary.totalLOC > 10000) {
+ insights.push('Large codebase - consider modularization');
+ } else if (summary.totalLOC < 1000) {
+ insights.push('Small codebase - good for quick understanding');
+ }
+
+ // Test insights
+ const hasTests = this.hasTestDirectory(node);
+ if (!hasTests) {
+ insights.push(' No test directory detected - consider adding tests');
+ }
+
+ // Architecture insights
+ if (summary.architecturePattern) {
+ insights.push(`Architecture: ${summary.architecturePattern}`);
+ }
+
+ return insights;
+ }
+
+ /**
+ * Helper: Check if directory tree has tests
+ */
+ private hasTestDirectory(node: DirectoryNode): boolean {
+ if (node.type === 'directory') {
+ const name = node.name.toLowerCase();
+ if (name === 'test' || name === 'tests' || name === '__tests__') {
+ return true;
+ }
+ if (node.children) {
+ return node.children.some(child => this.hasTestDirectory(child));
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Helper: Check if should ignore
+ */
+ private shouldIgnore(name: string): boolean {
+ const ignorePatterns = [
+ 'node_modules',
+ '.git',
+ 'dist',
+ 'build',
+ '.next',
+ 'coverage',
+ '.cache',
+ '.DS_Store',
+ 'Thumbs.db'
+ ];
+
+ return ignorePatterns.includes(name) || name.startsWith('.');
+ }
+
+ /**
+ * Helper: Check if code file
+ */
+ private isCodeFile(name: string): boolean {
+ const extensions = ['.ts', '.js', '.tsx', '.jsx', '.py', '.go', '.rs', '.java', '.cpp', '.c', '.h'];
+ return extensions.some(ext => name.endsWith(ext));
+ }
+
+ /**
+ * Helper: Convert complexity level to score
+ */
+ private complexityToScore(level: string): number {
+ switch (level) {
+ case 'low': return 10;
+ case 'medium': return 30;
+ case 'high': return 50;
+ case 'very-high': return 70;
+ default: return 0;
+ }
+ }
+
+ /**
+ * Helper: Convert score to complexity level
+ */
+ private scoreToComplexity(score: number): string {
+ if (score < 20) return 'low';
+ if (score < 40) return 'medium';
+ if (score < 60) return 'high';
+ return 'very-high';
+ }
+
+ /**
+ * Helper: Format bytes
+ */
+ private formatBytes(bytes: number): string {
+ if (bytes < 1024) return `${bytes} B`;
+ if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
+ return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
+ }
+}
+
+
diff --git a/src/todo-handlers.ts b/src/todo-handlers.ts
deleted file mode 100644
index f0aa0a4..0000000
--- a/src/todo-handlers.ts
+++ /dev/null
@@ -1,283 +0,0 @@
-/**
- * MCP Tool handlers for todo management
- * Add these handlers to your server.ts file
- */
-
-import { TodoManager } from './todo-manager.js';
-import { CreateTodoInput, UpdateTodoInput, TodoFilter, Todo } from './todo-types.js';
-
-export function createTodoHandlers(todoManager: TodoManager) {
- return {
- /**
- * Create a new todo item
- */
- 'todo_create': async (params: CreateTodoInput) => {
- const todo = todoManager.createTodo(params);
- return {
- content: [
- {
- type: 'text',
- text: `โ
Todo created: "${todo.title}"\n\nID: ${todo.id}\nPriority: ${todo.priority}\nStatus: ${todo.status}${todo.dueDate ? `\nDue: ${todo.dueDate}` : ''}${todo.tags.length > 0 ? `\nTags: ${todo.tags.join(', ')}` : ''}`
- }
- ]
- };
- },
-
- /**
- * Get a specific todo by ID
- */
- 'todo_get': async (params: { id: string }) => {
- const todo = todoManager.getTodo(params.id);
-
- if (!todo) {
- return {
- content: [
- {
- type: 'text',
- text: `โ Todo not found: ${params.id}`
- }
- ],
- isError: true
- };
- }
-
- return {
- content: [
- {
- type: 'text',
- text: `๐ **${todo.title}**\n\nStatus: ${todo.status}\nPriority: ${todo.priority}\n${todo.description ? `Description: ${todo.description}\n` : ''}${todo.dueDate ? `Due: ${todo.dueDate}\n` : ''}${todo.tags.length > 0 ? `Tags: ${todo.tags.join(', ')}\n` : ''}Created: ${todo.createdAt}\nUpdated: ${todo.updatedAt}${todo.completedAt ? `\nCompleted: ${todo.completedAt}` : ''}`
- }
- ]
- };
- },
-
- /**
- * List todos with optional filters
- */
- 'todo_list': async (params?: TodoFilter) => {
- const todos = todoManager.listTodos(params);
-
- if (todos.length === 0) {
- return {
- content: [
- {
- type: 'text',
- text: '๐ No todos found'
- }
- ]
- };
- }
-
- const grouped = {
- urgent: todos.filter(t => t.priority === 'urgent' && t.status !== 'completed'),
- high: todos.filter(t => t.priority === 'high' && t.status !== 'completed'),
- medium: todos.filter(t => t.priority === 'medium' && t.status !== 'completed'),
- low: todos.filter(t => t.priority === 'low' && t.status !== 'completed'),
- completed: todos.filter(t => t.status === 'completed')
- };
-
- let output = `๐ Found ${todos.length} todo(s)\n\n`;
-
- const formatTodo = (todo: Todo) => {
- const statusEmoji = {
- pending: 'โณ',
- in_progress: '๐',
- completed: 'โ
',
- cancelled: 'โ'
- };
- return `${statusEmoji[todo.status]} ${todo.title}${todo.dueDate ? ` (Due: ${todo.dueDate})` : ''}\n ID: ${todo.id}`;
- };
-
- if (grouped.urgent.length > 0) {
- output += `๐ด **URGENT** (${grouped.urgent.length})\n`;
- grouped.urgent.forEach(todo => output += formatTodo(todo) + '\n');
- output += '\n';
- }
-
- if (grouped.high.length > 0) {
- output += `๐ **HIGH** (${grouped.high.length})\n`;
- grouped.high.forEach(todo => output += formatTodo(todo) + '\n');
- output += '\n';
- }
-
- if (grouped.medium.length > 0) {
- output += `๐ก **MEDIUM** (${grouped.medium.length})\n`;
- grouped.medium.forEach(todo => output += formatTodo(todo) + '\n');
- output += '\n';
- }
-
- if (grouped.low.length > 0) {
- output += `๐ข **LOW** (${grouped.low.length})\n`;
- grouped.low.forEach(todo => output += formatTodo(todo) + '\n');
- output += '\n';
- }
-
- if (grouped.completed.length > 0 && !params?.status) {
- output += `โ
**COMPLETED** (${grouped.completed.length})\n`;
- grouped.completed.slice(0, 5).forEach(todo => output += formatTodo(todo) + '\n');
- if (grouped.completed.length > 5) {
- output += ` ... and ${grouped.completed.length - 5} more\n`;
- }
- }
-
- return {
- content: [
- {
- type: 'text',
- text: output
- }
- ]
- };
- },
-
- /**
- * Update a todo
- */
- 'todo_update': async (params: UpdateTodoInput) => {
- const todo = todoManager.updateTodo(params);
-
- if (!todo) {
- return {
- content: [
- {
- type: 'text',
- text: `โ Todo not found: ${params.id}`
- }
- ],
- isError: true
- };
- }
-
- return {
- content: [
- {
- type: 'text',
- text: `โ
Todo updated: "${todo.title}"\n\nStatus: ${todo.status}\nPriority: ${todo.priority}\nUpdated: ${todo.updatedAt}`
- }
- ]
- };
- },
-
- /**
- * Delete a todo
- */
- 'todo_delete': async (params: { id: string }) => {
- const success = todoManager.deleteTodo(params.id);
-
- if (!success) {
- return {
- content: [
- {
- type: 'text',
- text: `โ Todo not found: ${params.id}`
- }
- ],
- isError: true
- };
- }
-
- return {
- content: [
- {
- type: 'text',
- text: `โ
Todo deleted: ${params.id}`
- }
- ]
- };
- },
-
- /**
- * Mark todo as completed
- */
- 'todo_complete': async (params: { id: string }) => {
- const todo = todoManager.completeTodo(params.id);
-
- if (!todo) {
- return {
- content: [
- {
- type: 'text',
- text: `โ Todo not found: ${params.id}`
- }
- ],
- isError: true
- };
- }
-
- return {
- content: [
- {
- type: 'text',
- text: `โ
Todo completed: "${todo.title}"\n\nCompleted at: ${todo.completedAt}`
- }
- ]
- };
- },
-
- /**
- * Get todo statistics
- */
- 'todo_stats': async (params?: { projectId?: string }) => {
- const stats = todoManager.getStats(params?.projectId);
-
- let output = `๐ Todo Statistics\n\n`;
- output += `**Total:** ${stats.total} todos\n\n`;
-
- output += `**By Status:**\n`;
- output += `โณ Pending: ${stats.byStatus.pending}\n`;
- output += `๐ In Progress: ${stats.byStatus.in_progress}\n`;
- output += `โ
Completed: ${stats.byStatus.completed}\n`;
- output += `โ Cancelled: ${stats.byStatus.cancelled}\n\n`;
-
- output += `**By Priority:**\n`;
- output += `๐ด Urgent: ${stats.byPriority.urgent}\n`;
- output += `๐ High: ${stats.byPriority.high}\n`;
- output += `๐ก Medium: ${stats.byPriority.medium}\n`;
- output += `๐ข Low: ${stats.byPriority.low}\n\n`;
-
- if (stats.overdue > 0) {
- output += `โ ๏ธ **${stats.overdue} overdue** todo(s)\n`;
- }
-
- if (stats.dueSoon > 0) {
- output += `โฐ **${stats.dueSoon} due soon** (within 24 hours)\n`;
- }
-
- return {
- content: [
- {
- type: 'text',
- text: output
- }
- ]
- };
- },
-
- /**
- * Get all tags
- */
- 'todo_tags': async () => {
- const tags = todoManager.getAllTags();
-
- if (tags.length === 0) {
- return {
- content: [
- {
- type: 'text',
- text: '๐ท๏ธ No tags found'
- }
- ]
- };
- }
-
- return {
- content: [
- {
- type: 'text',
- text: `๐ท๏ธ Available tags (${tags.length}):\n\n${tags.join(', ')}`
- }
- ]
- };
- }
- };
-}
diff --git a/src/todo-manager.ts b/src/todo-manager.ts
deleted file mode 100644
index 3239d62..0000000
--- a/src/todo-manager.ts
+++ /dev/null
@@ -1,334 +0,0 @@
-/**
- * Todo management handlers for Context Sync MCP server
- */
-
-import Database from 'better-sqlite3';
-import { randomUUID } from 'crypto';
-import {
- Todo,
- TodoStatus,
- TodoPriority,
- CreateTodoInput,
- UpdateTodoInput,
- TodoFilter,
- TodoStats
-} from './todo-types';
-
-export class TodoManager {
- // Prepared statement cache for 2-5x faster queries
- private preparedStatements: Map = new Map();
-
- constructor(private db: Database.Database) {}
-
- /**
- * Get or create a prepared statement for faster queries (2-5x performance improvement)
- */
- private getStatement(sql: string): Database.Statement {
- if (this.preparedStatements.has(sql)) {
- return this.preparedStatements.get(sql)!;
- }
-
- const statement = this.db.prepare(sql);
- this.preparedStatements.set(sql, statement);
- return statement;
- }
-
- /**
- * Create a new todo item
- */
- createTodo(input: CreateTodoInput): Todo {
- const id = randomUUID();
- const now = new Date().toISOString();
-
- const todo: Todo = {
- id,
- title: input.title,
- description: input.description,
- status: 'pending',
- priority: input.priority || 'medium',
- tags: input.tags || [],
- dueDate: input.dueDate,
- createdAt: now,
- updatedAt: now,
- projectId: input.projectId
- };
-
- const stmt = this.getStatement(`
- INSERT INTO todos (
- id, title, description, status, priority, tags,
- due_date, created_at, updated_at, project_id
- ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
- `);
-
- stmt.run(
- todo.id,
- todo.title,
- todo.description || null,
- todo.status,
- todo.priority,
- JSON.stringify(todo.tags),
- todo.dueDate || null,
- todo.createdAt,
- todo.updatedAt,
- todo.projectId || null
- );
-
- return todo;
- }
-
- /**
- * Get a todo by ID
- */
- getTodo(id: string): Todo | null {
- const stmt = this.getStatement(`
- SELECT * FROM todos WHERE id = ?
- `);
-
- const row = stmt.get(id) as any;
- if (!row) return null;
-
- return this.rowToTodo(row);
- }
-
- /**
- * List todos with optional filtering
- */
- listTodos(filter?: TodoFilter): Todo[] {
- let query = 'SELECT * FROM todos WHERE 1=1';
- const params: any[] = [];
-
- if (filter?.status) {
- if (Array.isArray(filter.status)) {
- query += ` AND status IN (${filter.status.map(() => '?').join(',')})`;
- params.push(...filter.status);
- } else {
- query += ' AND status = ?';
- params.push(filter.status);
- }
- }
-
- if (filter?.priority) {
- if (Array.isArray(filter.priority)) {
- query += ` AND priority IN (${filter.priority.map(() => '?').join(',')})`;
- params.push(...filter.priority);
- } else {
- query += ' AND priority = ?';
- params.push(filter.priority);
- }
- }
-
- if (filter?.projectId) {
- query += ' AND project_id = ?';
- params.push(filter.projectId);
- }
-
- if (filter?.dueBefore) {
- query += ' AND due_date <= ?';
- params.push(filter.dueBefore);
- }
-
- if (filter?.dueAfter) {
- query += ' AND due_date >= ?';
- params.push(filter.dueAfter);
- }
-
- if (filter?.search) {
- query += ' AND (title LIKE ? OR description LIKE ?)';
- const searchPattern = `%${filter.search}%`;
- params.push(searchPattern, searchPattern);
- }
-
- if (filter?.tags && filter.tags.length > 0) {
- // Search for todos that have at least one of the specified tags
- const tagConditions = filter.tags.map(() => 'tags LIKE ?').join(' OR ');
- query += ` AND (${tagConditions})`;
- filter.tags.forEach(tag => {
- params.push(`%"${tag}"%`);
- });
- }
-
- query += ' ORDER BY priority DESC, due_date ASC, created_at DESC';
-
- const stmt = this.getStatement(query);
- const rows = stmt.all(...params) as any[];
-
- return rows.map(row => this.rowToTodo(row));
- }
-
- /**
- * Update a todo
- */
- updateTodo(input: UpdateTodoInput): Todo | null {
- const existing = this.getTodo(input.id);
- if (!existing) return null;
-
- const now = new Date().toISOString();
- const updates: string[] = [];
- const params: any[] = [];
-
- if (input.title !== undefined) {
- updates.push('title = ?');
- params.push(input.title);
- }
-
- if (input.description !== undefined) {
- updates.push('description = ?');
- params.push(input.description);
- }
-
- if (input.status !== undefined) {
- updates.push('status = ?');
- params.push(input.status);
-
- // Set completed_at when marking as completed
- if (input.status === 'completed') {
- updates.push('completed_at = ?');
- params.push(now);
- }
- }
-
- if (input.priority !== undefined) {
- updates.push('priority = ?');
- params.push(input.priority);
- }
-
- if (input.tags !== undefined) {
- updates.push('tags = ?');
- params.push(JSON.stringify(input.tags));
- }
-
- if (input.dueDate !== undefined) {
- updates.push('due_date = ?');
- params.push(input.dueDate);
- }
-
- if (input.projectId !== undefined) {
- updates.push('project_id = ?');
- params.push(input.projectId);
- }
-
- updates.push('updated_at = ?');
- params.push(now);
-
- params.push(input.id);
-
- const stmt = this.getStatement(`
- UPDATE todos SET ${updates.join(', ')} WHERE id = ?
- `);
-
- stmt.run(...params);
-
- return this.getTodo(input.id);
- }
-
- /**
- * Delete a todo
- */
- deleteTodo(id: string): boolean {
- const stmt = this.getStatement('DELETE FROM todos WHERE id = ?');
- const result = stmt.run(id);
- return result.changes > 0;
- }
-
- /**
- * Mark todo as completed
- */
- completeTodo(id: string): Todo | null {
- return this.updateTodo({
- id,
- status: 'completed'
- });
- }
-
- /**
- * Get todo statistics
- */
- getStats(projectId?: string): TodoStats {
- let baseQuery = 'SELECT status, priority, due_date FROM todos';
- const params: any[] = [];
-
- if (projectId) {
- baseQuery += ' WHERE project_id = ?';
- params.push(projectId);
- }
-
- const stmt = this.getStatement(baseQuery);
- const rows = stmt.all(...params) as any[];
-
- const stats: TodoStats = {
- total: rows.length,
- byStatus: {
- pending: 0,
- in_progress: 0,
- completed: 0,
- cancelled: 0
- },
- byPriority: {
- low: 0,
- medium: 0,
- high: 0,
- urgent: 0
- },
- overdue: 0,
- dueSoon: 0
- };
-
- const now = new Date();
- const tomorrow = new Date(now.getTime() + 24 * 60 * 60 * 1000);
-
- rows.forEach(row => {
- stats.byStatus[row.status as TodoStatus]++;
- stats.byPriority[row.priority as TodoPriority]++;
-
- if (row.due_date && row.status !== 'completed' && row.status !== 'cancelled') {
- const dueDate = new Date(row.due_date);
- if (dueDate < now) {
- stats.overdue++;
- } else if (dueDate < tomorrow) {
- stats.dueSoon++;
- }
- }
- });
-
- return stats;
- }
-
- /**
- * Get all unique tags across todos
- */
- getAllTags(): string[] {
- const stmt = this.getStatement('SELECT tags FROM todos WHERE tags IS NOT NULL');
- const rows = stmt.all() as any[];
-
- const tagSet = new Set();
- rows.forEach(row => {
- try {
- const tags = JSON.parse(row.tags);
- tags.forEach((tag: string) => tagSet.add(tag));
- } catch (e) {
- // Skip invalid JSON
- }
- });
-
- return Array.from(tagSet).sort();
- }
-
- /**
- * Convert database row to Todo object
- */
- private rowToTodo(row: any): Todo {
- return {
- id: row.id,
- title: row.title,
- description: row.description || undefined,
- status: row.status,
- priority: row.priority,
- tags: row.tags ? JSON.parse(row.tags) : [],
- dueDate: row.due_date || undefined,
- createdAt: row.created_at,
- updatedAt: row.updated_at,
- completedAt: row.completed_at || undefined,
- projectId: row.project_id || undefined
- };
- }
-}
diff --git a/src/todo-schema.ts b/src/todo-schema.ts
deleted file mode 100644
index dbcc8ab..0000000
--- a/src/todo-schema.ts
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Database schema for todos
- * This should be integrated into your existing storage.ts file
- */
-
-import Database from 'better-sqlite3';
-
-export function createTodoTable(db: Database.Database): void {
- db.exec(`
- CREATE TABLE IF NOT EXISTS todos (
- id TEXT PRIMARY KEY,
- title TEXT NOT NULL,
- description TEXT,
- status TEXT NOT NULL DEFAULT 'pending',
- priority TEXT NOT NULL DEFAULT 'medium',
- tags TEXT, -- JSON array of tags
- due_date TEXT, -- ISO 8601 format
- created_at TEXT NOT NULL,
- updated_at TEXT NOT NULL,
- completed_at TEXT,
- project_id TEXT,
- FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE SET NULL
- );
-
- CREATE INDEX IF NOT EXISTS idx_todos_status ON todos(status);
- CREATE INDEX IF NOT EXISTS idx_todos_priority ON todos(priority);
- CREATE INDEX IF NOT EXISTS idx_todos_due_date ON todos(due_date);
- CREATE INDEX IF NOT EXISTS idx_todos_project_id ON todos(project_id);
- CREATE INDEX IF NOT EXISTS idx_todos_created_at ON todos(created_at);
- `);
-}
diff --git a/src/todo-tools.ts b/src/todo-tools.ts
deleted file mode 100644
index 0525b52..0000000
--- a/src/todo-tools.ts
+++ /dev/null
@@ -1,201 +0,0 @@
-/**
- * MCP Tool definitions for todo management
- * Add these tool definitions to your server's tool list
- */
-
-export const todoToolDefinitions = [
- {
- name: 'todo_create',
- description: 'Create a new todo item. If no projectId is specified, the todo will be automatically linked to the current active project (if any).',
- inputSchema: {
- type: 'object',
- properties: {
- title: {
- type: 'string',
- description: 'Title of the todo item'
- },
- description: {
- type: 'string',
- description: 'Detailed description of the todo (optional)'
- },
- priority: {
- type: 'string',
- enum: ['low', 'medium', 'high', 'urgent'],
- description: 'Priority level (default: medium)'
- },
- tags: {
- type: 'array',
- items: { type: 'string' },
- description: 'Tags to categorize the todo (optional)'
- },
- dueDate: {
- type: 'string',
- description: 'Due date in ISO 8601 format (optional)'
- },
- projectId: {
- type: 'string',
- description: 'Link to a specific project (optional - if not provided, uses current active project)'
- }
- },
- required: ['title']
- }
- },
- {
- name: 'todo_get',
- description: 'Get a specific todo item by ID',
- inputSchema: {
- type: 'object',
- properties: {
- id: {
- type: 'string',
- description: 'The ID of the todo item'
- }
- },
- required: ['id']
- }
- },
- {
- name: 'todo_list',
- description: 'List todos with optional filters. Returns todos grouped by priority. By default, shows todos for the current active project. Use projectId parameter to view todos from other projects, or omit projectId to see all todos.',
- inputSchema: {
- type: 'object',
- properties: {
- status: {
- oneOf: [
- { type: 'string', enum: ['pending', 'in_progress', 'completed', 'cancelled'] },
- {
- type: 'array',
- items: { type: 'string', enum: ['pending', 'in_progress', 'completed', 'cancelled'] }
- }
- ],
- description: 'Filter by status (single value or array)'
- },
- priority: {
- oneOf: [
- { type: 'string', enum: ['low', 'medium', 'high', 'urgent'] },
- {
- type: 'array',
- items: { type: 'string', enum: ['low', 'medium', 'high', 'urgent'] }
- }
- ],
- description: 'Filter by priority (single value or array)'
- },
- tags: {
- type: 'array',
- items: { type: 'string' },
- description: 'Filter by tags (returns todos with any of these tags)'
- },
- projectId: {
- type: 'string',
- description: 'Filter by project ID (if omitted, shows todos for current active project, or all todos if no project is active)'
- },
- dueBefore: {
- type: 'string',
- description: 'Filter todos due before this date (ISO 8601)'
- },
- dueAfter: {
- type: 'string',
- description: 'Filter todos due after this date (ISO 8601)'
- },
- search: {
- type: 'string',
- description: 'Search in title and description'
- }
- }
- }
- },
- {
- name: 'todo_update',
- description: 'Update a todo item. Only provided fields will be updated.',
- inputSchema: {
- type: 'object',
- properties: {
- id: {
- type: 'string',
- description: 'The ID of the todo to update'
- },
- title: {
- type: 'string',
- description: 'New title'
- },
- description: {
- type: 'string',
- description: 'New description'
- },
- status: {
- type: 'string',
- enum: ['pending', 'in_progress', 'completed', 'cancelled'],
- description: 'New status'
- },
- priority: {
- type: 'string',
- enum: ['low', 'medium', 'high', 'urgent'],
- description: 'New priority'
- },
- tags: {
- type: 'array',
- items: { type: 'string' },
- description: 'New tags (replaces existing tags)'
- },
- dueDate: {
- type: 'string',
- description: 'New due date in ISO 8601 format'
- },
- projectId: {
- type: 'string',
- description: 'New project ID'
- }
- },
- required: ['id']
- }
- },
- {
- name: 'todo_delete',
- description: 'Delete a todo item',
- inputSchema: {
- type: 'object',
- properties: {
- id: {
- type: 'string',
- description: 'The ID of the todo to delete'
- }
- },
- required: ['id']
- }
- },
- {
- name: 'todo_complete',
- description: 'Mark a todo as completed (shortcut for updating status)',
- inputSchema: {
- type: 'object',
- properties: {
- id: {
- type: 'string',
- description: 'The ID of the todo to complete'
- }
- },
- required: ['id']
- }
- },
- {
- name: 'todo_stats',
- description: 'Get statistics about todos (counts by status, priority, overdue, etc.)',
- inputSchema: {
- type: 'object',
- properties: {
- projectId: {
- type: 'string',
- description: 'Get stats for a specific project (optional)'
- }
- }
- }
- },
- {
- name: 'todo_tags',
- description: 'Get a list of all unique tags used in todos',
- inputSchema: {
- type: 'object',
- properties: {}
- }
- }
-];
diff --git a/src/todo-types.ts b/src/todo-types.ts
deleted file mode 100644
index e64635a..0000000
--- a/src/todo-types.ts
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Todo item types and interfaces
- */
-
-export type TodoStatus = 'pending' | 'in_progress' | 'completed' | 'cancelled';
-export type TodoPriority = 'low' | 'medium' | 'high' | 'urgent';
-
-export interface Todo {
- id: string;
- title: string;
- description?: string;
- status: TodoStatus;
- priority: TodoPriority;
- tags: string[];
- dueDate?: string; // ISO 8601 format
- createdAt: string;
- updatedAt: string;
- completedAt?: string;
- projectId?: string; // Optional link to a project
-}
-
-export interface CreateTodoInput {
- title: string;
- description?: string;
- priority?: TodoPriority;
- tags?: string[];
- dueDate?: string;
- projectId?: string;
-}
-
-export interface UpdateTodoInput {
- id: string;
- title?: string;
- description?: string;
- status?: TodoStatus;
- priority?: TodoPriority;
- tags?: string[];
- dueDate?: string;
- projectId?: string;
-}
-
-export interface TodoFilter {
- status?: TodoStatus | TodoStatus[];
- priority?: TodoPriority | TodoPriority[];
- tags?: string[];
- projectId?: string;
- dueBefore?: string;
- dueAfter?: string;
- search?: string; // Search in title and description
-}
-
-export interface TodoStats {
- total: number;
- byStatus: Record;
- byPriority: Record;
- overdue: number;
- dueSoon: number; // Due within 24 hours
-}
diff --git a/src/type-analyzer.ts b/src/type-analyzer.ts
deleted file mode 100644
index 06d6a41..0000000
--- a/src/type-analyzer.ts
+++ /dev/null
@@ -1,660 +0,0 @@
-import * as fs from 'fs';
-import { promises as fsAsync } from 'fs';
-import * as path from 'path';
-
-// Types for type analysis
-export interface TypeDefinition {
- name: string;
- kind: 'interface' | 'type' | 'class' | 'enum';
- filePath: string;
- line: number;
- isExported: boolean;
- raw: string; // Raw definition text
-}
-
-export interface InterfaceInfo extends TypeDefinition {
- kind: 'interface';
- properties: PropertyInfo[];
- methods: MethodInfo[];
- extends?: string[];
-}
-
-export interface TypeAliasInfo extends TypeDefinition {
- kind: 'type';
- definition: string; // The actual type definition
-}
-
-export interface ClassInfo extends TypeDefinition {
- kind: 'class';
- properties: PropertyInfo[];
- methods: MethodInfo[];
- constructor?: MethodInfo;
- extends?: string;
- implements?: string[];
-}
-
-export interface EnumInfo extends TypeDefinition {
- kind: 'enum';
- members: EnumMember[];
-}
-
-export interface PropertyInfo {
- name: string;
- type: string;
- optional: boolean;
- readonly: boolean;
- line: number;
-}
-
-export interface MethodInfo {
- name: string;
- params: ParameterInfo[];
- returnType?: string;
- isAsync: boolean;
- isStatic: boolean;
- visibility?: 'public' | 'private' | 'protected';
- line: number;
-}
-
-export interface ParameterInfo {
- name: string;
- type?: string;
- optional: boolean;
- defaultValue?: string;
-}
-
-export interface EnumMember {
- name: string;
- value?: string | number;
- line: number;
-}
-
-export interface TypeUsage {
- filePath: string;
- line: number;
- context: string; // The line of code where it's used
- usageType: 'variable' | 'parameter' | 'return' | 'generic' | 'implements' | 'extends';
-}
-
-export interface TypeInfo {
- definition: TypeDefinition;
- details: InterfaceInfo | TypeAliasInfo | ClassInfo | EnumInfo;
- usages: TypeUsage[];
- relatedTypes: string[]; // Types referenced in this type
-}
-
-export class TypeAnalyzer {
- private workspacePath: string;
- private fileCache: Map;
- private typeCache: Map;
-
- // File size limits to prevent OOM crashes
- private readonly MAX_FILE_SIZE = 5 * 1024 * 1024; // 5MB - prevents OOM crashes
- private readonly WARN_FILE_SIZE = 1 * 1024 * 1024; // 1MB - warn but still process
-
- constructor(workspacePath: string) {
- this.workspacePath = workspacePath;
- this.fileCache = new Map();
- this.typeCache = new Map();
- }
-
- /**
- * Find type definition by name
- */
- public async findTypeDefinition(typeName: string): Promise {
- const allFiles = await this.getAllProjectFiles();
-
- for (const file of allFiles) {
- const types = await this.extractTypes(file);
- const found = types.find(t => t.name === typeName);
- if (found) return found;
- }
-
- return null;
- }
-
- /**
- * Get complete information about a type
- */
- public async getTypeInfo(typeName: string): Promise {
- const definition = await this.findTypeDefinition(typeName);
- if (!definition) return null;
-
- const details = await this.getTypeDetails(definition);
- const usages = await this.findTypeUsages(typeName);
- const relatedTypes = await this.extractRelatedTypes(definition);
-
- return {
- definition,
- details,
- usages,
- relatedTypes
- };
- }
-
- /**
- * Get detailed information based on type kind
- */
- private async getTypeDetails(definition: TypeDefinition): Promise {
- const content = await this.readFile(definition.filePath);
- const lines = content.split('\n');
-
- switch (definition.kind) {
- case 'interface':
- return await this.parseInterface(definition, lines);
- case 'type':
- return await this.parseTypeAlias(definition, lines);
- case 'class':
- return await this.parseClass(definition, lines);
- case 'enum':
- return await this.parseEnum(definition, lines);
- }
- }
-
- /**
- * Parse interface details
- */
- private async parseInterface(definition: TypeDefinition, lines: string[]): Promise {
- const properties: PropertyInfo[] = [];
- const methods: MethodInfo[] = [];
- let extendsTypes: string[] = [];
-
- // Find extends
- const headerLine = lines[definition.line - 1];
- const extendsMatch = /extends\s+([\w\s,]+)/.exec(headerLine);
- if (extendsMatch) {
- extendsTypes = extendsMatch[1].split(',').map(t => t.trim());
- }
-
- // Parse body
- let inInterface = false;
- let braceCount = 0;
-
- for (let i = definition.line - 1; i < lines.length; i++) {
- const line = lines[i];
- const trimmed = line.trim();
-
- if (trimmed.includes('interface')) {
- inInterface = true;
- }
-
- if (!inInterface) continue;
-
- braceCount += (line.match(/{/g) || []).length;
- braceCount -= (line.match(/}/g) || []).length;
-
- // Property: name: type or name?: type
- const propMatch = /^(readonly\s+)?(\w+)(\?)?:\s*([^;]+);?/.exec(trimmed);
- if (propMatch && !trimmed.includes('(')) {
- properties.push({
- name: propMatch[2],
- type: propMatch[4].trim(),
- optional: !!propMatch[3],
- readonly: !!propMatch[1],
- line: i + 1
- });
- continue;
- }
-
- // Method: name(): returnType or name(params): returnType
- const methodMatch = /(\w+)\s*\(([^)]*)\)\s*:\s*([^;]+)/.exec(trimmed);
- if (methodMatch) {
- methods.push({
- name: methodMatch[1],
- params: this.parseParameters(methodMatch[2]),
- returnType: methodMatch[3].trim(),
- isAsync: false,
- isStatic: false,
- line: i + 1
- });
- }
-
- if (braceCount === 0 && inInterface && i > definition.line - 1) {
- break;
- }
- }
-
- return {
- ...definition,
- kind: 'interface',
- properties,
- methods,
- extends: extendsTypes.length > 0 ? extendsTypes : undefined
- };
- }
-
- /**
- * Parse type alias details
- */
- private async parseTypeAlias(definition: TypeDefinition, lines: string[]): Promise {
- const line = lines[definition.line - 1];
- const match = /type\s+\w+\s*=\s*(.+)/.exec(line);
- const typeDefinition = match ? match[1].trim() : '';
-
- return {
- ...definition,
- kind: 'type',
- definition: typeDefinition
- };
- }
-
- /**
- * Parse class details
- */
- private async parseClass(definition: TypeDefinition, lines: string[]): Promise {
- const properties: PropertyInfo[] = [];
- const methods: MethodInfo[] = [];
- let constructorInfo: MethodInfo | undefined;
- let extendsClass: string | undefined;
- let implementsInterfaces: string[] = [];
-
- // Find extends and implements
- const headerLine = lines[definition.line - 1];
- const extendsMatch = /extends\s+(\w+)/.exec(headerLine);
- if (extendsMatch) {
- extendsClass = extendsMatch[1];
- }
-
- const implementsMatch = /implements\s+([\w\s,]+)/.exec(headerLine);
- if (implementsMatch) {
- implementsInterfaces = implementsMatch[1].split(',').map(t => t.trim());
- }
-
- // Parse body
- let inClass = false;
- let braceCount = 0;
-
- for (let i = definition.line - 1; i < lines.length; i++) {
- const line = lines[i];
- const trimmed = line.trim();
-
- if (trimmed.includes('class')) {
- inClass = true;
- }
-
- if (!inClass) continue;
-
- braceCount += (line.match(/{/g) || []).length;
- braceCount -= (line.match(/}/g) || []).length;
-
- // Property: private/public/protected name: type
- const propMatch = /^(public|private|protected)?\s*(readonly\s+)?(\w+)(\?)?:\s*([^;=]+)/.exec(trimmed);
- if (propMatch && !trimmed.includes('(')) {
- properties.push({
- name: propMatch[3],
- type: propMatch[5].trim(),
- optional: !!propMatch[4],
- readonly: !!propMatch[2],
- line: i + 1
- });
- continue;
- }
-
- // Constructor
- if (trimmed.includes('constructor')) {
- const constructorMatch = /constructor\s*\(([^)]*)\)/.exec(trimmed);
- if (constructorMatch) {
- constructorInfo = {
- name: 'constructor',
- params: this.parseParameters(constructorMatch[1]),
- isAsync: false,
- isStatic: false,
- line: i + 1
- };
- }
- continue;
- }
-
- // Method
- const methodMatch = /(public|private|protected)?\s*(static\s+)?(async\s+)?(\w+)\s*\(([^)]*)\)/.exec(trimmed);
- if (methodMatch && !trimmed.includes('if') && !trimmed.includes('while')) {
- const methodName = methodMatch[4];
- if (methodName !== 'constructor') {
- methods.push({
- name: methodName,
- params: this.parseParameters(methodMatch[5]),
- isAsync: !!methodMatch[3],
- isStatic: !!methodMatch[2],
- visibility: methodMatch[1] as any || 'public',
- line: i + 1
- });
- }
- }
-
- if (braceCount === 0 && inClass && i > definition.line - 1) {
- break;
- }
- }
-
- return {
- ...definition,
- kind: 'class',
- properties,
- methods,
- constructor: constructorInfo,
- extends: extendsClass,
- implements: implementsInterfaces.length > 0 ? implementsInterfaces : undefined
- };
- }
-
- /**
- * Parse enum details
- */
- private async parseEnum(definition: TypeDefinition, lines: string[]): Promise {
- const members: EnumMember[] = [];
- let inEnum = false;
- let braceCount = 0;
-
- for (let i = definition.line - 1; i < lines.length; i++) {
- const line = lines[i];
- const trimmed = line.trim();
-
- if (trimmed.includes('enum')) {
- inEnum = true;
- }
-
- if (!inEnum) continue;
-
- braceCount += (line.match(/{/g) || []).length;
- braceCount -= (line.match(/}/g) || []).length;
-
- // Enum member: NAME = value or NAME
- const memberMatch = /(\w+)\s*=?\s*([^,}]+)?/.exec(trimmed);
- if (memberMatch && !trimmed.includes('enum') && trimmed !== '}') {
- const value = memberMatch[2]?.trim().replace(/[,}]/g, '');
- members.push({
- name: memberMatch[1],
- value: value ? (isNaN(Number(value)) ? value : Number(value)) : undefined,
- line: i + 1
- });
- }
-
- if (braceCount === 0 && inEnum && i > definition.line - 1) {
- break;
- }
- }
-
- return {
- ...definition,
- kind: 'enum',
- members
- };
- }
-
- /**
- * Find all usages of a type
- */
- public async findTypeUsages(typeName: string): Promise {
- const usages: TypeUsage[] = [];
- const allFiles = await this.getAllProjectFiles();
-
- // Pre-compile regex patterns for better performance (fixes regex-in-loops)
- const escapedTypeName = typeName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
- const variableRegex = new RegExp(`\\w+\\s*:\\s*${escapedTypeName}`);
- const returnTypeRegex = new RegExp(`\\)\\s*:\\s*${escapedTypeName}`);
-
- for (const file of allFiles) {
- const content = await this.readFile(file);
- const lines = content.split('\n');
-
- lines.forEach((line, lineNumber) => {
- const trimmed = line.trim();
-
- // Skip the definition itself
- if (trimmed.includes(`interface ${typeName}`) ||
- trimmed.includes(`type ${typeName}`) ||
- trimmed.includes(`class ${typeName}`) ||
- trimmed.includes(`enum ${typeName}`)) {
- return;
- }
-
- // Check for usage
- if (trimmed.includes(typeName)) {
- let usageType: TypeUsage['usageType'] = 'variable';
-
- if (trimmed.includes('implements') && trimmed.includes(typeName)) {
- usageType = 'implements';
- } else if (trimmed.includes('extends') && trimmed.includes(typeName)) {
- usageType = 'extends';
- } else if (trimmed.includes('<') && trimmed.includes(typeName)) {
- usageType = 'generic';
- } else if (trimmed.match(variableRegex)) {
- usageType = 'variable';
- } else if (trimmed.match(returnTypeRegex)) {
- usageType = 'return';
- }
-
- usages.push({
- filePath: file,
- line: lineNumber + 1,
- context: trimmed,
- usageType
- });
- }
- });
- }
-
- return usages;
- }
-
- /**
- * Extract all type definitions from a file
- */
- private async extractTypes(filePath: string): Promise {
- if (this.typeCache.has(filePath)) {
- return this.typeCache.get(filePath)!;
- }
-
- const content = await this.readFile(filePath);
- const types: TypeDefinition[] = [];
- const lines = content.split('\n');
-
- lines.forEach((line, lineNumber) => {
- const trimmed = line.trim();
-
- // Interface
- const interfaceMatch = /(?:export\s+)?interface\s+(\w+)/.exec(trimmed);
- if (interfaceMatch) {
- types.push({
- name: interfaceMatch[1],
- kind: 'interface',
- filePath,
- line: lineNumber + 1,
- isExported: trimmed.includes('export'),
- raw: line
- });
- return;
- }
-
- // Type alias
- const typeMatch = /(?:export\s+)?type\s+(\w+)\s*=/.exec(trimmed);
- if (typeMatch) {
- types.push({
- name: typeMatch[1],
- kind: 'type',
- filePath,
- line: lineNumber + 1,
- isExported: trimmed.includes('export'),
- raw: line
- });
- return;
- }
-
- // Class
- const classMatch = /(?:export\s+)?(?:abstract\s+)?class\s+(\w+)/.exec(trimmed);
- if (classMatch) {
- types.push({
- name: classMatch[1],
- kind: 'class',
- filePath,
- line: lineNumber + 1,
- isExported: trimmed.includes('export'),
- raw: line
- });
- return;
- }
-
- // Enum
- const enumMatch = /(?:export\s+)?enum\s+(\w+)/.exec(trimmed);
- if (enumMatch) {
- types.push({
- name: enumMatch[1],
- kind: 'enum',
- filePath,
- line: lineNumber + 1,
- isExported: trimmed.includes('export'),
- raw: line
- });
- }
- });
-
- this.typeCache.set(filePath, types);
- return types;
- }
-
- /**
- * Extract related types referenced in this type
- */
- private async extractRelatedTypes(definition: TypeDefinition): Promise {
- const content = await this.readFile(definition.filePath);
- const lines = content.split('\n');
- const relatedTypes = new Set();
-
- // Get the type definition block
- let inType = false;
- let braceCount = 0;
-
- for (let i = definition.line - 1; i < lines.length; i++) {
- const line = lines[i];
-
- if (i === definition.line - 1) {
- inType = true;
- }
-
- if (!inType) continue;
-
- braceCount += (line.match(/{/g) || []).length;
- braceCount -= (line.match(/}/g) || []).length;
-
- // Extract type references (capitalized words that might be types)
- const typeReferences = line.match(/:\s*([A-Z]\w+)/g);
- if (typeReferences) {
- typeReferences.forEach(ref => {
- const typeName = ref.replace(/:\s*/, '');
- if (typeName !== definition.name) {
- relatedTypes.add(typeName);
- }
- });
- }
-
- if (braceCount === 0 && inType && i > definition.line - 1) {
- break;
- }
- }
-
- return Array.from(relatedTypes);
- }
-
- /**
- * Parse function/method parameters
- */
- private parseParameters(paramString: string): ParameterInfo[] {
- if (!paramString || !paramString.trim()) return [];
-
- return paramString.split(',').map(param => {
- const trimmed = param.trim();
- const optional = trimmed.includes('?');
- const hasDefault = trimmed.includes('=');
-
- // Extract name, type, and default value
- const match = /(\w+)(\?)?:\s*([^=]+)(?:=\s*(.+))?/.exec(trimmed);
-
- if (match) {
- return {
- name: match[1],
- type: match[3]?.trim(),
- optional: optional || hasDefault,
- defaultValue: match[4]?.trim()
- };
- }
-
- // Simple parameter without type
- const simpleMatch = /(\w+)(\?)?/.exec(trimmed);
- return {
- name: simpleMatch?.[1] || trimmed,
- optional: optional,
- };
- });
- }
-
- // Helper methods
-
- private async readFile(filePath: string): Promise {
- if (this.fileCache.has(filePath)) {
- return this.fileCache.get(filePath)!;
- }
-
- try {
- // Check file size first to prevent OOM crashes
- const stats = await fsAsync.stat(filePath);
-
- if (stats.size > this.MAX_FILE_SIZE) {
- console.error(`โ ๏ธ File too large for type analysis (${(stats.size / 1024 / 1024).toFixed(1)}MB), skipping: ${this.getRelativePath(filePath)}`);
- return '';
- }
-
- if (stats.size > this.WARN_FILE_SIZE) {
- console.error(`โ ๏ธ Large file in type analysis (${(stats.size / 1024 / 1024).toFixed(1)}MB): ${this.getRelativePath(filePath)}`);
- }
-
- const content = await fsAsync.readFile(filePath, 'utf-8');
- this.fileCache.set(filePath, content);
- return content;
- } catch (error) {
- return '';
- }
- }
-
- private getRelativePath(filePath: string): string {
- return path.relative(this.workspacePath, filePath);
- }
-
- private async getAllProjectFiles(): Promise {
- const files: string[] = [];
- const extensions = ['.ts', '.tsx']; // Only TypeScript files for type analysis
-
- const walk = async (dir: string): Promise => {
- try {
- const entries = await fsAsync.readdir(dir, { withFileTypes: true });
-
- for (const entry of entries) {
- const fullPath = path.join(dir, entry.name);
-
- if (entry.isDirectory()) {
- if (!['node_modules', 'dist', 'build', '.git', '.next', 'out', 'coverage'].includes(entry.name)) {
- await walk(fullPath);
- }
- } else {
- const ext = path.extname(entry.name);
- if (extensions.includes(ext)) {
- files.push(fullPath);
- }
- }
- }
- } catch (error) {
- // Skip directories we can't read
- }
- };
-
- await walk(this.workspacePath);
- return files;
- }
-
- /**
- * Clear caches
- */
- public clearCache() {
- this.fileCache.clear();
- this.typeCache.clear();
- }
-}
\ No newline at end of file
diff --git a/src/types.ts b/src/types.ts
index 801598d..4e12710 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -1,4 +1,4 @@
-// Core types for Context Sync
+๏ปฟ// Core types for Context Sync
export interface ProjectContext {
id: string;
@@ -49,18 +49,13 @@ export interface StorageInterface {
// Conversations
addConversation(conv: Omit): Conversation;
getRecentConversations(projectId: string, limit?: number): Conversation[];
- streamConversations(projectId: string, limit?: number): Generator;
// Decisions
addDecision(decision: Omit): Decision;
getDecisions(projectId: string): Decision[];
- streamDecisions(projectId: string, limit?: number): Generator;
-
- // Projects streaming
- streamAllProjects(): Generator;
// Context
getContextSummary(projectId: string): ContextSummary;
findProjectByPath(projectPath: string): ProjectContext | null;
-}
\ No newline at end of file
+}
diff --git a/src/workspace-detector.ts b/src/workspace-detector.ts
index 163682f..93fe2ce 100644
--- a/src/workspace-detector.ts
+++ b/src/workspace-detector.ts
@@ -1,4 +1,4 @@
-// IDE Workspace Detection and File Reading
+๏ปฟ// IDE Workspace Detection and File Reading
import * as fs from 'fs';
import { promises as fsAsync } from 'fs';
@@ -55,7 +55,7 @@ export class WorkspaceDetector {
console.error('Error auto-detecting project:', error);
});
- console.error(`๐ Workspace set: ${workspacePath}`);
+ console.error(` Workspace set: ${workspacePath}`);
}
/**
@@ -85,20 +85,21 @@ export class WorkspaceDetector {
});
this.fileWatcher
- .on('change', (filePath) => {
+ .on('change', (filePath: string) => {
this.invalidateFileCache(filePath);
})
- .on('add', (filePath) => {
+ .on('add', (filePath: string) => {
this.invalidateFileCache(filePath);
})
- .on('unlink', (filePath) => {
+ .on('unlink', (filePath: string) => {
this.invalidateFileCache(filePath);
})
- .on('error', (error) => {
- console.error('File watcher error:', error);
+ .on('error', (error: unknown) => {
+ const message = error instanceof Error ? error : String(error);
+ console.error('File watcher error:', message);
});
- console.error('๐ File watcher active for cache invalidation');
+ console.error(' File watcher active for cache invalidation');
}
/**
@@ -108,7 +109,7 @@ export class WorkspaceDetector {
// Remove from file cache
if (this.fileCache.has(filePath)) {
this.fileCache.delete(filePath);
- console.error(`๐ Cache invalidated: ${path.relative(this.currentWorkspace || '', filePath)}`);
+ console.error(` Cache invalidated: ${path.relative(this.currentWorkspace || '', filePath)}`);
}
// Also remove any related cached files (for relative path variations)
@@ -152,12 +153,12 @@ export class WorkspaceDetector {
const stats = await fsAsync.stat(fullPath);
if (stats.size > this.MAX_FILE_SIZE) {
- console.error(`โ ๏ธ File too large (${(stats.size / 1024 / 1024).toFixed(1)}MB), skipping: ${relativePath}`);
+ console.error(` File too large (${(stats.size / 1024 / 1024).toFixed(1)}MB), skipping: ${relativePath}`);
return null;
}
if (stats.size > this.WARN_FILE_SIZE) {
- console.error(`โ ๏ธ Large file detected (${(stats.size / 1024 / 1024).toFixed(1)}MB): ${relativePath}`);
+ console.error(` Large file detected (${(stats.size / 1024 / 1024).toFixed(1)}MB): ${relativePath}`);
}
const content = await fsAsync.readFile(fullPath, 'utf8');
@@ -215,12 +216,12 @@ export class WorkspaceDetector {
for (let index = 0; index < filtered.length; index++) {
const entry = filtered[index];
const isLast = index === filtered.length - 1;
- const marker = isLast ? 'โโโ ' : 'โโโ ';
+ const marker = isLast ? ' ' : ' ';
const fullPath = path.join(dirPath, entry.name);
if (entry.isDirectory()) {
- output.push(`${prefix}${marker}๐ ${entry.name}/`);
- const newPrefix = prefix + (isLast ? ' ' : 'โ ');
+ output.push(`${prefix}${marker} ${entry.name}/`);
+ const newPrefix = prefix + (isLast ? ' ' : ' ');
await this.buildStructure(fullPath, newPrefix, depth + 1, maxDepth, output);
} else {
const icon = this.getFileIcon(entry.name);
@@ -377,20 +378,20 @@ export class WorkspaceDetector {
const ext = path.extname(filename).toLowerCase();
const iconMap: Record = {
- '.ts': '๐',
- '.tsx': 'โ๏ธ',
- '.js': '๐',
- '.jsx': 'โ๏ธ',
- '.json': '๐',
- '.md': '๐',
- '.css': '๐จ',
- '.html': '๐',
- '.py': '๐',
- '.rs': '๐ฆ',
- '.go': '๐ท',
+ '.ts': '',
+ '.tsx': '',
+ '.js': '',
+ '.jsx': '',
+ '.json': '',
+ '.md': '',
+ '.css': '',
+ '.html': '',
+ '.py': '',
+ '.rs': '',
+ '.go': '',
};
- return iconMap[ext] || '๐';
+ return iconMap[ext] || '';
}
/**
@@ -445,7 +446,8 @@ export class WorkspaceDetector {
if (this.fileWatcher) {
this.fileWatcher.close();
this.fileWatcher = null;
- console.error('๐ File watcher disposed');
+ console.error(' File watcher disposed');
}
}
-}
\ No newline at end of file
+}
+