diff --git a/README.md b/README.md
index 8b32690e..d633db17 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@
## Overview
-AI-Driven Development (vibe coding) on Databricks just got a whole lot better. The **AI Dev Kit** gives your AI coding assistant (Claude Code, Cursor, Antigravity, Windsurf, etc.) the trusted sources it needs to build faster and smarter on Databricks.
+AI-Driven Development (vibe coding) on Databricks just got a whole lot better. The **AI Dev Kit** gives your AI coding assistant (Claude Code, Cursor, Antigravity, Kiro, Windsurf, etc.) the trusted sources it needs to build faster and smarter on Databricks.
@@ -58,11 +58,12 @@ AI-Driven Development (vibe coding) on Databricks just got a whole lot better. T
- [Cursor](https://cursor.com)
- [Gemini CLI](https://github.com/google-gemini/gemini-cli)
- [Antigravity](https://antigravity.google)
+ - [Kiro](https://kiro.dev)
### Install in existing project
By default this will install at a project level rather than a user level. This is often a good fit, but requires you to run your client from the exact directory that was used for the install.
-_Note: Project configuration files can be re-used in other projects. You find these configs under .claude, .cursor, .gemini, or .agents_
+_Note: Project configuration files can be re-used in other projects. You can find these configs under .claude, .cursor, .gemini, .kiro, or .agents_
#### Mac / Linux
@@ -90,7 +91,7 @@ bash <(curl -sL https://raw.githubusercontent.com/databricks-solutions/ai-dev-ki
**Install for specific tools only**
```bash
-bash <(curl -sL https://raw.githubusercontent.com/databricks-solutions/ai-dev-kit/main/install.sh) --tools cursor,gemini,antigravity
+bash <(curl -sL https://raw.githubusercontent.com/databricks-solutions/ai-dev-kit/main/install.sh) --tools cursor,gemini,antigravity,kiro
```
@@ -130,7 +131,7 @@ irm https://raw.githubusercontent.com/databricks-solutions/ai-dev-kit/main/insta
**Install for specific tools only**
```powershell
-.\install.ps1 -Tools cursor,gemini,antigravity
+.\install.ps1 -Tools cursor,gemini,antigravity,kiro
```
diff --git a/databricks-skills/README.md b/databricks-skills/README.md
index a81730a2..8e7cb455 100644
--- a/databricks-skills/README.md
+++ b/databricks-skills/README.md
@@ -1,10 +1,12 @@
-# Databricks Skills for Claude Code
+# Databricks Skills for AI Coding Assistants
-Skills that teach Claude Code how to work effectively with Databricks - providing patterns, best practices, and code examples that work with Databricks MCP tools.
+Skills that teach AI coding assistants (Claude Code, Cursor, Kiro, etc.) how to work effectively with Databricks - providing patterns, best practices, and code examples that work with Databricks MCP tools.
## Installation
-Run from your **project root** (the directory where you want `.claude/skills` created).
+Run from your **project root** (the directory where you want skills created).
+
+> **Skill directories by tool:** `.claude/skills/` (Claude Code), `.cursor/skills/` (Cursor), `.kiro/skills/` (Kiro), `.github/skills/` (Copilot), `.gemini/skills/` (Gemini CLI), `.agents/skills/` (Codex). The unified installer (`install.sh` / `install.ps1`) handles this automatically.
### From this repository (local script)
@@ -57,18 +59,24 @@ curl -sSL https://raw.githubusercontent.com/databricks-solutions/ai-dev-kit/main
curl -sSL https://raw.githubusercontent.com/databricks-solutions/ai-dev-kit/main/databricks-skills/install_skills.sh | bash -s -- --install-to-genie --profile prod
```
-`--install-to-genie` uploads the tree under `./.claude/skills` to the workspace (requires the `databricks` CLI).
+`--install-to-genie` uploads the tree under your tool's skills directory (e.g. `./.claude/skills` or `./.kiro/skills`) to the workspace (requires the `databricks` CLI).
-This creates `.claude/skills/` and downloads all skills. Claude Code loads them automatically.
+This creates a skills directory and downloads all skills. Your AI coding assistant loads them automatically.
- **Databricks skills** are downloaded from this repository
- **MLflow skills** are fetched dynamically from [github.com/mlflow/skills](https://github.com/mlflow/skills)
-**Manual install:**
+**Manual install (example for Claude Code):**
```bash
mkdir -p .claude/skills
cp -r ai-dev-kit/databricks-skills/databricks-agent-bricks .claude/skills/
```
+**Manual install (example for Kiro):**
+```bash
+mkdir -p .kiro/skills
+cp -r ai-dev-kit/databricks-skills/databricks-agent-bricks .kiro/skills/
+```
+
## Available Skills
### 🤖 AI & Agents
@@ -113,18 +121,25 @@ cp -r ai-dev-kit/databricks-skills/databricks-agent-bricks .claude/skills/
## How It Works
```
-┌────────────────────────────────────────────────┐
-│ .claude/skills/ + .claude/mcp.json │
-│ (Knowledge) (Actions) │
-│ │
-│ Skills teach HOW + MCP does it │
-│ ↓ ↓ │
-│ Claude Code learns patterns and executes │
-└────────────────────────────────────────────────┘
+┌─────────────────────────────────────────────────────────┐
+│ /skills/ + MCP config │
+│ (Knowledge) (Actions) │
+│ │
+│ Skills teach HOW + MCP does it │
+│ ↓ ↓ │
+│ Your AI assistant learns patterns and executes │
+│ │
+│ Tool Skills dir MCP config │
+│ Claude Code .claude/skills/ .mcp.json │
+│ Cursor .cursor/skills/ .cursor/mcp.json │
+│ Kiro .kiro/skills/ .kiro/settings/mcp.json │
+│ Copilot .github/skills/ .vscode/mcp.json │
+│ Gemini CLI .gemini/skills/ .gemini/settings.json │
+└─────────────────────────────────────────────────────────┘
```
**Example:** User says "Create a sales dashboard"
-1. Claude loads `databricks-aibi-dashboards` skill → learns validation workflow
+1. The assistant loads `databricks-aibi-dashboards` skill → learns validation workflow
2. Calls `get_table_stats_and_schema()` → gets schemas
3. Calls `execute_sql()` → tests queries
4. Calls `manage_dashboard(action="create_or_update")` → deploys
@@ -132,7 +147,7 @@ cp -r ai-dev-kit/databricks-skills/databricks-agent-bricks .claude/skills/
## Custom Skills
-Create your own in `.claude/skills/my-skill/SKILL.md`:
+Create your own in your tool's skills directory (e.g., `.claude/skills/my-skill/SKILL.md` or `.kiro/skills/my-skill/SKILL.md`):
```markdown
---
@@ -151,7 +166,7 @@ description: "What this teaches"
## Troubleshooting
-**Skills not loading?** Check `.claude/skills/` exists and each skill has `SKILL.md`
+**Skills not loading?** Check that your tool's skills directory exists and each skill has `SKILL.md`
**Install fails?** Run `bash install_skills.sh` or check write permissions
diff --git a/databricks-skills/install_kiro_skills.py b/databricks-skills/install_kiro_skills.py
new file mode 100644
index 00000000..ac43d778
--- /dev/null
+++ b/databricks-skills/install_kiro_skills.py
@@ -0,0 +1,336 @@
+#!/usr/bin/env python3
+"""
+Databricks + MLflow + APX Skills Installer for Kiro
+(adapted from ai-dev-kit install_skills.sh)
+
+- Databricks skills: copied from local ai-dev-kit repo
+- MLflow skills: downloaded from github.com/mlflow/skills
+- APX skills: downloaded from github.com/databricks-solutions/apx
+
+All installed to ~/.kiro/skills/ (user-level, available globally).
+
+Usage:
+ python install_kiro_skills.py # Install/update all
+ python install_kiro_skills.py --list # List available
+ python install_kiro_skills.py databricks-jobs # Install specific
+ python install_kiro_skills.py agent-evaluation # MLflow skill
+ python install_kiro_skills.py --dry-run # Preview changes
+ python install_kiro_skills.py --only databricks # Only Databricks
+ python install_kiro_skills.py --only mlflow # Only MLflow
+ python install_kiro_skills.py --only apx # Only APX
+ python install_kiro_skills.py --mlflow-ref v1.0.0 # Pin MLflow version
+"""
+import os
+import sys
+import shutil
+import argparse
+from pathlib import Path
+from urllib.request import urlopen, Request
+from urllib.error import URLError, HTTPError
+
+# ── Configuration ──────────────────────────────────────────────────────────────
+# Auto-detect ai-dev-kit root: this script lives in /databricks-skills/
+SCRIPT_DIR = Path(__file__).resolve().parent
+AI_DEV_KIT = Path(os.environ.get("AI_DEV_KIT_PATH", str(SCRIPT_DIR.parent)))
+SKILLS_SOURCE = AI_DEV_KIT / "databricks-skills"
+KIRO_SKILLS_DIR = Path.home() / ".kiro" / "skills"
+
+MLFLOW_BASE_URL = "https://raw.githubusercontent.com/mlflow/skills"
+MLFLOW_REF = "main"
+APX_BASE_URL = "https://raw.githubusercontent.com/databricks-solutions/apx"
+APX_REF = "main"
+APX_SKILL_PATH = "skills/apx"
+
+# ── Skill Definitions ─────────────────────────────────────────────────────────
+DATABRICKS_SKILLS = [
+ "databricks-agent-bricks",
+ "databricks-ai-functions",
+ "databricks-aibi-dashboards",
+ "databricks-app-python",
+ "databricks-bundles",
+ "databricks-config",
+ "databricks-dbsql",
+ "databricks-docs",
+ "databricks-execution-compute",
+ "databricks-genie",
+ "databricks-iceberg",
+ "databricks-jobs",
+ "databricks-lakebase-autoscale",
+ "databricks-lakebase-provisioned",
+ "databricks-metric-views",
+ "databricks-mlflow-evaluation",
+ "databricks-model-serving",
+ "databricks-python-sdk",
+ "databricks-spark-declarative-pipelines",
+ "databricks-spark-structured-streaming",
+ "databricks-synthetic-data-gen",
+ "databricks-unity-catalog",
+ "databricks-unstructured-pdf-generation",
+ "databricks-vector-search",
+ "databricks-zerobus-ingest",
+ "spark-python-data-source",
+]
+
+MLFLOW_SKILLS = [
+ "agent-evaluation",
+ "analyze-mlflow-chat-session",
+ "analyze-mlflow-trace",
+ "instrumenting-with-mlflow-tracing",
+ "mlflow-onboarding",
+ "querying-mlflow-metrics",
+ "retrieving-mlflow-traces",
+ "searching-mlflow-docs",
+]
+
+MLFLOW_EXTRA_FILES = {
+ "agent-evaluation": [
+ "references/dataset-preparation.md",
+ "references/scorers-constraints.md",
+ "references/scorers.md",
+ "references/setup-guide.md",
+ "references/tracing-integration.md",
+ "references/troubleshooting.md",
+ "scripts/analyze_results.py",
+ "scripts/create_dataset_template.py",
+ "scripts/list_datasets.py",
+ "scripts/run_evaluation_template.py",
+ "scripts/setup_mlflow.py",
+ "scripts/validate_agent_tracing.py",
+ "scripts/validate_auth.py",
+ "scripts/validate_environment.py",
+ "scripts/validate_tracing_runtime.py",
+ ],
+ "analyze-mlflow-chat-session": [
+ "scripts/discover_schema.sh",
+ "scripts/inspect_turn.sh",
+ ],
+ "analyze-mlflow-trace": ["references/trace-structure.md"],
+ "instrumenting-with-mlflow-tracing": [
+ "references/advanced-patterns.md",
+ "references/distributed-tracing.md",
+ "references/feedback-collection.md",
+ "references/production.md",
+ "references/python.md",
+ "references/typescript.md",
+ ],
+ "querying-mlflow-metrics": [
+ "references/api_reference.md",
+ "scripts/fetch_metrics.py",
+ ],
+}
+
+APX_SKILLS = ["databricks-app-apx"]
+APX_EXTRA_FILES = {
+ "databricks-app-apx": [
+ "backend-patterns.md",
+ "best-practices.md",
+ "frontend-patterns.md",
+ ],
+}
+
+ALL_SKILLS = DATABRICKS_SKILLS + MLFLOW_SKILLS + APX_SKILLS
+
+
+def get_source_type(skill_name):
+ if skill_name in DATABRICKS_SKILLS:
+ return "databricks"
+ elif skill_name in MLFLOW_SKILLS:
+ return "mlflow"
+ elif skill_name in APX_SKILLS:
+ return "apx"
+ return None
+
+
+# ── Download Helper ────────────────────────────────────────────────────────────
+def download_file(url, dest_path):
+ """Download a file from URL. Returns True on success, False on 404/error."""
+ try:
+ req = Request(url, headers={"User-Agent": "kiro-skills-installer/1.0"})
+ with urlopen(req, timeout=30) as resp:
+ dest_path.parent.mkdir(parents=True, exist_ok=True)
+ dest_path.write_bytes(resp.read())
+ return True
+ except HTTPError as e:
+ if e.code == 404:
+ return False
+ print(f" HTTP {e.code} downloading {url}")
+ return False
+ except (URLError, OSError) as e:
+ print(f" Network error: {e}")
+ return False
+
+
+# ── Install Functions ──────────────────────────────────────────────────────────
+def install_databricks_skill(skill_name, dry_run=False):
+ src = SKILLS_SOURCE / skill_name
+ dst = KIRO_SKILLS_DIR / skill_name
+ if not src.exists() or not (src / "SKILL.md").exists():
+ print(f" SKIP {skill_name} - source not found")
+ return False
+ if dry_run:
+        fc = sum(1 for p in src.rglob("*") if p.is_file())
+ print(f" [DRY-RUN] {skill_name} ({fc} files) [local]")
+ return True
+ # Overwrite file-by-file (avoids lock issues when Kiro has files open)
+ dst.mkdir(parents=True, exist_ok=True)
+ fc = 0
+ for src_file in src.rglob("*"):
+ if src_file.is_file():
+ rel = src_file.relative_to(src)
+ dst_file = dst / rel
+ dst_file.parent.mkdir(parents=True, exist_ok=True)
+ shutil.copy2(src_file, dst_file)
+ fc += 1
+ print(f" OK {skill_name} ({fc} files) [local]")
+ return True
+
+
+def install_remote_skill(
+ skill_name, base_url, ref, extra_files_map, skill_path="", dry_run=False
+):
+ dst = KIRO_SKILLS_DIR / skill_name
+ prefix = (
+ f"{base_url}/{ref}/{skill_path}"
+ if skill_path
+ else f"{base_url}/{ref}/{skill_name}"
+ )
+ if dry_run:
+ extras = extra_files_map.get(skill_name, [])
+ print(f" [DRY-RUN] {skill_name} (1 + {len(extras)} files) [remote]")
+ return True
+ dst.mkdir(parents=True, exist_ok=True)
+ if not download_file(f"{prefix}/SKILL.md", dst / "SKILL.md"):
+ print(f" FAIL {skill_name} - SKILL.md not found")
+ return False
+ fc = 1
+ for extra in extra_files_map.get(skill_name, []):
+ if download_file(f"{prefix}/{extra}", dst / extra):
+ fc += 1
+ else:
+ print(f" optional: {extra} not found")
+ print(f" OK {skill_name} ({fc} files) [remote]")
+ return True
+
+
+def install_skill(skill_name, dry_run=False, mlflow_ref="main", apx_ref="main"):
+ stype = get_source_type(skill_name)
+ if stype == "databricks":
+ return install_databricks_skill(skill_name, dry_run)
+ elif stype == "mlflow":
+ return install_remote_skill(
+ skill_name, MLFLOW_BASE_URL, mlflow_ref, MLFLOW_EXTRA_FILES, dry_run=dry_run
+ )
+ elif stype == "apx":
+ return install_remote_skill(
+ skill_name,
+ APX_BASE_URL,
+ apx_ref,
+ APX_EXTRA_FILES,
+ skill_path=APX_SKILL_PATH,
+ dry_run=dry_run,
+ )
+ print(f" SKIP {skill_name} - unknown source")
+ return False
+
+
+# ── List ───────────────────────────────────────────────────────────────────────
+def list_skills():
+ print(f"\nLocal source: {SKILLS_SOURCE}")
+ print(f"Target: {KIRO_SKILLS_DIR}\n")
+ for label, skills in [
+ ("Databricks", DATABRICKS_SKILLS),
+ ("MLflow", MLFLOW_SKILLS),
+ ("APX", APX_SKILLS),
+ ]:
+ print(f" [{label}]")
+ for skill in skills:
+ status = (
+ "installed" if (KIRO_SKILLS_DIR / skill / "SKILL.md").exists() else "-"
+ )
+ print(f" {skill:<45} {status}")
+ print()
+
+
+# ── Main ───────────────────────────────────────────────────────────────────────
+def main():
+ parser = argparse.ArgumentParser(
+ description="Install Databricks/MLflow/APX skills for Kiro"
+ )
+ parser.add_argument("skills", nargs="*", help="Specific skills (default: all)")
+ parser.add_argument(
+ "--list", "-l", action="store_true", help="List available skills"
+ )
+ parser.add_argument(
+ "--dry-run", "-n", action="store_true", help="Preview without installing"
+ )
+ parser.add_argument(
+ "--only",
+ choices=["databricks", "mlflow", "apx"],
+ help="Install only one category",
+ )
+ parser.add_argument(
+ "--mlflow-ref", default=MLFLOW_REF, help="MLflow repo ref (default: main)"
+ )
+ parser.add_argument(
+ "--apx-ref", default=APX_REF, help="APX repo ref (default: main)"
+ )
+ args = parser.parse_args()
+
+ if args.list:
+ list_skills()
+ return
+
+ KIRO_SKILLS_DIR.mkdir(parents=True, exist_ok=True)
+
+ if args.skills:
+ skills = args.skills
+ for s in skills:
+ if s not in ALL_SKILLS:
+ print(
+ f"ERROR: Unknown skill '{s}'. Use --list to see available skills."
+ )
+ sys.exit(1)
+ elif args.only == "databricks":
+ skills = DATABRICKS_SKILLS
+ elif args.only == "mlflow":
+ skills = MLFLOW_SKILLS
+ elif args.only == "apx":
+ skills = APX_SKILLS
+ else:
+ skills = ALL_SKILLS
+
+ has_db = any(get_source_type(s) == "databricks" for s in skills)
+ if has_db and not SKILLS_SOURCE.exists():
+ print(f"ERROR: ai-dev-kit not found at {SKILLS_SOURCE}")
+ print(
+            "Set AI_DEV_KIT_PATH env var or ensure this script is inside ai-dev-kit/databricks-skills/"
+ )
+ sys.exit(1)
+
+ action = "DRY-RUN" if args.dry_run else "Installing"
+ db = sum(1 for s in skills if get_source_type(s) == "databricks")
+ ml = sum(1 for s in skills if get_source_type(s) == "mlflow")
+ ax = sum(1 for s in skills if get_source_type(s) == "apx")
+ print(f"\n{action} {len(skills)} skills ({db} databricks, {ml} mlflow, {ax} apx)")
+ print(f"Target: {KIRO_SKILLS_DIR}\n")
+
+ ok, fail = 0, 0
+ for skill in skills:
+ try:
+ if install_skill(skill, args.dry_run, args.mlflow_ref, args.apx_ref):
+ ok += 1
+ else:
+ fail += 1
+ except Exception as e:
+ print(f" FAIL {skill}: {e}")
+ fail += 1
+
+ print(f"\nDone: {ok} installed, {fail} failed\n")
+
+
+if __name__ == "__main__":
+ try:
+ main()
+ except Exception as e:
+ print(f"ERROR: {e}")
+ sys.exit(1)
diff --git a/databricks-skills/install_skills.sh b/databricks-skills/install_skills.sh
index 7630615c..a1d422de 100755
--- a/databricks-skills/install_skills.sh
+++ b/databricks-skills/install_skills.sh
@@ -2,8 +2,8 @@
#
# Databricks Skills Installer
#
-# Installs Databricks skills for Claude Code into your project.
-# These skills teach Claude how to work with Databricks using MCP tools.
+# Installs Databricks skills for AI coding assistants (Claude Code, Kiro, etc.) into your project.
+# These skills teach your assistant how to work with Databricks using MCP tools.
#
# Usage:
# # Install all skills (Databricks + MLflow + APX)
@@ -17,7 +17,9 @@
# ./install_skills.sh databricks-bundles agent-evaluation # Install specific skills
# ./install_skills.sh --mlflow-version v1.0.0 # Pin MLflow skills version
# ./install_skills.sh --local # Install Databricks skills from local directory
-# ./install_skills.sh --install-to-genie # Install then upload ./.claude/skills to workspace (Genie Code / Assistant)
+# ./install_skills.sh --tool kiro # Install to .kiro/skills/ for Kiro
+# ./install_skills.sh --tool kiro --local # Install from local for Kiro
+# ./install_skills.sh --install-to-genie # Install then upload skills to workspace (Genie Code / Assistant)
# ./install_skills.sh --install-to-genie --profile prod --local
# ./install_skills.sh --list # List available skills
# ./install_skills.sh --help # Show help
@@ -175,7 +177,7 @@ get_mlflow_skill_extra_files() {
# Show usage
show_help() {
- echo -e "${BLUE}Databricks Skills Installer for Claude Code${NC}"
+ echo -e "${BLUE}Databricks Skills Installer for AI Coding Assistants${NC}"
echo ""
echo "Usage:"
echo " ./install_skills.sh [options] [skill1 skill2 ...]"
@@ -185,7 +187,9 @@ show_help() {
echo " --list, -l List all available skills"
echo " --all, -a Install all skills (default if no skills specified)"
echo " --local Install from local files instead of downloading"
- echo " --install-to-genie After install, upload ./.claude/skills to workspace"
+ echo " --tool Target tool: claude (default), kiro, cursor, copilot, codex, gemini"
+ echo " Sets the skills directory (e.g., --tool kiro → .kiro/skills)"
+ echo " --install-to-genie After install, upload skills to workspace"
echo " /Users//.assistant/skills for Genie Code (uses cwd; requires databricks CLI)"
echo " --profile Databricks config profile for workspace upload (default: DEFAULT or \$DATABRICKS_CONFIG_PROFILE)"
echo " --mlflow-version [ Pin MLflow skills to specific version/branch/tag (default: main)"
@@ -199,6 +203,8 @@ show_help() {
echo " ./install_skills.sh --mlflow-version v1.0.0 # Pin MLflow skills version"
echo " ./install_skills.sh --apx-version v1.0.0 # Pin APX skills version"
echo " ./install_skills.sh --local # Install all from local directory"
+ echo " ./install_skills.sh --tool kiro # Install to .kiro/skills/ for Kiro"
+ echo " ./install_skills.sh --tool kiro --local # Install from local for Kiro"
echo " ./install_skills.sh --install-to-genie # Install all, then upload to workspace for Genie Code"
echo " ./install_skills.sh --install-to-genie --profile prod # Same with explicit Databricks CLI profile"
echo " ./install_skills.sh --list # List available skills"
@@ -552,6 +558,22 @@ while [ $# -gt 0 ]; do
INSTALL_FROM_LOCAL=true
shift
;;
+ --tool)
+ if [ -z "$2" ] || [ "${2:0:1}" = "-" ]; then
+ echo -e "${RED}Error: --tool requires a tool name (claude, kiro, cursor, copilot, codex, gemini)${NC}"
+ exit 1
+ fi
+ case "$2" in
+ claude) SKILLS_DIR=".claude/skills" ;;
+ kiro) SKILLS_DIR=".kiro/skills" ;;
+ cursor) SKILLS_DIR=".cursor/skills" ;;
+ copilot) SKILLS_DIR=".github/skills" ;;
+ codex) SKILLS_DIR=".agents/skills" ;;
+ gemini) SKILLS_DIR=".gemini/skills" ;;
+ *) echo -e "${RED}Unknown tool: $2. Use: claude, kiro, cursor, copilot, codex, gemini${NC}"; exit 1 ;;
+ esac
+ shift 2
+ ;;
--install-to-genie|--deploy-to-assistant)
INSTALL_TO_GENIE=true
shift
@@ -612,7 +634,7 @@ fi
# Header
echo -e "${BLUE}╔════════════════════════════════════════════════════════════╗${NC}"
-echo -e "${BLUE}║ Databricks Skills Installer for Claude Code ║${NC}"
+echo -e "${BLUE}║ Databricks Skills Installer for AI Coding Assistants ║${NC}"
echo -e "${BLUE}╚════════════════════════════════════════════════════════════╝${NC}"
echo ""
@@ -627,7 +649,7 @@ if [ ! -d ".git" ] && [ ! -f "pyproject.toml" ] && [ ! -f "package.json" ] && [
fi
fi
-# Create .claude/skills directory if it doesn't exist
+# Create skills directory if it doesn't exist
if [ ! -d "$SKILLS_DIR" ]; then
echo -e "${GREEN}Creating $SKILLS_DIR directory...${NC}"
mkdir -p "$SKILLS_DIR"
diff --git a/install.ps1 b/install.ps1
index e4b4ab67..43a8469c 100644
--- a/install.ps1
+++ b/install.ps1
@@ -1,7 +1,7 @@
#
# Databricks AI Dev Kit - Unified Installer (Windows)
#
-# Installs skills, MCP server, and configuration for Claude Code, Cursor, OpenAI Codex, GitHub Copilot, Gemini CLI, and Antigravity.
+# Installs skills, MCP server, and configuration for Claude Code, Cursor, OpenAI Codex, GitHub Copilot, Gemini CLI, Antigravity, and Kiro.
#
# Usage: irm https://raw.githubusercontent.com/databricks-solutions/ai-dev-kit/main/install.ps1 -OutFile install.ps1
# .\install.ps1 [OPTIONS]
@@ -569,6 +569,8 @@ function Invoke-DetectTools {
$hasGemini = $null -ne (Get-Command gemini -ErrorAction SilentlyContinue)
$hasAntigravity = ($null -ne (Get-Command antigravity -ErrorAction SilentlyContinue)) -or
(Test-Path "$env:LOCALAPPDATA\Programs\Antigravity\Antigravity.exe")
+ $hasKiro = ($null -ne (Get-Command kiro -ErrorAction SilentlyContinue)) -or
+ (Test-Path "$env:LOCALAPPDATA\Programs\kiro\Kiro.exe")
$claudeState = $hasClaude; $claudeHint = if ($hasClaude) { "detected" } else { "not found" }
$cursorState = $hasCursor; $cursorHint = if ($hasCursor) { "detected" } else { "not found" }
@@ -576,9 +578,10 @@ function Invoke-DetectTools {
$copilotState = $hasCopilot; $copilotHint = if ($hasCopilot) { "detected" } else { "not found" }
$geminiState = $hasGemini; $geminiHint = if ($hasGemini) { "detected" } else { "not found" }
$antigravityState = $hasAntigravity; $antigravityHint = if ($hasAntigravity) { "detected" } else { "not found" }
+ $kiroState = $hasKiro; $kiroHint = if ($hasKiro) { "detected" } else { "not found" }
# If nothing detected, default to claude
- if (-not $hasClaude -and -not $hasCursor -and -not $hasCodex -and -not $hasCopilot -and -not $hasGemini -and -not $hasAntigravity) {
+ if (-not $hasClaude -and -not $hasCursor -and -not $hasCodex -and -not $hasCopilot -and -not $hasGemini -and -not $hasAntigravity -and -not $hasKiro) {
$claudeState = $true
$claudeHint = "default"
}
@@ -595,6 +598,7 @@ function Invoke-DetectTools {
@{ Label = "OpenAI Codex"; Value = "codex"; State = $codexState; Hint = $codexHint }
@{ Label = "Gemini CLI"; Value = "gemini"; State = $geminiState; Hint = $geminiHint }
@{ Label = "Antigravity"; Value = "antigravity"; State = $antigravityState; Hint = $antigravityHint }
+ @{ Label = "Kiro"; Value = "kiro"; State = $kiroState; Hint = $kiroHint }
)
$result = Select-Checkbox -Items $items
@@ -1164,6 +1168,13 @@ function Install-Skills {
$dirs += Join-Path $BaseDir ".agents\skills"
}
}
+ "kiro" {
+ if ($script:Scope -eq "global") {
+ $dirs += Join-Path $env:USERPROFILE ".kiro\skills"
+ } else {
+ $dirs += Join-Path $BaseDir ".kiro\skills"
+ }
+ }
}
}
$dirs = $dirs | Select-Object -Unique
@@ -1589,6 +1600,14 @@ function Write-McpConfigs {
Write-GeminiMcpJson (Join-Path $env:USERPROFILE ".gemini\antigravity\mcp_config.json")
Write-Ok "Antigravity MCP config"
}
+ "kiro" {
+ if ($script:Scope -eq "global") {
+ Write-McpJson (Join-Path $env:USERPROFILE ".kiro\settings\mcp.json")
+ } else {
+ Write-McpJson (Join-Path $BaseDir ".kiro\settings\mcp.json")
+ }
+ Write-Ok "Kiro MCP config"
+ }
}
}
}
diff --git a/install.sh b/install.sh
index 114cb2c4..be0bada8 100644
--- a/install.sh
+++ b/install.sh
@@ -2,7 +2,7 @@
#
# Databricks AI Dev Kit - Unified Installer
#
-# Installs skills, MCP server, and configuration for Claude Code, Cursor, OpenAI Codex, GitHub Copilot, Gemini CLI, and Antigravity.
+# Installs skills, MCP server, and configuration for Claude Code, Cursor, OpenAI Codex, GitHub Copilot, Gemini CLI, Antigravity, and Kiro.
#
# Usage: bash <(curl -sL https://raw.githubusercontent.com/databricks-solutions/ai-dev-kit/main/install.sh) [OPTIONS]
#
@@ -503,6 +503,7 @@ detect_tools() {
local has_copilot=false
local has_gemini=false
local has_antigravity=false
+ local has_kiro=false
command -v claude >/dev/null 2>&1 && has_claude=true
{ [ -d "/Applications/Cursor.app" ] || command -v cursor >/dev/null 2>&1; } && has_cursor=true
@@ -510,19 +511,21 @@ detect_tools() {
{ [ -d "/Applications/Visual Studio Code.app" ] || command -v code >/dev/null 2>&1; } && has_copilot=true
{ command -v gemini >/dev/null 2>&1 || [ -f "$HOME/.gemini/local/gemini" ]; } && has_gemini=true
{ [ -d "/Applications/Antigravity.app" ] || command -v antigravity >/dev/null 2>&1; } && has_antigravity=true
+  { [ -d "/Applications/Kiro.app" ] || command -v kiro >/dev/null 2>&1; } && has_kiro=true
# Build checkbox items: "Label|value|on_or_off|hint"
- local claude_state="off" cursor_state="off" codex_state="off" copilot_state="off" gemini_state="off" antigravity_state="off"
- local claude_hint="not found" cursor_hint="not found" codex_hint="not found" copilot_hint="not found" gemini_hint="not found" antigravity_hint="not found"
+ local claude_state="off" cursor_state="off" codex_state="off" copilot_state="off" gemini_state="off" antigravity_state="off" kiro_state="off"
+ local claude_hint="not found" cursor_hint="not found" codex_hint="not found" copilot_hint="not found" gemini_hint="not found" antigravity_hint="not found" kiro_hint="not found"
[ "$has_claude" = true ] && claude_state="on" && claude_hint="detected"
[ "$has_cursor" = true ] && cursor_state="on" && cursor_hint="detected"
[ "$has_codex" = true ] && codex_state="on" && codex_hint="detected"
[ "$has_copilot" = true ] && copilot_state="on" && copilot_hint="detected"
[ "$has_gemini" = true ] && gemini_state="on" && gemini_hint="detected"
[ "$has_antigravity" = true ] && antigravity_state="on" && antigravity_hint="detected"
+ [ "$has_kiro" = true ] && kiro_state="on" && kiro_hint="detected"
# If nothing detected, pre-select claude as default
- if [ "$has_claude" = false ] && [ "$has_cursor" = false ] && [ "$has_codex" = false ] && [ "$has_copilot" = false ] && [ "$has_gemini" = false ] && [ "$has_antigravity" = false ]; then
+ if [ "$has_claude" = false ] && [ "$has_cursor" = false ] && [ "$has_codex" = false ] && [ "$has_copilot" = false ] && [ "$has_gemini" = false ] && [ "$has_antigravity" = false ] && [ "$has_kiro" = false ]; then
claude_state="on"
claude_hint="default"
fi
@@ -539,6 +542,7 @@ detect_tools() {
"OpenAI Codex|codex|${codex_state}|${codex_hint}" \
"Gemini CLI|gemini|${gemini_state}|${gemini_hint}" \
"Antigravity|antigravity|${antigravity_state}|${antigravity_hint}" \
+ "Kiro|kiro|${kiro_state}|${kiro_hint}" \
)
else
# Silent: use detected defaults
@@ -549,6 +553,7 @@ detect_tools() {
[ "$has_codex" = true ] && tools="${tools:+$tools }codex"
[ "$has_gemini" = true ] && tools="${tools:+$tools }gemini"
[ "$has_antigravity" = true ] && tools="${tools:+$tools }antigravity"
+ [ "$has_kiro" = true ] && tools="${tools:+$tools }kiro"
[ -z "$tools" ] && tools="claude"
TOOLS="$tools"
fi
@@ -1096,6 +1101,13 @@ install_skills() {
dirs+=("$base_dir/.agents/skills")
fi
;;
+ kiro)
+ if [ "$SCOPE" = "global" ]; then
+ dirs+=("$HOME/.kiro/skills")
+ else
+ dirs+=("$base_dir/.kiro/skills")
+ fi
+ ;;
esac
done
@@ -1486,6 +1498,14 @@ write_mcp_configs() {
write_gemini_mcp_json "$HOME/.gemini/antigravity/mcp_config.json"
ok "Antigravity MCP config"
;;
+ kiro)
+ if [ "$SCOPE" = "global" ]; then
+ write_mcp_json "$HOME/.kiro/settings/mcp.json"
+ else
+ write_mcp_json "$base_dir/.kiro/settings/mcp.json"
+ fi
+ ok "Kiro MCP config"
+ ;;
esac
done
}
]