diff --git a/docs/installation.md b/docs/installation.md index 970d9fbdd..f4bc35893 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -10,21 +10,45 @@ brew install --cask seqra/tap/opentaint ## Install Scripts +The install scripts accept an optional version argument. Without one, the latest GitHub release is installed. + **Linux/macOS:** ```bash +# Latest curl -fsSL https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.sh | bash + +# Specific version (leading 'v' is optional) +curl -fsSL https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.sh | bash -s -- 1.2.3 ``` **Windows (PowerShell):** ```powershell +# Latest irm https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.ps1 | iex + +# Specific version +& ([scriptblock]::Create((irm https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.ps1))) -Version 1.2.3 ``` **Windows (CMD):** ```cmd +:: Latest curl -fsSL https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.cmd -o install.cmd && install.cmd && del install.cmd + +:: Specific version +curl -fsSL https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.cmd -o install.cmd && install.cmd 1.2.3 && del install.cmd ``` +### Environment variables + +| Variable | Effect | +|---|---| +| `OPENTAINT_REPOSITORY` | Override `seqra/opentaint` (for forks or mirrors) | +| `OPENTAINT_INSTALL_DIR` | Override the install destination | +| `OPENTAINT_FORCE` | Set to `1` to install side-by-side with an existing Homebrew install | + +If opentaint is already installed via Homebrew, the install scripts refuse to run and print the Homebrew upgrade command instead. Set `OPENTAINT_FORCE=1` to force a parallel install. 
+ ## Docker No local installation required: diff --git a/docs/superpowers/plans/2026-04-16-prune-lockfiles-granular-flags.md b/docs/superpowers/plans/2026-04-16-prune-lockfiles-granular-flags.md deleted file mode 100644 index 3af3b40e2..000000000 --- a/docs/superpowers/plans/2026-04-16-prune-lockfiles-granular-flags.md +++ /dev/null @@ -1,1304 +0,0 @@ -# Prune: Lockfiles & Granular Flags Implementation Plan - -> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. - -**Goal:** Add cross-platform file locking and granular category flags to the `opentaint prune` command, and move logs to a separate top-level directory. - -**Architecture:** Introduce a `lock.go` module wrapping `github.com/gofrs/flock` for cross-platform advisory file locks. Refactor `ScanForStaleArtifacts` to accept a category set instead of a single `all` bool. Move log writing from `cache//logs/` to `logs//`. Replace `HasStagingDir` heuristic with flock-based compile locks. - -**Tech Stack:** Go 1.25, `github.com/gofrs/flock`, Cobra CLI framework - -**Spec:** `docs/superpowers/specs/2026-04-16-prune-lockfiles-granular-flags-design.md` - ---- - -### Task 1: Add `github.com/gofrs/flock` dependency - -**Files:** -- Modify: `cli/go.mod` -- Modify: `cli/go.sum` - -- [ ] **Step 1: Add the dependency** - -```bash -cd cli && go get github.com/gofrs/flock -``` - -- [ ] **Step 2: Verify it compiles** - -```bash -cd cli && go build ./... 
-``` -Expected: no errors - -- [ ] **Step 3: Commit** - -```bash -git add cli/go.mod cli/go.sum -git commit -m "chore: add github.com/gofrs/flock dependency" -``` - ---- - -### Task 2: Implement `lock.go` — cross-platform file locking - -**Files:** -- Create: `cli/internal/utils/lock.go` -- Create: `cli/internal/utils/lock_test.go` - -- [ ] **Step 1: Write failing tests for lock acquisition** - -Create `cli/internal/utils/lock_test.go` with tests: - -```go -package utils - -import ( - "os" - "path/filepath" - "testing" -) - -func TestTryLock(t *testing.T) { - t.Run("acquires lock on new file", func(t *testing.T) { - lockPath := filepath.Join(t.TempDir(), "test.lock") - lock, err := TryLock(lockPath, LockMeta{PID: os.Getpid(), Command: "test"}) - if err != nil { - t.Fatalf("TryLock() error = %v", err) - } - defer lock.Unlock() - }) - - t.Run("second lock on same file returns ErrLocked", func(t *testing.T) { - lockPath := filepath.Join(t.TempDir(), "test.lock") - lock1, err := TryLock(lockPath, LockMeta{PID: os.Getpid(), Command: "first"}) - if err != nil { - t.Fatalf("first TryLock() error = %v", err) - } - defer lock1.Unlock() - - _, err = TryLock(lockPath, LockMeta{PID: os.Getpid(), Command: "second"}) - if err != ErrLocked { - t.Fatalf("expected ErrLocked, got %v", err) - } - }) - - t.Run("lock released after Unlock allows re-acquisition", func(t *testing.T) { - lockPath := filepath.Join(t.TempDir(), "test.lock") - lock1, err := TryLock(lockPath, LockMeta{PID: os.Getpid(), Command: "first"}) - if err != nil { - t.Fatalf("first TryLock() error = %v", err) - } - lock1.Unlock() - - lock2, err := TryLock(lockPath, LockMeta{PID: os.Getpid(), Command: "second"}) - if err != nil { - t.Fatalf("second TryLock() error = %v", err) - } - defer lock2.Unlock() - }) -} - -func TestReadLockMeta(t *testing.T) { - t.Run("reads PID and command from lock file", func(t *testing.T) { - lockPath := filepath.Join(t.TempDir(), "test.lock") - meta := LockMeta{PID: 12345, Command: 
"compile", Project: "/tmp/my-project"} - lock, err := TryLock(lockPath, meta) - if err != nil { - t.Fatalf("TryLock() error = %v", err) - } - defer lock.Unlock() - - got, err := ReadLockMeta(lockPath) - if err != nil { - t.Fatalf("ReadLockMeta() error = %v", err) - } - if got.PID != 12345 { - t.Errorf("PID = %d, want 12345", got.PID) - } - if got.Command != "compile" { - t.Errorf("Command = %q, want %q", got.Command, "compile") - } - if got.Project != "/tmp/my-project" { - t.Errorf("Project = %q, want %q", got.Project, "/tmp/my-project") - } - }) - - t.Run("returns error for missing file", func(t *testing.T) { - _, err := ReadLockMeta(filepath.Join(t.TempDir(), "missing.lock")) - if err == nil { - t.Fatal("expected error for missing file") - } - }) -} -``` - -- [ ] **Step 2: Run tests to verify they fail** - -```bash -cd cli && go test ./internal/utils/ -run "TestTryLock|TestReadLockMeta" -v -``` -Expected: FAIL — `TryLock`, `LockMeta`, `ErrLocked`, `ReadLockMeta` undefined - -- [ ] **Step 3: Implement `lock.go`** - -Create `cli/internal/utils/lock.go`: - -```go -package utils - -import ( - "errors" - "fmt" - "os" - "path/filepath" - "strconv" - "strings" - - "github.com/gofrs/flock" -) - -// ErrLocked is returned when a lock file is already held by another process. -var ErrLocked = errors.New("lock is held by another process") - -// LockMeta holds diagnostic information written into lock files. -type LockMeta struct { - PID int - Command string - Project string -} - -// FileLock wraps a flock.Flock with its path for cleanup. -type FileLock struct { - flock *flock.Flock - path string -} - -// Unlock releases the advisory lock and removes the lock file. -func (l *FileLock) Unlock() { - _ = l.flock.Unlock() - _ = os.Remove(l.path) -} - -// TryLock attempts a non-blocking exclusive lock on the given path. -// On success it writes meta into the file and returns a FileLock. -// On failure because the lock is held, it returns ErrLocked. 
-func TryLock(lockPath string, meta LockMeta) (*FileLock, error) { - if err := os.MkdirAll(filepath.Dir(lockPath), 0o755); err != nil { - return nil, fmt.Errorf("failed to create lock directory: %w", err) - } - - fl := flock.New(lockPath) - locked, err := fl.TryLock() - if err != nil { - return nil, fmt.Errorf("failed to acquire lock: %w", err) - } - if !locked { - return nil, ErrLocked - } - - content := fmt.Sprintf("pid=%d\ncommand=%s\n", meta.PID, meta.Command) - if meta.Project != "" { - content += fmt.Sprintf("project=%s\n", meta.Project) - } - _ = os.WriteFile(lockPath, []byte(content), 0o644) - - return &FileLock{flock: fl, path: lockPath}, nil -} - -// ReadLockMeta reads diagnostic metadata from a lock file. -func ReadLockMeta(lockPath string) (LockMeta, error) { - data, err := os.ReadFile(lockPath) - if err != nil { - return LockMeta{}, err - } - var meta LockMeta - for _, line := range strings.Split(string(data), "\n") { - key, value, ok := strings.Cut(line, "=") - if !ok { - continue - } - switch key { - case "pid": - meta.PID, _ = strconv.Atoi(value) - case "command": - meta.Command = value - case "project": - meta.Project = value - } - } - return meta, nil -} - -// PruneLockPath returns the path to the global prune lock: ~/.opentaint/.prune.lock -func PruneLockPath() (string, error) { - home, err := GetOpenTaintHomePath() - if err != nil { - return "", err - } - return filepath.Join(home, ".prune.lock"), nil -} - -// CompileLockPath returns the path to a per-project compile lock: -// ~/.opentaint/cache//.compile.lock -func CompileLockPath(projectCachePath string) string { - return filepath.Join(projectCachePath, ".compile.lock") -} -``` - -- [ ] **Step 4: Run tests to verify they pass** - -```bash -cd cli && go test ./internal/utils/ -run "TestTryLock|TestReadLockMeta" -v -``` -Expected: PASS - -- [ ] **Step 5: Commit** - -```bash -git add cli/internal/utils/lock.go cli/internal/utils/lock_test.go -git commit -m "feat: add cross-platform file locking 
with gofrs/flock" -``` - ---- - -### Task 3: Move log writing to `~/.opentaint/logs//` - -**Files:** -- Modify: `cli/internal/utils/log/project_log.go` -- Modify: `cli/internal/utils/model_cache.go` (add `GetLogCacheDirPath` / `GetProjectLogPath`) -- Modify: `cli/cmd/logging.go` (use new log path) -- Modify: `cli/cmd/scan.go` (pass log path instead of cache path) - -- [ ] **Step 1: Write failing test for new log path helper** - -Add to `cli/internal/utils/model_cache_test.go` (or create it): - -```go -package utils - -import ( - "path/filepath" - "testing" -) - -func TestGetProjectLogPath(t *testing.T) { - home := t.TempDir() - t.Setenv("HOME", home) - - projectPath := "/Users/dev/my-project" - logPath, err := GetProjectLogPath(projectPath) - if err != nil { - t.Fatalf("GetProjectLogPath() error = %v", err) - } - - slugHash := ProjectPathSlugHash(projectPath) - expected := filepath.Join(home, ".opentaint", "logs", slugHash) - if logPath != expected { - t.Errorf("got %q, want %q", logPath, expected) - } -} - -func TestGetLogCacheDirPath(t *testing.T) { - home := t.TempDir() - t.Setenv("HOME", home) - - logDir, err := GetLogCacheDirPath() - if err != nil { - t.Fatalf("GetLogCacheDirPath() error = %v", err) - } - expected := filepath.Join(home, ".opentaint", "logs") - if logDir != expected { - t.Errorf("got %q, want %q", logDir, expected) - } -} -``` - -- [ ] **Step 2: Run tests to verify they fail** - -```bash -cd cli && go test ./internal/utils/ -run "TestGetProjectLogPath|TestGetLogCacheDirPath" -v -``` -Expected: FAIL — undefined functions - -- [ ] **Step 3: Add log path helpers to `model_cache.go`** - -Add to `cli/internal/utils/model_cache.go`: - -```go -const logsCacheDir = "logs" - -// GetLogCacheDirPath returns ~/.opentaint/logs/ without creating it. 
-func GetLogCacheDirPath() (string, error) { - opentaintHome, err := GetOpenTaintHomePath() - if err != nil { - return "", err - } - return filepath.Join(opentaintHome, logsCacheDir), nil -} - -// GetProjectLogPath returns ~/.opentaint/logs// for a project path, -// without creating the directory. The project path is canonicalized before hashing. -func GetProjectLogPath(projectPath string) (string, error) { - absPath, err := filepath.Abs(projectPath) - if err != nil { - return "", fmt.Errorf("failed to resolve absolute path: %w", err) - } - absPath, err = filepath.EvalSymlinks(absPath) - if err != nil { - return "", fmt.Errorf("failed to resolve symlinks: %w", err) - } - - logsDir, err := GetLogCacheDirPath() - if err != nil { - return "", err - } - - return filepath.Join(logsDir, ProjectPathSlugHash(absPath)), nil -} -``` - -- [ ] **Step 4: Run tests to verify they pass** - -```bash -cd cli && go test ./internal/utils/ -run "TestGetProjectLogPath|TestGetLogCacheDirPath" -v -``` -Expected: PASS - -- [ ] **Step 5: Update `project_log.go` to accept any directory** - -Modify `cli/internal/utils/log/project_log.go` — the function already takes a `cacheDir` parameter, so the caller just needs to pass the new logs dir. No change needed to `project_log.go` itself. - -- [ ] **Step 6: Update `logging.go` to use the new log directory** - -Modify `cli/cmd/logging.go`. 
Change `activateLogging` to accept a `logDir` parameter (the `logs/` path) instead of `projectCachePath`: - -```go -func activateLogging(logFilePath string, logDir string) { - var logPath string - var err error - - if logFilePath != "" { - logPath = log.AbsPathOrExit(logFilePath, "log file") - if _, err = log.OpenLogFileAt(logPath); err != nil { - out.Fatalf("Failed to open log file: %s", err) - } - } else if logDir != "" { - logPath, err = log.OpenProjectLog(logDir) - if err != nil { - out.Fatalf("Failed to open project log file: %s", err) - } - } - - if logPath != "" { - globals.LogPath = logPath - out.SetLogWriter(log.LogWriter()) - } -} - -func activateLoggingForProject(logFilePath string, projectPath string) { - logPath, err := utils.GetProjectLogPath(projectPath) - if err != nil { - output.LogInfof("Failed to resolve project log path: %v", err) - } - activateLogging(logFilePath, logPath) -} -``` - -- [ ] **Step 7: Update `scan.go` to pass log path** - -In `cli/cmd/scan.go`, in function `scan()` around line 164-169, change the logging block: - -Replace: -```go -if !DryRunScan { - if cfg.projectCachePath != "" { - activateLogging(ScanLogFile, cfg.projectCachePath) - } else { - activateLoggingForProject(ScanLogFile, absUserProjectRoot) - } -} -``` - -With: -```go -if !DryRunScan { - activateLoggingForProject(ScanLogFile, absUserProjectRoot) -} -``` - -The `activateLoggingForProject` function now resolves the log path via `GetProjectLogPath` which uses `logs//`, so the `projectCachePath` branch is no longer needed. - -- [ ] **Step 8: Verify everything compiles and existing tests pass** - -```bash -cd cli && go build ./... && go test ./... 
-``` -Expected: all pass - -- [ ] **Step 9: Commit** - -```bash -git add cli/internal/utils/model_cache.go cli/internal/utils/model_cache_test.go cli/cmd/logging.go cli/cmd/scan.go cli/internal/utils/log/project_log.go -git commit -m "refactor: move project logs to ~/.opentaint/logs//" -``` - ---- - -### Task 4: Introduce `PruneCategories` and refactor `ScanForStaleArtifacts` - -**Files:** -- Modify: `cli/internal/utils/prune.go` -- Modify: `cli/internal/utils/prune_test.go` - -- [ ] **Step 1: Define `PruneCategories` type and constants** - -Add to the top of `cli/internal/utils/prune.go`, replacing the existing `ScanForStaleArtifacts` signature. First, add the categories type: - -```go -// PruneCategory represents a class of artifacts that can be selectively pruned. -type PruneCategory int - -const ( - PruneCategoryArtifacts PruneCategory = 1 << iota - PruneCategoryRules - PruneCategoryJDK - PruneCategoryModels - PruneCategoryLogs - PruneCategoryInstall -) - -// PruneCategoriesDefault is the set pruned with no flags: artifacts + rules + jdk + models. -const PruneCategoriesDefault = PruneCategoryArtifacts | PruneCategoryRules | PruneCategoryJDK | PruneCategoryModels - -// PruneCategoriesAll is the set pruned with --all. 
-const PruneCategoriesAll = PruneCategoryArtifacts | PruneCategoryRules | PruneCategoryJDK | PruneCategoryModels | PruneCategoryLogs | PruneCategoryInstall -``` - -- [ ] **Step 2: Write failing tests for category-based scanning** - -Add new tests to `cli/internal/utils/prune_test.go`: - -```go -func TestScanForStaleArtifacts_Categories(t *testing.T) { - setupPruneTestGlobals(t) - - t.Run("artifacts-only prunes jars not rules", func(t *testing.T) { - home := t.TempDir() - t.Setenv("HOME", home) - opentaintHome := filepath.Join(home, ".opentaint") - createTestFile(t, filepath.Join(opentaintHome, "analyzer_0.9.0.jar"), 100) - createTestFile(t, filepath.Join(opentaintHome, "rules_v0.9.0", "rule.yaml"), 50) - - result, err := ScanForStaleArtifacts(PruneCategoryArtifacts) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - assertHasKind(t, result, StaleKindAnalyzer) - assertNoKind(t, result, StaleKindRules) - }) - - t.Run("rules-only prunes rules not jars", func(t *testing.T) { - home := t.TempDir() - t.Setenv("HOME", home) - opentaintHome := filepath.Join(home, ".opentaint") - createTestFile(t, filepath.Join(opentaintHome, "analyzer_0.9.0.jar"), 100) - createTestFile(t, filepath.Join(opentaintHome, "rules_v0.9.0", "rule.yaml"), 50) - - result, err := ScanForStaleArtifacts(PruneCategoryRules) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - assertNoKind(t, result, StaleKindAnalyzer) - assertHasKind(t, result, StaleKindRules) - }) - - t.Run("logs-only scans logs dir", func(t *testing.T) { - home := t.TempDir() - t.Setenv("HOME", home) - logsDir := filepath.Join(home, ".opentaint", "logs", "my-project-a1b2c3d4") - createTestFile(t, filepath.Join(logsDir, "2026-01-01.log"), 200) - - result, err := ScanForStaleArtifacts(PruneCategoryLogs) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - assertHasKind(t, result, StaleKindLog) - }) - - t.Run("default categories match old false behavior", func(t *testing.T) { - home := t.TempDir() - 
t.Setenv("HOME", home) - opentaintHome := filepath.Join(home, ".opentaint") - createTestFile(t, filepath.Join(opentaintHome, "analyzer_0.9.0.jar"), 100) - createTestFile(t, filepath.Join(opentaintHome, "rules_v0.9.0", "rule.yaml"), 50) - jdkDir := filepath.Join(opentaintHome, "jdk", "temurin-17-jdk+35") - createTestFile(t, filepath.Join(jdkDir, "bin", "java"), 50) - pmPath := filepath.Join(opentaintHome, "cache", "proj-abc12345", "project-model", "project.yaml") - createTestFile(t, pmPath, 50) - logsDir := filepath.Join(opentaintHome, "logs", "proj-abc12345") - createTestFile(t, filepath.Join(logsDir, "app.log"), 100) - - result, err := ScanForStaleArtifacts(PruneCategoriesDefault) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - assertHasKind(t, result, StaleKindAnalyzer) - assertHasKind(t, result, StaleKindRules) - assertHasKind(t, result, StaleKindJDK) - assertHasKind(t, result, StaleKindModel) - assertNoKind(t, result, StaleKindLog) - assertNoKind(t, result, StaleKindInstallLib) - }) - - t.Run("all categories include logs and install", func(t *testing.T) { - home := t.TempDir() - t.Setenv("HOME", home) - opentaintHome := filepath.Join(home, ".opentaint") - createTestFile(t, filepath.Join(opentaintHome, "analyzer_0.9.0.jar"), 100) - installLib := filepath.Join(opentaintHome, "install", "lib") - createTestFile(t, filepath.Join(installLib, "artifact.jar"), 100) - logsDir := filepath.Join(opentaintHome, "logs", "proj-abc12345") - createTestFile(t, filepath.Join(logsDir, "app.log"), 100) - - result, err := ScanForStaleArtifacts(PruneCategoriesAll) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - assertHasKind(t, result, StaleKindAnalyzer) - assertHasKind(t, result, StaleKindLog) - assertHasKind(t, result, StaleKindInstallLib) - }) -} -``` - -- [ ] **Step 3: Run tests to verify they fail** - -```bash -cd cli && go test ./internal/utils/ -run "TestScanForStaleArtifacts_Categories" -v -``` -Expected: FAIL — `ScanForStaleArtifacts` 
signature mismatch (expects `bool`, now called with `PruneCategory`) - -- [ ] **Step 4: Refactor `ScanForStaleArtifacts` to accept `PruneCategory`** - -Rewrite `cli/internal/utils/prune.go`. The `ScanForStaleArtifacts` function changes its parameter from `all bool` to `categories PruneCategory`. Key changes: - -```go -// has reports whether the given category is included in the set. -func (c PruneCategory) has(cat PruneCategory) bool { - return c&cat != 0 -} - -func ScanForStaleArtifacts(categories PruneCategory) (*PruneResult, error) { - opentaintHome, err := GetOpenTaintHomePath() - if err != nil { - return nil, fmt.Errorf("failed to get opentaint home: %w", err) - } - - result := &PruneResult{} - - entries, err := os.ReadDir(opentaintHome) - if os.IsNotExist(err) { - return result, nil - } - if err != nil { - return nil, fmt.Errorf("failed to read opentaint home: %w", err) - } - - if categories.has(PruneCategoryArtifacts) || categories.has(PruneCategoryRules) { - artifacts := globals.Artifacts() - for _, entry := range entries { - name := entry.Name() - fullPath := filepath.Join(opentaintHome, name) - - if name == "cache" || name == "logs" || name == "install" || strings.HasPrefix(name, ".") { - continue - } - - for _, def := range artifacts { - isArtifact := def.Kind() == "rules" - wantCategory := PruneCategoryRules - if !isArtifact { - wantCategory = PruneCategoryArtifacts - } - if !categories.has(wantCategory) { - if strings.HasPrefix(name, def.CachePrefix) { - break - } - continue - } - if artifact := checkStale(def, name, fullPath); artifact != nil { - result.Add(*artifact) - break - } - if strings.HasPrefix(name, def.CachePrefix) { - break - } - } - } - } - - // JDK/JRE - if categories.has(PruneCategoryJDK) { - for _, kind := range []string{StaleKindJDK, StaleKindJRE} { - javaDir := filepath.Join(opentaintHome, kind) - subEntries, err := os.ReadDir(javaDir) - if err != nil { - continue - } - currentPrefix := fmt.Sprintf("temurin-%d-", 
globals.DefaultJavaVersion) - for _, subEntry := range subEntries { - if strings.HasPrefix(subEntry.Name(), currentPrefix) { - continue - } - subPath := filepath.Join(javaDir, subEntry.Name()) - size, _ := dirSize(subPath) - result.Add(StaleArtifact{Path: subPath, Size: size, Kind: kind}) - } - } - } - - // Install-tier - if categories.has(PruneCategoryInstall) { - for _, check := range []struct { - path string - kind string - }{ - {GetInstallLibPath(), StaleKindInstallLib}, - {GetInstallJREPath(), StaleKindInstallJRE}, - } { - if check.path == "" { - continue - } - if _, err := os.Stat(check.path); err != nil { - continue - } - size, _ := dirSize(check.path) - result.Add(StaleArtifact{Path: check.path, Size: size, Kind: check.kind}) - } - } - - // Models (cache dir) - if categories.has(PruneCategoryModels) { - modelsDir, mErr := GetModelCacheDirPath() - if mErr != nil { - output.LogDebugf("Failed to resolve model cache path: %v", mErr) - } - if info, err := os.Stat(modelsDir); err == nil && info.IsDir() { - modelEntries, err := os.ReadDir(modelsDir) - if err == nil { - for _, modelEntry := range modelEntries { - if !modelEntry.IsDir() { - continue - } - projectCachePath := filepath.Join(modelsDir, modelEntry.Name()) - scanProjectCacheSubdirs(projectCachePath, result) - } - } - } - } - - // Logs (separate logs dir) - if categories.has(PruneCategoryLogs) { - logsDir, lErr := GetLogCacheDirPath() - if lErr != nil { - output.LogDebugf("Failed to resolve log cache path: %v", lErr) - } - if info, err := os.Stat(logsDir); err == nil && info.IsDir() { - logEntries, err := os.ReadDir(logsDir) - if err == nil { - for _, logEntry := range logEntries { - if !logEntry.IsDir() { - continue - } - logProjectDir := filepath.Join(logsDir, logEntry.Name()) - size, _ := dirSize(logProjectDir) - if size > 0 { - result.Add(StaleArtifact{Path: logProjectDir, Size: size, Kind: StaleKindLog}) - } - } - } - } - } - - return result, nil -} -``` - -Note: The `scanProjectCacheSubdirs` 
function remains for non-`--all` model scanning. With the categories approach, `--all` maps to `PruneCategoriesAll` which includes `PruneCategoryModels` — but models now always prune just `project-model/` and `.staging-*` (the old `--all` behavior of pruning entire cache dirs is replaced by `--models` + `--logs` together covering the same ground, since logs are now in a separate dir). - -- [ ] **Step 5: Update existing tests to use new signature** - -In `cli/internal/utils/prune_test.go`, update all calls to `ScanForStaleArtifacts`: - -- `ScanForStaleArtifacts(false)` → `ScanForStaleArtifacts(PruneCategoriesDefault)` -- `ScanForStaleArtifacts(true)` → `ScanForStaleArtifacts(PruneCategoriesAll)` - -Tests related to "logs in cache dirs" can be updated or removed since logs now live in `logs/` not `cache/`. The `TestScanForStaleArtifacts_LogsInCacheDirs` tests should be replaced with tests for the new `logs/` directory scanning. - -Update `TestScanForStaleArtifacts_CachedModels`: -- Remove the "cached model with all prunes entire dir" test — models now always target `project-model/` specifically -- Update "no size double-counting" test — logs are in a separate dir now, so the test creates files in `logs/` instead of `cache//logs/` -- The "logs preserved without all flag" test becomes: logs are in `logs/` dir, `PruneCategoriesDefault` doesn't include `PruneCategoryLogs` - -- [ ] **Step 6: Run all tests** - -```bash -cd cli && go test ./internal/utils/ -v -``` -Expected: PASS - -- [ ] **Step 7: Commit** - -```bash -git add cli/internal/utils/prune.go cli/internal/utils/prune_test.go -git commit -m "refactor: replace bool flag with PruneCategory bitmask in ScanForStaleArtifacts" -``` - ---- - -### Task 5: Add granular flags to the prune command - -**Files:** -- Modify: `cli/cmd/prune.go` - -- [ ] **Step 1: Update `prune.go` with new flags and category resolution** - -Rewrite `cli/cmd/prune.go`: - -```go -package cmd - -import ( - "fmt" - - 
"github.com/seqra/opentaint/internal/output" - "github.com/seqra/opentaint/internal/utils" - "github.com/spf13/cobra" -) - -var ( - pruneDryRun bool - pruneYes bool - pruneAll bool - pruneArtifacts bool - pruneRules bool - pruneJDK bool - pruneModels bool - pruneLogs bool - pruneInstall bool -) - -// resolveCategories maps CLI flags to a PruneCategory bitmask. -// Returns an error if --all is combined with specific flags. -func resolveCategories() (utils.PruneCategory, error) { - specific := pruneArtifacts || pruneRules || pruneJDK || pruneModels || pruneLogs || pruneInstall - if pruneAll && specific { - return 0, fmt.Errorf("--all cannot be combined with specific category flags (--artifacts, --rules, --jdk, --models, --logs, --install)") - } - if pruneAll { - return utils.PruneCategoriesAll, nil - } - if !specific { - return utils.PruneCategoriesDefault, nil - } - - var cats utils.PruneCategory - if pruneArtifacts { - cats |= utils.PruneCategoryArtifacts - } - if pruneRules { - cats |= utils.PruneCategoryRules - } - if pruneJDK { - cats |= utils.PruneCategoryJDK - } - if pruneModels { - cats |= utils.PruneCategoryModels - } - if pruneLogs { - cats |= utils.PruneCategoryLogs - } - if pruneInstall { - cats |= utils.PruneCategoryInstall - } - return cats, nil -} - -var pruneCmd = &cobra.Command{ - Use: "prune", - Short: "Remove stale downloaded artifacts from ~/.opentaint", - Long: `Remove stale downloaded artifacts from the local cache (~/.opentaint). 
- -Identifies artifacts that are no longer needed: -- Old versions of analyzer JARs, autobuilder JARs, and rules -- Downloaded JDK/JRE versions that don't match the current version -- Cached project models and staging directories - -Use category flags to prune selectively: - --artifacts Stale analyzer and autobuilder JARs - --rules Stale rules directories - --jdk Old JDK/JRE versions - --models Cached project models and staging directories - --logs Project log files - --install Install-tier lib and JRE artifacts - -Without category flags, prunes: artifacts + rules + jdk + models. -With --all: prunes everything including logs and install-tier.`, - Run: func(cmd *cobra.Command, args []string) { - categories, err := resolveCategories() - if err != nil { - out.FatalErr(err) - } - - result, err := utils.ScanForStaleArtifacts(categories) - if err != nil { - out.Fatalf("Failed to scan for stale artifacts: %s", err) - } - - if result.TotalCount == 0 { - out.Print("No stale artifacts found. Nothing to prune.") - return - } - - sb := out.Section("Stale Artifacts") - for _, artifact := range result.Stale { - sb.Text(fmt.Sprintf("%s (%s) - %s", artifact.Path, artifact.Kind, output.FormatSize(artifact.Size))) - } - sb.Line(). - Text(fmt.Sprintf("Total: %d items, %s", result.TotalCount, output.FormatSize(result.TotalSize))). - Render() - - if pruneDryRun { - out.Print("Dry run mode. 
No files were deleted.") - return - } - - if !pruneYes { - if !out.Confirm("Delete these artifacts?", false) { - out.Print("Prune cancelled.") - return - } - } - - if err := utils.DeleteArtifacts(result.Stale); err != nil { - out.Fatalf("Failed to delete artifacts: %s", err) - } - - out.Successf("Pruned %d items, freed %s", result.TotalCount, output.FormatSize(result.TotalSize)) - }, -} - -func init() { - rootCmd.AddCommand(pruneCmd) - - pruneCmd.Flags().BoolVar(&pruneDryRun, "dry-run", false, "Show what would be deleted without deleting") - pruneCmd.Flags().BoolVar(&pruneYes, "yes", false, "Skip interactive confirmation") - pruneCmd.Flags().BoolVar(&pruneAll, "all", false, "Prune everything including logs and install-tier artifacts") - pruneCmd.Flags().BoolVar(&pruneArtifacts, "artifacts", false, "Prune stale analyzer and autobuilder JARs") - pruneCmd.Flags().BoolVar(&pruneRules, "rules", false, "Prune stale rules directories") - pruneCmd.Flags().BoolVar(&pruneJDK, "jdk", false, "Prune old JDK/JRE versions") - pruneCmd.Flags().BoolVar(&pruneModels, "models", false, "Prune cached project models and staging directories") - pruneCmd.Flags().BoolVar(&pruneLogs, "logs", false, "Prune project log files") - pruneCmd.Flags().BoolVar(&pruneInstall, "install", false, "Prune install-tier lib and JRE artifacts") -} -``` - -- [ ] **Step 2: Verify it compiles** - -```bash -cd cli && go build ./... 
-``` -Expected: no errors - -- [ ] **Step 3: Commit** - -```bash -git add cli/cmd/prune.go -git commit -m "feat: add granular prune flags (--artifacts, --rules, --jdk, --models, --logs, --install)" -``` - ---- - -### Task 6: Add locking to prune command - -**Files:** -- Modify: `cli/cmd/prune.go` -- Modify: `cli/internal/utils/prune.go` (add `SkippedProject` to `PruneResult`) - -- [ ] **Step 1: Write failing test for skip reporting** - -Add to `cli/internal/utils/prune_test.go`: - -```go -func TestPruneResult_AddSkipped(t *testing.T) { - result := &PruneResult{} - result.AddSkipped(SkippedProject{ - Path: "/home/user/.opentaint/cache/my-project-abc12345", - Meta: LockMeta{PID: 12345, Command: "compile"}, - }) - - if len(result.Skipped) != 1 { - t.Fatalf("expected 1 skipped, got %d", len(result.Skipped)) - } - if result.Skipped[0].Meta.PID != 12345 { - t.Errorf("expected PID 12345, got %d", result.Skipped[0].Meta.PID) - } -} -``` - -- [ ] **Step 2: Run test to verify it fails** - -```bash -cd cli && go test ./internal/utils/ -run "TestPruneResult_AddSkipped" -v -``` -Expected: FAIL — `SkippedProject`, `AddSkipped`, `Skipped` undefined - -- [ ] **Step 3: Add `SkippedProject` to prune types** - -In `cli/internal/utils/prune.go`, add: - -```go -// SkippedProject represents a project cache that was skipped because a compile lock was held. -type SkippedProject struct { - Path string - Meta LockMeta -} - -// PruneResult contains the results of scanning for stale artifacts. -type PruneResult struct { - Stale []StaleArtifact - Skipped []SkippedProject - TotalSize int64 - TotalCount int -} - -// AddSkipped records a project that was skipped due to an active compile lock. 
-func (r *PruneResult) AddSkipped(s SkippedProject) { - r.Skipped = append(r.Skipped, s) -} -``` - -- [ ] **Step 4: Run test to verify it passes** - -```bash -cd cli && go test ./internal/utils/ -run "TestPruneResult_AddSkipped" -v -``` -Expected: PASS - -- [ ] **Step 5: Add lock-aware model scanning** - -In the models section of `ScanForStaleArtifacts`, before scanning each project cache dir, try the compile lock. If locked, skip and record. Update the models block: - -```go -// Models (cache dir) — lock-aware -if categories.has(PruneCategoryModels) { - modelsDir, mErr := GetModelCacheDirPath() - if mErr != nil { - output.LogDebugf("Failed to resolve model cache path: %v", mErr) - } - if info, err := os.Stat(modelsDir); err == nil && info.IsDir() { - modelEntries, err := os.ReadDir(modelsDir) - if err == nil { - for _, modelEntry := range modelEntries { - if !modelEntry.IsDir() { - continue - } - projectCachePath := filepath.Join(modelsDir, modelEntry.Name()) - lockPath := CompileLockPath(projectCachePath) - lock, lockErr := TryLock(lockPath, LockMeta{PID: os.Getpid(), Command: "prune"}) - if lockErr == ErrLocked { - meta, _ := ReadLockMeta(lockPath) - result.AddSkipped(SkippedProject{Path: projectCachePath, Meta: meta}) - continue - } - if lock != nil { - lock.Unlock() - } - scanProjectCacheSubdirs(projectCachePath, result) - } - } - } -} -``` - -- [ ] **Step 6: Add global prune lock and skip display to `prune.go` command** - -In `cli/cmd/prune.go`, add locking around the Run function body: - -```go -Run: func(cmd *cobra.Command, args []string) { - categories, err := resolveCategories() - if err != nil { - out.FatalErr(err) - } - - // Acquire global prune lock - pruneLockPath, err := utils.PruneLockPath() - if err != nil { - out.Fatalf("Failed to resolve prune lock path: %s", err) - } - pruneLock, err := utils.TryLock(pruneLockPath, utils.LockMeta{ - PID: os.Getpid(), - Command: "prune", - }) - if err == utils.ErrLocked { - out.Fatal("Another prune is already 
running") - } - if err != nil { - out.Fatalf("Failed to acquire prune lock: %s", err) - } - defer pruneLock.Unlock() - - result, err := utils.ScanForStaleArtifacts(categories) - if err != nil { - out.Fatalf("Failed to scan for stale artifacts: %s", err) - } - - // Display skipped projects - if len(result.Skipped) > 0 { - sb := out.Section("Skipped (compilation in progress)") - for _, s := range result.Skipped { - if s.Meta.PID != 0 { - sb.Text(fmt.Sprintf("%s (locked by PID %d)", s.Path, s.Meta.PID)) - } else { - sb.Text(fmt.Sprintf("%s (locked)", s.Path)) - } - } - sb.Render() - } - - if result.TotalCount == 0 { - out.Print("No stale artifacts found. Nothing to prune.") - return - } - - sb := out.Section("Stale Artifacts") - for _, artifact := range result.Stale { - sb.Text(fmt.Sprintf("%s (%s) - %s", artifact.Path, artifact.Kind, output.FormatSize(artifact.Size))) - } - sb.Line(). - Text(fmt.Sprintf("Total: %d items, %s", result.TotalCount, output.FormatSize(result.TotalSize))). - Render() - - if pruneDryRun { - out.Print("Dry run mode. No files were deleted.") - return - } - - if !pruneYes { - if !out.Confirm("Delete these artifacts?", false) { - out.Print("Prune cancelled.") - return - } - } - - if err := utils.DeleteArtifacts(result.Stale); err != nil { - out.Fatalf("Failed to delete artifacts: %s", err) - } - - out.Successf("Pruned %d items, freed %s", result.TotalCount, output.FormatSize(result.TotalSize)) -}, -``` - -Don't forget to add `"os"` to imports in `prune.go`. - -- [ ] **Step 7: Verify it compiles and tests pass** - -```bash -cd cli && go build ./... 
&& go test ./internal/utils/ -v -``` -Expected: PASS - -- [ ] **Step 8: Commit** - -```bash -git add cli/internal/utils/prune.go cli/internal/utils/prune_test.go cli/cmd/prune.go -git commit -m "feat: add lock-aware prune with skip reporting for active compilations" -``` - ---- - -### Task 7: Replace `HasStagingDir` with compile lock in scan/compile - -**Files:** -- Modify: `cli/cmd/scan.go` -- Modify: `cli/internal/utils/model_cache.go` (remove `HasStagingDir`) - -- [ ] **Step 1: Replace `HasStagingDir` in `scan.go`** - -In `cli/cmd/scan.go`, in `resolveScanConfig()` around lines 425-429, replace: - -```go -if utils.HasStagingDir(projectCachePath) { - out.Error("Compilation already in progress for this project") - suggest("To scan an existing model instead", utils.NewScanCommand("").WithProjectModel("").Build()) - os.Exit(1) -} -``` - -With: - -```go -compileLock, lockErr := utils.TryLock( - utils.CompileLockPath(projectCachePath), - utils.LockMeta{PID: os.Getpid(), Command: "compile", Project: absUserProjectRoot}, -) -if lockErr == utils.ErrLocked { - out.Error("Compilation already in progress for this project") - suggest("To scan an existing model instead", utils.NewScanCommand("").WithProjectModel("").Build()) - os.Exit(1) -} -if lockErr != nil { - out.Fatalf("Failed to acquire compile lock: %s", lockErr) -} -``` - -The `compileLock` must be stored in the `scanConfig` and released after compilation + promotion. 
Add a field to `scanConfig`: - -```go -type scanConfig struct { - mode ScanMode - absProjectModel string - projectCachePath string - stagingDir string - needsCompilation bool - compileLock *utils.FileLock // non-nil when we hold the compile lock -} -``` - -Release it at the end of the `scan()` function (after all operations complete): - -```go -defer func() { - if cfg.compileLock != nil { - cfg.compileLock.Unlock() - } -}() -``` - -And set it in `resolveScanConfig`: - -```go -return scanConfig{ - mode: CompileAndScan, - absProjectModel: filepath.Join(stagingDir, "project-model"), - projectCachePath: projectCachePath, - stagingDir: stagingDir, - needsCompilation: true, - compileLock: compileLock, -} -``` - -Note: `resolveScanConfig` needs `absUserProjectRoot` as a parameter now (for the lock meta). Update its signature and the call site. - -- [ ] **Step 2: Remove `HasStagingDir` and `CleanupStagingDir` if unused** - -Remove from `cli/internal/utils/model_cache.go`: -- `HasStagingDir` function (lines 116-127) -- Keep `CleanupStagingDir` — it's still used by `scan.go` for cleanup on compile failure - -- [ ] **Step 3: Verify everything compiles** - -```bash -cd cli && go build ./... -``` -Expected: no errors - -- [ ] **Step 4: Run all tests** - -```bash -cd cli && go test ./... -``` -Expected: PASS. If any tests reference `HasStagingDir`, remove those tests. - -- [ ] **Step 5: Commit** - -```bash -git add cli/cmd/scan.go cli/internal/utils/model_cache.go -git commit -m "refactor: replace HasStagingDir heuristic with flock-based compile lock" -``` - ---- - -### Task 8: Add compile lock to standalone compile command - -**Files:** -- Modify: `cli/cmd/compile.go` - -- [ ] **Step 1: Add compile lock acquisition** - -The `compile` command in `compile.go` doesn't currently use the cache at all — it writes to `--output`. 
However, if we want compile locking to also protect the compile command when it writes to cache (via `activateLoggingForProject` which creates the project cache path), we should add locking here too. - -Actually, looking at `compile.go`, the standalone compile command writes to `--output` (user-specified path), not to the cache. The cache interaction only happens via `scan.go`. The compile command only uses the cache for logging. - -Since compile doesn't write to the model cache, no compile lock is needed here. The lock is only for cache-based compilation (scan command). Skip this task. - -- [ ] **Step 2: Verify compile still works** - -```bash -cd cli && go build ./... -``` -Expected: no errors - -- [ ] **Step 3: Commit (if any changes)** - -No changes expected for this task. Move on. - ---- - -### Task 9: Final integration verification - -**Files:** None (verification only) - -- [ ] **Step 1: Run full test suite** - -```bash -cd cli && go test ./... -v -``` -Expected: all PASS - -- [ ] **Step 2: Build the binary** - -```bash -cd cli && go build -o opentaint . 
-``` -Expected: binary builds successfully - -- [ ] **Step 3: Manual smoke test — prune with no flags** - -```bash -./opentaint prune --dry-run -``` -Expected: shows stale artifacts (or "nothing to prune"), no errors - -- [ ] **Step 4: Manual smoke test — prune with specific flags** - -```bash -./opentaint prune --logs --dry-run -./opentaint prune --models --dry-run -./opentaint prune --artifacts --rules --dry-run -``` -Expected: each shows only the relevant category - -- [ ] **Step 5: Manual smoke test — mutual exclusivity** - -```bash -./opentaint prune --all --logs -``` -Expected: error message about mutual exclusivity - -- [ ] **Step 6: Manual smoke test — prune lock** - -Run two prune commands simultaneously (in separate terminals): -```bash -# Terminal 1: -./opentaint prune --dry-run # should succeed - -# Terminal 2 (while 1 is running): -./opentaint prune --dry-run # should fail with "Another prune is already running" -``` - -- [ ] **Step 7: Commit any final fixes** - -If smoke tests reveal issues, fix and commit. diff --git a/docs/superpowers/specs/2026-04-16-prune-lockfiles-granular-flags-design.md b/docs/superpowers/specs/2026-04-16-prune-lockfiles-granular-flags-design.md deleted file mode 100644 index 8cc2fc101..000000000 --- a/docs/superpowers/specs/2026-04-16-prune-lockfiles-granular-flags-design.md +++ /dev/null @@ -1,162 +0,0 @@ -# Prune: Lockfiles & Granular Flags - -## Problem - -The `opentaint prune` command lacks: -1. **Concurrency safety** — no file locking; concurrent prune/compile can corrupt state. The existing `HasStagingDir` is a best-effort TOCTOU heuristic. -2. **Granular control** — only `--all` vs default. Users cannot selectively prune logs, models, artifacts, etc. -3. **Log isolation** — logs live inside `cache//`, making it impossible to prune them independently of models. - -## Directory Structure - -Flat layout for JARs/rules/JDK/JRE is **unchanged**. 
Only the cache area is restructured — logs move to their own top-level directory: - -``` -~/.opentaint/ -├── analyzer_.jar # unchanged -├── autobuilder_.jar # unchanged -├── rules_/ # unchanged -├── jdk/ # unchanged -├── jre/ # unchanged -├── install/ # unchanged -│ ├── lib/ -│ ├── jre/ -│ └── .versions -├── cache/ # models only (no more logs here) -│ └── / -│ ├── project-model/ -│ ├── .staging-*/ -│ └── .compile.lock # per-project compile lock -├── logs/ # mirrors cache/ structure -│ └── / -│ └── .log -└── .prune.lock # global prune lock -``` - -### Changes from current layout -- `logs/` becomes a top-level sibling of `cache/`, using the same `` subdirs. -- Lock files live where they protect: `.compile.lock` per project, `.prune.lock` globally. -- No migration needed for old logs in `cache/` dirs — they get cleaned up naturally by prune. - -## Granular Prune Flags - -| Flag | Targets | Default prune | `--all` | -|------|---------|:---:|:---:| -| `--artifacts` | Stale analyzer + autobuilder JARs | yes | yes | -| `--rules` | Stale rules directories | yes | yes | -| `--jdk` | Old JDK/JRE versions | yes | yes | -| `--models` | `cache//project-model/` + `.staging-*` | yes | yes | -| `--logs` | `logs//` | no | yes | -| `--install` | `install/lib/` + `install/jre/` | no | yes | - -### Behavior - -- **No flags** = default prune (artifacts, rules, jdk, models). Same as today minus `--all` stuff. -- **`--all`** = everything including logs and install-tier. -- **Specific flags** (e.g., `--models --logs`) = only those categories, nothing else. -- Specific flags and `--all` are **mutually exclusive** — error if combined. 
- -Examples: -```bash -opentaint prune # default: artifacts + rules + jdk + models -opentaint prune --logs # only logs -opentaint prune --models --logs # models and logs -opentaint prune --all # everything -opentaint prune --all --logs # error: mutually exclusive -``` - -## Locking - -### Dependency - -`github.com/gofrs/flock` — cross-platform file locking (flock on Unix, LockFileEx on Windows). Auto-releases on process crash. - -### Lock scopes - -| Lock | File | Acquired by | Purpose | -|------|------|-------------|---------| -| Global prune | `~/.opentaint/.prune.lock` | `prune` command | Prevent concurrent prunes | -| Per-project compile | `cache//.compile.lock` | `scan`/`compile` | Protect active compilations | - -### Lock file content (diagnostics) - -``` -pid=12345 -command=compile -project=/Users/dev/my-app -``` - -PID and metadata written after acquiring the lock. Used for skip reporting only — not part of the locking mechanism itself. - -### Prune flow - -1. Try exclusive non-blocking lock on `.prune.lock`. - - If held: fail fast with "Another prune is already running". -2. Scan for stale artifacts across all requested categories. -3. For each project in `cache/`, try non-blocking lock on `.compile.lock`. - - Acquired: include project in prune candidates, release (re-acquire at delete time). - - Locked: skip project, add to "skipped" report. -4. Display results + skipped projects. -5. If not dry-run and user confirms: delete artifacts (re-acquiring per-project locks before removing). -6. Release `.prune.lock`. - -### Compile flow - -1. Acquire exclusive lock on `cache//.compile.lock`. - - If held: fail with "Compilation already in progress for this project". -2. Create staging dir, compile, promote to cache. -3. Write logs to `logs//`. -4. Release lock. - -This replaces the `HasStagingDir` heuristic entirely. 
- -### Skip reporting - -When prune skips locked projects: -``` -Skipped (compilation in progress): - ~/.opentaint/cache/my-app-a1b2c3d4 (locked by PID 12345) - -Stale Artifacts: - ~/.opentaint/analyzer_0.9.0.jar (artifacts) - 250.5 MB - Total: 1 item, 250.5 MB -``` - -## Migration & Backward Compatibility - -### Logs - -- `scan`/`compile` commands updated to write logs to `logs//`. -- No automatic migration of old logs from `cache//`. -- Old logs inside cache dirs are treated as part of the model cache for pruning — cleaned up by `--models` or `--all`. - -### HasStagingDir removal - -- `HasStagingDir()` removed entirely; replaced by flock-based compile lock. -- `.staging-*` directories still used for the staging workflow itself. - -### No breaking changes - -- Flat artifact layout unchanged. -- JDK/JRE storage unchanged. -- Install-tier unchanged. -- Project model cache paths unchanged. -- `opentaint prune` with no flags behaves identically to today's default. - -## File Changes - -### New files -- `cli/internal/utils/lock.go` — lock acquisition/release helpers, PID content, skip reporting. -- `cli/internal/utils/lock_test.go` — lock tests. - -### Modified files -- `cli/internal/utils/prune.go` — granular category scanning, lock-aware flow. -- `cli/internal/utils/prune_test.go` — tests for new flags and locking. -- `cli/cmd/prune.go` — new flags, mutual exclusivity validation, skip reporting output. -- `cli/internal/utils/model_cache.go` — remove `HasStagingDir`, add log path helpers. -- `cli/cmd/scan.go` — compile lock instead of `HasStagingDir`, write logs to new location. -- `cli/cmd/compile.go` — same lock changes as scan. -- `go.mod` / `go.sum` — add `github.com/gofrs/flock`. - -### Removed -- `HasStagingDir()` function. 
diff --git a/scripts/install/install.cmd b/scripts/install/install.cmd
index 4026c5f37..530585660 100644
--- a/scripts/install/install.cmd
+++ b/scripts/install/install.cmd
@@ -1,17 +1,29 @@
 @echo off
 REM OpenTaint installer for Windows (CMD wrapper)
 REM This script invokes the PowerShell installer.
+REM Usage:  install.cmd          installs latest
+REM         install.cmd 1.2.3    installs version 1.2.3
 
 where powershell >nul 2>nul
 if %ERRORLEVEL% equ 0 (
-    powershell -NoProfile -ExecutionPolicy Bypass -File "%~dp0install.ps1"
+    call :run_installer powershell %*
+    REM Bare "exit /b" propagates the errorlevel from the call above.
+    REM ("exit /b %ERRORLEVEL%" inside a parenthesized block would expand at
+    REM parse time and always return 0.)
+    exit /b
+)
+where pwsh >nul 2>nul
+if %ERRORLEVEL% equ 0 (
+    call :run_installer pwsh %*
+    exit /b
+)
+echo Error: PowerShell is required to install opentaint.
+echo Please install PowerShell or use the manual installation method.
+echo See https://github.com/seqra/opentaint/blob/main/docs/installation.md for alternatives.
+exit /b 1
+
+:run_installer
+REM %1 = shell executable (powershell or pwsh), %2 = optional version
+if "%~2"=="" (
+    %1 -NoProfile -ExecutionPolicy Bypass -File "%~dp0install.ps1"
+) else (
-    where pwsh >nul 2>nul
-    if %ERRORLEVEL% equ 0 (
-        pwsh -NoProfile -ExecutionPolicy Bypass -File "%~dp0install.ps1"
-    ) else (
-        echo Error: PowerShell is required to install opentaint.
-        echo Please install PowerShell or use the manual installation method.
-        exit /b 1
-    )
+    %1 -NoProfile -ExecutionPolicy Bypass -File "%~dp0install.ps1" -Version "%~2"
 )
+exit /b %ERRORLEVEL%
diff --git a/scripts/install/install.ps1 b/scripts/install/install.ps1
index 523d502f1..811b6ec38 100644
--- a/scripts/install/install.ps1
+++ b/scripts/install/install.ps1
@@ -1,10 +1,51 @@
 # OpenTaint installer for Windows (PowerShell)
-# Usage: irm https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.ps1 | iex
+# Usage:
+#   irm https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.ps1 | iex
+#   & ([scriptblock]::Create((irm https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.ps1))) -Version 1.2.3
+
+param(
+    [string]$Version = "latest"
+)
 
 $ErrorActionPreference = 'Stop'
 
+function Test-Version {
+    param([string]$Raw)
+
+    if (-not $Raw -or $Raw -eq "latest") {
+        return @{ PathSegment = "latest/download"; Tag = "latest" }
+    }
+
+    if ($Raw -match '^(v)?(?<ver>[0-9]+\.[0-9]+\.[0-9]+(-[A-Za-z0-9._-]+)?)$') {
+        $normalized = $Matches['ver']
+        return @{ PathSegment = "download/v$normalized"; Tag = "v$normalized" }
+    }
+
+    [Console]::Error.WriteLine("Error: Invalid version '$Raw'. Expected 'latest' or 'X.Y.Z' (optionally prefixed with 'v').")
+    exit 2
+}
+
+function Test-HomebrewInstall {
+    $cmd = Get-Command opentaint -ErrorAction SilentlyContinue
+    if (-not $cmd) {
+        return $null
+    }
+    $path = $cmd.Source
+    try {
+        $resolved = (Resolve-Path -LiteralPath $path -ErrorAction Stop).Path
+        $path = $resolved
+    } catch { }
+
+    # Patterns target POSIX-style Homebrew paths; meaningful only when pwsh runs
+    # on macOS/Linux. Native Windows paths use backslashes and will not match.
+ $lower = $path.ToLower() + if ($lower -match '/cellar/' -or $lower -match '/caskroom/' -or $lower -match '/homebrew/') { + return $path + } + return $null +} + $Repo = if ($env:OPENTAINT_REPOSITORY) { $env:OPENTAINT_REPOSITORY } else { "seqra/opentaint" } -$BaseUrl = if ($env:OPENTAINT_DOWNLOAD_BASE_URL) { $env:OPENTAINT_DOWNLOAD_BASE_URL } else { "https://github.com/$Repo/releases/latest/download" } function Get-Architecture { $arch = $env:PROCESSOR_ARCHITECTURE @@ -12,16 +53,37 @@ function Get-Architecture { "AMD64" { return "amd64" } "ARM64" { return "arm64" } default { - Write-Error "Unsupported architecture: $arch" + [Console]::Error.WriteLine("Error: Unsupported architecture: $arch") + [Console]::Error.WriteLine("See https://github.com/seqra/opentaint/blob/main/docs/installation.md for alternatives.") exit 1 } } } +function Invoke-Download { + param( + [string]$Url, + [string]$OutFile, + [bool]$ShowProgress = $false + ) + + $previous = $ProgressPreference + if (-not $ShowProgress) { + $ProgressPreference = 'SilentlyContinue' + } + try { + Invoke-WebRequest -Uri $Url -OutFile $OutFile -UseBasicParsing + } + finally { + $ProgressPreference = $previous + } +} + function Verify-Checksum { param( [string]$ArchivePath, - [string]$ArchiveName + [string]$ArchiveName, + [string]$BaseUrl ) $checksumsUrl = "$BaseUrl/checksums.txt" @@ -29,7 +91,7 @@ function Verify-Checksum { Write-Host "Verifying checksum..." try { - Invoke-WebRequest -Uri $checksumsUrl -OutFile $checksumsFile -UseBasicParsing + Invoke-Download -Url $checksumsUrl -OutFile $checksumsFile -ShowProgress $false } catch { Write-Warning "Could not download checksums.txt, skipping verification." 
return @@ -45,7 +107,9 @@ function Verify-Checksum { $actual = (Get-FileHash -Path $ArchivePath -Algorithm SHA256).Hash.ToLower() if ($expected -ne $actual) { - Write-Error "Checksum verification failed!`n Expected: $expected`n Actual: $actual" + [Console]::Error.WriteLine("Error: Checksum verification failed!") + [Console]::Error.WriteLine(" Expected: $expected") + [Console]::Error.WriteLine(" Actual: $actual") exit 1 } Write-Host "Checksum verified." @@ -59,11 +123,26 @@ function Get-InstallDir { } function Main { + $existingBrew = Test-HomebrewInstall + if ($existingBrew -and $env:OPENTAINT_FORCE -ne "1") { + [Console]::Error.WriteLine("Error: opentaint is already installed via Homebrew at $existingBrew.") + [Console]::Error.WriteLine("Run 'brew upgrade --cask opentaint' to update, or set") + [Console]::Error.WriteLine("`$env:OPENTAINT_FORCE='1' to install side-by-side anyway.") + exit 3 + } + + $versionInfo = Test-Version -Raw $Version + $baseUrl = if ($env:OPENTAINT_DOWNLOAD_BASE_URL) { + $env:OPENTAINT_DOWNLOAD_BASE_URL + } else { + "https://github.com/$Repo/releases/$($versionInfo.PathSegment)" + } + $arch = Get-Architecture Write-Host "Architecture: $arch" $archiveName = "opentaint-full_windows_${arch}.zip" - $url = "$BaseUrl/$archiveName" + $url = "$baseUrl/$archiveName" $installDir = Get-InstallDir Write-Host "Install directory: $installDir" @@ -73,9 +152,9 @@ function Main { try { $archivePath = Join-Path $tmpDir $archiveName Write-Host "Downloading $archiveName..." - Invoke-WebRequest -Uri $url -OutFile $archivePath -UseBasicParsing + Invoke-Download -Url $url -OutFile $archivePath -ShowProgress $true - Verify-Checksum -ArchivePath $archivePath -ArchiveName $archiveName + Verify-Checksum -ArchivePath $archivePath -ArchiveName $archiveName -BaseUrl $baseUrl Write-Host "Extracting..." 
Expand-Archive -Path $archivePath -DestinationPath $tmpDir -Force @@ -87,7 +166,6 @@ function Main { New-Item -ItemType Directory -Path $binDir -Force | Out-Null Copy-Item -Path (Join-Path $tmpDir "opentaint.exe") -Destination (Join-Path $binDir "opentaint.exe") -Force - # Install bundled lib and jre if present (next to the binary) $libSrc = Join-Path $tmpDir "lib" if (Test-Path $libSrc) { $libDst = Join-Path $binDir "lib" @@ -102,7 +180,6 @@ function Main { Copy-Item -Recurse -Path $jreSrc -Destination $jreDst } - # Add to PATH if not already there $userPath = [Environment]::GetEnvironmentVariable("Path", "User") if ($userPath -notlike "*$binDir*") { [Environment]::SetEnvironmentVariable("Path", "$binDir;$userPath", "User") diff --git a/scripts/install/install.sh b/scripts/install/install.sh index d5523850d..d6ccf386d 100755 --- a/scripts/install/install.sh +++ b/scripts/install/install.sh @@ -2,11 +2,108 @@ set -euo pipefail # OpenTaint installer for Linux and macOS -# Usage: curl -fsSL https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.sh | bash +# Usage: +# curl -fsSL https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.sh | bash +# curl -fsSL https://raw.githubusercontent.com/seqra/opentaint/main/scripts/install/install.sh | bash -s -- 1.2.3 REPO="${OPENTAINT_REPOSITORY:-seqra/opentaint}" INSTALL_DIR="${OPENTAINT_INSTALL_DIR:-}" -DOWNLOAD_BASE_URL="${OPENTAINT_DOWNLOAD_BASE_URL:-https://github.com/${REPO}/releases/latest/download}" + +DOWNLOADER="" + +# Populates VERSION_PATH_SEGMENT and VERSION_TAG from the raw version argument. +# Accepts: +# (empty) -> latest +# latest +# X.Y.Z +# vX.Y.Z +# X.Y.Z-suffix +# vX.Y.Z-suffix +# Exits 2 on invalid input. 
+validate_version() { + local raw="${1:-latest}" + + if [ "$raw" = "latest" ] || [ -z "$raw" ]; then + VERSION_PATH_SEGMENT="latest/download" + VERSION_TAG="latest" + return + fi + + if [[ "$raw" =~ ^v?[0-9]+\.[0-9]+\.[0-9]+(-[A-Za-z0-9._-]+)?$ ]]; then + local normalized="${raw#v}" + VERSION_PATH_SEGMENT="download/v${normalized}" + VERSION_TAG="v${normalized}" + return + fi + + echo "Error: invalid version '$raw'." >&2 + echo "Expected 'latest' or 'X.Y.Z' (optionally prefixed with 'v')." >&2 + exit 2 +} + +# Prints the resolved path of an existing opentaint binary if it appears to +# belong to a Homebrew installation (mirrors cli/internal/utils/updater.go +# classification). Prints nothing otherwise. +detect_homebrew_install() { + if ! command -v opentaint >/dev/null 2>&1; then + return 0 + fi + local path resolved + path="$(command -v opentaint)" + # Resolve symlinks so we can compare against the real location. + # readlink -f is GNU-only; fall back to realpath on macOS/BSD. + if resolved="$(readlink -f "$path" 2>/dev/null)" && [ -n "$resolved" ]; then + path="$resolved" + elif resolved="$(realpath "$path" 2>/dev/null)" && [ -n "$resolved" ]; then + path="$resolved" + fi + case "$(printf '%s' "$path" | tr '[:upper:]' '[:lower:]')" in + */cellar/*|*/caskroom/*|*/homebrew/*) + echo "$path" + ;; + esac +} + +pick_downloader() { + if command -v curl >/dev/null 2>&1; then + DOWNLOADER="curl" + return + fi + if command -v wget >/dev/null 2>&1; then + DOWNLOADER="wget" + return + fi + echo "Error: curl or wget is required but neither is installed." >&2 + echo "Install curl or wget and re-run the installer." 
>&2 + exit 1 +} + +download() { + local url="$1" + local output="$2" + local progress="${3:-0}" + + case "$DOWNLOADER" in + curl) + if [ "$progress" = "1" ]; then + curl -fSL --progress-bar -o "$output" "$url" + else + curl -fsSL -o "$output" "$url" + fi + ;; + wget) + if [ "$progress" = "1" ]; then + wget --show-progress --progress=bar:force:noscroll -q -O "$output" "$url" + else + wget -q -O "$output" "$url" + fi + ;; + *) + echo "Error: no downloader configured." >&2 + exit 1 + ;; + esac +} detect_platform() { local os arch @@ -17,6 +114,7 @@ detect_platform() { Darwin) os="darwin" ;; *) echo "Error: Unsupported operating system: $os" >&2 + echo "See https://github.com/seqra/opentaint/blob/main/docs/installation.md for alternatives." >&2 exit 1 ;; esac @@ -27,10 +125,19 @@ detect_platform() { arm64|aarch64) arch="arm64" ;; *) echo "Error: Unsupported architecture: $arch" >&2 + echo "See https://github.com/seqra/opentaint/blob/main/docs/installation.md for alternatives." >&2 exit 1 ;; esac + # Rosetta-2: a shell running as amd64 under Rosetta on Apple Silicon + # should download the native arm64 archive instead. + if [ "$os" = "darwin" ] && [ "$arch" = "amd64" ]; then + if [ "$(sysctl -n sysctl.proc_translated 2>/dev/null)" = "1" ]; then + arch="arm64" + fi + fi + echo "${os}_${arch}" } @@ -40,7 +147,7 @@ verify_checksum() { local checksums_url="${DOWNLOAD_BASE_URL}/checksums.txt" echo "Verifying checksum..." - if ! curl -fsSL -o "$tmp_dir/checksums.txt" "$checksums_url" 2>/dev/null; then + if ! download "$checksums_url" "$tmp_dir/checksums.txt" 2>/dev/null; then echo "Warning: Could not download checksums.txt, skipping verification." 
>&2 return 0 fi @@ -87,6 +194,21 @@ get_install_dir() { main() { local platform archive_name url install_dir bin_dir + validate_version "${1:-}" + pick_downloader + + local existing_brew + existing_brew="$(detect_homebrew_install)" + if [ -n "$existing_brew" ] && [ "${OPENTAINT_FORCE:-0}" != "1" ]; then + echo "Error: opentaint is already installed via Homebrew at $existing_brew" >&2 + echo "Run 'brew upgrade --cask opentaint' to update, or set" >&2 + echo "OPENTAINT_FORCE=1 to install side-by-side anyway." >&2 + exit 3 + fi + + DOWNLOAD_BASE_URL="${OPENTAINT_DOWNLOAD_BASE_URL:-https://github.com/${REPO}/releases/${VERSION_PATH_SEGMENT}}" + + echo "Version: $VERSION_TAG" echo "Detecting platform..." platform="$(detect_platform)" echo "Platform: $platform" @@ -101,7 +223,7 @@ main() { trap 'rm -rf "$tmp_dir"' EXIT echo "Downloading ${archive_name}..." - curl -fsSL -o "$tmp_dir/$archive_name" "$url" + download "$url" "$tmp_dir/$archive_name" 1 verify_checksum "$tmp_dir/$archive_name" "$archive_name"