diff --git a/CLAUDE.md b/CLAUDE.md index d1672f5f1..42baf51fe 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -12,7 +12,7 @@ Kubernetes-native AI automation platform that orchestrates agentic sessions thro - `components/runners/ambient-runner/` - Python runner executing Claude Code CLI in Job pods - `components/ambient-cli/` - Go CLI (`acpctl`), manages agentic sessions from the command line - `components/public-api/` - Stateless HTTP gateway, proxies to backend (no direct K8s access) -- `components/ambient-api-server/` - Go REST API microservice (rh-trex-ai framework), PostgreSQL-backed +- `components/ambient-api-server/` - Go REST API microservice (rh-trex-ai framework), PostgreSQL-backed. Each domain resource (agents, repo intelligences, etc.) is a self-contained plugin with model, DAO, service, handler, and migration - `components/ambient-sdk/` - Go + Python client SDK for the platform's public REST API - `components/open-webui-llm/` - Open WebUI LLM integration - `components/manifests/` - Kustomize-based deployment manifests and overlays diff --git a/components/ambient-api-server/Dockerfile b/components/ambient-api-server/Dockerfile index fb32013ac..79a884b7e 100755 --- a/components/ambient-api-server/Dockerfile +++ b/components/ambient-api-server/Dockerfile @@ -12,8 +12,8 @@ COPY pkg/ pkg/ COPY plugins/ plugins/ COPY openapi/ openapi/ -# Build the binary -RUN go build -ldflags="-s -w" -o ambient-api-server ./cmd/ambient-api-server +# Build the binary (GOPROXY=direct works around HTTP/2 stream errors with proxy.golang.org inside Docker) +RUN GOPROXY=direct go build -ldflags="-s -w" -o ambient-api-server ./cmd/ambient-api-server # Runtime stage FROM registry.access.redhat.com/ubi9/ubi-minimal:latest diff --git a/components/ambient-api-server/cmd/ambient-api-server/main.go b/components/ambient-api-server/cmd/ambient-api-server/main.go index 2e5f8cec4..abe89c5ae 100644 --- a/components/ambient-api-server/cmd/ambient-api-server/main.go +++ 
b/components/ambient-api-server/cmd/ambient-api-server/main.go @@ -24,6 +24,11 @@ import ( _ "github.com/ambient-code/platform/components/ambient-api-server/plugins/roles" _ "github.com/ambient-code/platform/components/ambient-api-server/plugins/sessions" _ "github.com/ambient-code/platform/components/ambient-api-server/plugins/users" + + // Project intelligence memory plugins + _ "github.com/ambient-code/platform/components/ambient-api-server/plugins/repoEvents" + _ "github.com/ambient-code/platform/components/ambient-api-server/plugins/repoFindings" + _ "github.com/ambient-code/platform/components/ambient-api-server/plugins/repoIntelligences" ) func main() { diff --git a/components/ambient-api-server/plugins/repoEvents/dao.go b/components/ambient-api-server/plugins/repoEvents/dao.go new file mode 100644 index 000000000..b61c7a7d5 --- /dev/null +++ b/components/ambient-api-server/plugins/repoEvents/dao.go @@ -0,0 +1,62 @@ +package repoEvents + +import ( + "context" + + "gorm.io/gorm/clause" + + "github.com/openshift-online/rh-trex-ai/pkg/db" +) + +type RepoEventDao interface { + Get(ctx context.Context, id string) (*RepoEvent, error) + Create(ctx context.Context, re *RepoEvent) (*RepoEvent, error) + FindByIDs(ctx context.Context, ids []string) (RepoEventList, error) + All(ctx context.Context) (RepoEventList, error) +} + +var _ RepoEventDao = &sqlRepoEventDao{} + +type sqlRepoEventDao struct { + sessionFactory *db.SessionFactory +} + +func NewRepoEventDao(sessionFactory *db.SessionFactory) RepoEventDao { + return &sqlRepoEventDao{sessionFactory: sessionFactory} +} + +func (d *sqlRepoEventDao) Get(ctx context.Context, id string) (*RepoEvent, error) { + g2 := (*d.sessionFactory).New(ctx) + var re RepoEvent + if err := g2.Take(&re, "id = ?", id).Error; err != nil { + return nil, err + } + return &re, nil +} + +func (d *sqlRepoEventDao) Create(ctx context.Context, re *RepoEvent) (*RepoEvent, error) { + g2 := (*d.sessionFactory).New(ctx) + if err := 
g2.Omit(clause.Associations).Create(re).Error; err != nil { + db.MarkForRollback(ctx, err) + return nil, err + } + return re, nil +} + +func (d *sqlRepoEventDao) FindByIDs(ctx context.Context, ids []string) (RepoEventList, error) { + g2 := (*d.sessionFactory).New(ctx) + items := RepoEventList{} + if err := g2.Where("id in (?)", ids).Find(&items).Error; err != nil { + return nil, err + } + return items, nil +} + +func (d *sqlRepoEventDao) All(ctx context.Context) (RepoEventList, error) { + g2 := (*d.sessionFactory).New(ctx) + items := RepoEventList{} + if err := g2.Find(&items).Error; err != nil { + return nil, err + } + return items, nil +} diff --git a/components/ambient-api-server/plugins/repoEvents/handler.go b/components/ambient-api-server/plugins/repoEvents/handler.go new file mode 100644 index 000000000..5a9fb5cdd --- /dev/null +++ b/components/ambient-api-server/plugins/repoEvents/handler.go @@ -0,0 +1,72 @@ +package repoEvents + +import ( + "net/http" + + "github.com/gorilla/mux" + + "github.com/openshift-online/rh-trex-ai/pkg/errors" + "github.com/openshift-online/rh-trex-ai/pkg/handlers" + "github.com/openshift-online/rh-trex-ai/pkg/services" + + "github.com/ambient-code/platform/components/ambient-api-server/plugins/common" +) + +type repoEventHandler struct { + service RepoEventService + generic services.GenericService +} + +func NewRepoEventHandler(svc RepoEventService, generic services.GenericService) *repoEventHandler { + return &repoEventHandler{ + service: svc, + generic: generic, + } +} + +func (h repoEventHandler) List(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + ctx := r.Context() + listArgs := services.NewListArguments(r.URL.Query()) + + if serr := common.ApplyProjectScope(r, listArgs); serr != nil { + return nil, serr + } + + var items []RepoEvent + paging, err := h.generic.List(ctx, "id", listArgs, &items) + if err != nil { + return nil, err + } + + list 
:= RepoEventListAPI{ + Kind: "RepoEventList", + Page: int32(paging.Page), + Size: int32(paging.Size), + Total: int32(paging.Total), + Items: []RepoEventAPI{}, + } + for _, item := range items { + list.Items = append(list.Items, PresentRepoEvent(&item)) + } + return list, nil + }, + } + handlers.HandleList(w, r, cfg) +} + +func (h repoEventHandler) Get(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + id := mux.Vars(r)["id"] + ctx := r.Context() + re, err := h.service.Get(ctx, id) + if err != nil { + return nil, err + } + return PresentRepoEvent(re), nil + }, + } + handlers.HandleGet(w, r, cfg) +} diff --git a/components/ambient-api-server/plugins/repoEvents/migration.go b/components/ambient-api-server/plugins/repoEvents/migration.go new file mode 100644 index 000000000..54a3c0144 --- /dev/null +++ b/components/ambient-api-server/plugins/repoEvents/migration.go @@ -0,0 +1,45 @@ +package repoEvents + +import ( + "gorm.io/gorm" + + "github.com/go-gormigrate/gormigrate/v2" + "github.com/openshift-online/rh-trex-ai/pkg/db" +) + +func migration() *gormigrate.Migration { + type RepoEvent struct { + db.Model + ResourceType string `gorm:"not null"` + ResourceID string `gorm:"not null"` + Action string `gorm:"not null"` + ActorType string `gorm:"not null"` + ActorID string `gorm:"not null"` + ProjectID string `gorm:"not null"` + Reason *string + Diff *string `gorm:"type:text"` + } + + return &gormigrate.Migration{ + ID: "202604091202", + Migrate: func(tx *gorm.DB) error { + if err := tx.AutoMigrate(&RepoEvent{}); err != nil { + return err + } + stmts := []string{ + `CREATE INDEX IF NOT EXISTS idx_re_resource_type ON repo_events(resource_type)`, + `CREATE INDEX IF NOT EXISTS idx_re_resource_id ON repo_events(resource_id)`, + `CREATE INDEX IF NOT EXISTS idx_re_project_id ON repo_events(project_id)`, + } + for _, s := range stmts { + if err := tx.Exec(s).Error; err != nil { + return err + } + } + 
return nil + }, + Rollback: func(tx *gorm.DB) error { + return tx.Migrator().DropTable("repo_events") + }, + } +} diff --git a/components/ambient-api-server/plugins/repoEvents/model.go b/components/ambient-api-server/plugins/repoEvents/model.go new file mode 100644 index 000000000..ab310ad6a --- /dev/null +++ b/components/ambient-api-server/plugins/repoEvents/model.go @@ -0,0 +1,40 @@ +package repoEvents + +import ( + "github.com/openshift-online/rh-trex-ai/pkg/api" + "gorm.io/gorm" +) + +type RepoEvent struct { + api.Meta + + // What changed + ResourceType string `json:"resource_type" gorm:"not null;index"` + ResourceID string `json:"resource_id" gorm:"not null;index"` + Action string `json:"action" gorm:"not null"` + + // Who + ActorType string `json:"actor_type" gorm:"not null"` + ActorID string `json:"actor_id" gorm:"not null"` + + // Context + ProjectID string `json:"project_id" gorm:"not null;index"` + Reason *string `json:"reason,omitempty"` + Diff *string `json:"diff,omitempty" gorm:"type:text"` +} + +type RepoEventList []*RepoEvent +type RepoEventIndex map[string]*RepoEvent + +func (l RepoEventList) Index() RepoEventIndex { + index := RepoEventIndex{} + for _, o := range l { + index[o.ID] = o + } + return index +} + +func (d *RepoEvent) BeforeCreate(tx *gorm.DB) error { + d.ID = api.NewID() + return nil +} diff --git a/components/ambient-api-server/plugins/repoEvents/plugin.go b/components/ambient-api-server/plugins/repoEvents/plugin.go new file mode 100644 index 000000000..8f12ac652 --- /dev/null +++ b/components/ambient-api-server/plugins/repoEvents/plugin.go @@ -0,0 +1,65 @@ +package repoEvents + +import ( + "net/http" + + "github.com/gorilla/mux" + "github.com/openshift-online/rh-trex-ai/pkg/api/presenters" + "github.com/openshift-online/rh-trex-ai/pkg/auth" + "github.com/openshift-online/rh-trex-ai/pkg/db" + "github.com/openshift-online/rh-trex-ai/pkg/environments" + "github.com/openshift-online/rh-trex-ai/pkg/registry" + pkgserver 
"github.com/openshift-online/rh-trex-ai/pkg/server" + "github.com/openshift-online/rh-trex-ai/plugins/generic" + + pkgrbac "github.com/ambient-code/platform/components/ambient-api-server/plugins/rbac" +) + +type ServiceLocator func() RepoEventService + +func NewServiceLocator(env *environments.Env) ServiceLocator { + return func() RepoEventService { + return NewRepoEventService( + NewRepoEventDao(&env.Database.SessionFactory), + ) + } +} + +func Service(s *environments.Services) RepoEventService { + if s == nil { + return nil + } + if obj := s.GetService("RepoEvents"); obj != nil { + locator := obj.(ServiceLocator) + return locator() + } + return nil +} + +func init() { + registry.RegisterService("RepoEvents", func(env interface{}) interface{} { + return NewServiceLocator(env.(*environments.Env)) + }) + + pkgserver.RegisterRoutes("repo_events", func(apiV1Router *mux.Router, services pkgserver.ServicesInterface, authMiddleware environments.JWTMiddleware, authzMiddleware auth.AuthorizationMiddleware) { + envServices := services.(*environments.Services) + if dbAuthz := pkgrbac.Middleware(envServices); dbAuthz != nil { + authzMiddleware = dbAuthz + } + svc := Service(envServices) + handler := NewRepoEventHandler(svc, generic.Service(envServices)) + + router := apiV1Router.PathPrefix("/repo_events").Subrouter() + router.HandleFunc("", handler.List).Methods(http.MethodGet) + router.HandleFunc("/{id}", handler.Get).Methods(http.MethodGet) + router.Use(authMiddleware.AuthenticateAccountJWT) + router.Use(authzMiddleware.AuthorizeApi) + }) + + presenters.RegisterPath(RepoEvent{}, "repo_events") + presenters.RegisterPath(&RepoEvent{}, "repo_events") + presenters.RegisterKind(RepoEvent{}, "RepoEvent") + presenters.RegisterKind(&RepoEvent{}, "RepoEvent") + + db.RegisterMigration(migration()) +} diff --git a/components/ambient-api-server/plugins/repoEvents/presenter.go b/components/ambient-api-server/plugins/repoEvents/presenter.go new file mode 100644 index 000000000..98be3d56c 
--- /dev/null +++ b/components/ambient-api-server/plugins/repoEvents/presenter.go @@ -0,0 +1,54 @@ +package repoEvents + +import ( + "time" + + "github.com/openshift-online/rh-trex-ai/pkg/api/presenters" +) + +type RepoEventAPI struct { + ID *string `json:"id,omitempty"` + Kind *string `json:"kind,omitempty"` + Href *string `json:"href,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` + + ResourceType string `json:"resource_type"` + ResourceID string `json:"resource_id"` + Action string `json:"action"` + ActorType string `json:"actor_type"` + ActorID string `json:"actor_id"` + ProjectID string `json:"project_id"` + Reason *string `json:"reason,omitempty"` + Diff *string `json:"diff,omitempty"` +} + +type RepoEventListAPI struct { + Kind string `json:"kind"` + Page int32 `json:"page"` + Size int32 `json:"size"` + Total int32 `json:"total"` + Items []RepoEventAPI `json:"items"` +} + +func ptrTime(v time.Time) *time.Time { return &v } + +func PresentRepoEvent(re *RepoEvent) RepoEventAPI { + ref := presenters.PresentReference(re.ID, re) + return RepoEventAPI{ + ID: ref.Id, + Kind: ref.Kind, + Href: ref.Href, + CreatedAt: ptrTime(re.CreatedAt), + UpdatedAt: ptrTime(re.UpdatedAt), + + ResourceType: re.ResourceType, + ResourceID: re.ResourceID, + Action: re.Action, + ActorType: re.ActorType, + ActorID: re.ActorID, + ProjectID: re.ProjectID, + Reason: re.Reason, + Diff: re.Diff, + } +} diff --git a/components/ambient-api-server/plugins/repoEvents/service.go b/components/ambient-api-server/plugins/repoEvents/service.go new file mode 100644 index 000000000..d806d7b10 --- /dev/null +++ b/components/ambient-api-server/plugins/repoEvents/service.go @@ -0,0 +1,66 @@ +package repoEvents + +import ( + "context" + + "github.com/openshift-online/rh-trex-ai/pkg/errors" + "github.com/openshift-online/rh-trex-ai/pkg/logger" + "github.com/openshift-online/rh-trex-ai/pkg/services" +) + +type RepoEventService interface { 
+ Get(ctx context.Context, id string) (*RepoEvent, *errors.ServiceError) + Create(ctx context.Context, re *RepoEvent) (*RepoEvent, *errors.ServiceError) + All(ctx context.Context) (RepoEventList, *errors.ServiceError) + FindByIDs(ctx context.Context, ids []string) (RepoEventList, *errors.ServiceError) + + OnUpsert(ctx context.Context, id string) error +} + +func NewRepoEventService(dao RepoEventDao) RepoEventService { + return &sqlRepoEventService{dao: dao} +} + +var _ RepoEventService = &sqlRepoEventService{} + +type sqlRepoEventService struct { + dao RepoEventDao +} + +func (s *sqlRepoEventService) OnUpsert(ctx context.Context, id string) error { + log := logger.NewLogger(ctx) + log.Infof("RepoEvent recorded: %s", id) + return nil +} + +func (s *sqlRepoEventService) Get(ctx context.Context, id string) (*RepoEvent, *errors.ServiceError) { + re, err := s.dao.Get(ctx, id) + if err != nil { + return nil, services.HandleGetError("RepoEvent", "id", id, err) + } + return re, nil +} + +func (s *sqlRepoEventService) Create(ctx context.Context, re *RepoEvent) (*RepoEvent, *errors.ServiceError) { + re, err := s.dao.Create(ctx, re) + if err != nil { + return nil, services.HandleCreateError("RepoEvent", err) + } + return re, nil +} + +func (s *sqlRepoEventService) FindByIDs(ctx context.Context, ids []string) (RepoEventList, *errors.ServiceError) { + items, err := s.dao.FindByIDs(ctx, ids) + if err != nil { + return nil, errors.GeneralError("unable to find repo events: %s", err) + } + return items, nil +} + +func (s *sqlRepoEventService) All(ctx context.Context) (RepoEventList, *errors.ServiceError) { + items, err := s.dao.All(ctx) + if err != nil { + return nil, errors.GeneralError("unable to get all repo events: %s", err) + } + return items, nil +} diff --git a/components/ambient-api-server/plugins/repoFindings/dao.go b/components/ambient-api-server/plugins/repoFindings/dao.go new file mode 100644 index 000000000..c43e6d66b --- /dev/null +++ 
b/components/ambient-api-server/plugins/repoFindings/dao.go @@ -0,0 +1,94 @@ +package repoFindings + +import ( + "context" + + "gorm.io/gorm/clause" + + "github.com/openshift-online/rh-trex-ai/pkg/api" + "github.com/openshift-online/rh-trex-ai/pkg/db" +) + +type RepoFindingDao interface { + Get(ctx context.Context, id string) (*RepoFinding, error) + Create(ctx context.Context, rf *RepoFinding) (*RepoFinding, error) + Replace(ctx context.Context, rf *RepoFinding) (*RepoFinding, error) + Delete(ctx context.Context, id string) error + FindByIDs(ctx context.Context, ids []string) (RepoFindingList, error) + All(ctx context.Context) (RepoFindingList, error) + LookupProjectID(ctx context.Context, intelligenceID string) (string, error) +} + +var _ RepoFindingDao = &sqlRepoFindingDao{} + +type sqlRepoFindingDao struct { + sessionFactory *db.SessionFactory +} + +func NewRepoFindingDao(sessionFactory *db.SessionFactory) RepoFindingDao { + return &sqlRepoFindingDao{sessionFactory: sessionFactory} +} + +func (d *sqlRepoFindingDao) Get(ctx context.Context, id string) (*RepoFinding, error) { + g2 := (*d.sessionFactory).New(ctx) + var rf RepoFinding + if err := g2.Take(&rf, "id = ?", id).Error; err != nil { + return nil, err + } + return &rf, nil +} + +func (d *sqlRepoFindingDao) Create(ctx context.Context, rf *RepoFinding) (*RepoFinding, error) { + g2 := (*d.sessionFactory).New(ctx) + if err := g2.Omit(clause.Associations).Create(rf).Error; err != nil { + db.MarkForRollback(ctx, err) + return nil, err + } + return rf, nil +} + +func (d *sqlRepoFindingDao) Replace(ctx context.Context, rf *RepoFinding) (*RepoFinding, error) { + g2 := (*d.sessionFactory).New(ctx) + if err := g2.Omit(clause.Associations).Save(rf).Error; err != nil { + db.MarkForRollback(ctx, err) + return nil, err + } + return rf, nil +} + +func (d *sqlRepoFindingDao) Delete(ctx context.Context, id string) error { + g2 := (*d.sessionFactory).New(ctx) + if err := g2.Omit(clause.Associations).Delete(&RepoFinding{Meta: 
api.Meta{ID: id}}).Error; err != nil { + db.MarkForRollback(ctx, err) + return err + } + return nil +} + +func (d *sqlRepoFindingDao) FindByIDs(ctx context.Context, ids []string) (RepoFindingList, error) { + g2 := (*d.sessionFactory).New(ctx) + items := RepoFindingList{} + if err := g2.Where("id in (?)", ids).Find(&items).Error; err != nil { + return nil, err + } + return items, nil +} + +func (d *sqlRepoFindingDao) All(ctx context.Context) (RepoFindingList, error) { + g2 := (*d.sessionFactory).New(ctx) + items := RepoFindingList{} + if err := g2.Find(&items).Error; err != nil { + return nil, err + } + return items, nil +} + +func (d *sqlRepoFindingDao) LookupProjectID(ctx context.Context, intelligenceID string) (string, error) { + g2 := (*d.sessionFactory).New(ctx) + var projectID string + err := g2.Table("repo_intelligences"). + Select("project_id"). + Where("id = ?", intelligenceID). + Take(&projectID).Error + return projectID, err +} diff --git a/components/ambient-api-server/plugins/repoFindings/handler.go b/components/ambient-api-server/plugins/repoFindings/handler.go new file mode 100644 index 000000000..f7b44c566 --- /dev/null +++ b/components/ambient-api-server/plugins/repoFindings/handler.go @@ -0,0 +1,152 @@ +package repoFindings + +import ( + "net/http" + "strings" + + "github.com/gorilla/mux" + + "github.com/openshift-online/rh-trex-ai/pkg/errors" + "github.com/openshift-online/rh-trex-ai/pkg/handlers" + "github.com/openshift-online/rh-trex-ai/pkg/services" +) + +type repoFindingHandler struct { + service RepoFindingService + generic services.GenericService +} + +func NewRepoFindingHandler(svc RepoFindingService, generic services.GenericService) *repoFindingHandler { + return &repoFindingHandler{ + service: svc, + generic: generic, + } +} + +func (h repoFindingHandler) Create(w http.ResponseWriter, r *http.Request) { + var body RepoFindingAPI + cfg := &handlers.HandlerConfig{ + Body: &body, + Validators: []handlers.Validate{ + 
handlers.ValidateEmpty(&body, "ID", "id"), + }, + Action: func() (interface{}, *errors.ServiceError) { + ctx := r.Context() + model := ConvertRepoFinding(body) + model, err := h.service.Create(ctx, model) + if err != nil { + return nil, err + } + return PresentRepoFinding(model), nil + }, + ErrorHandler: handlers.HandleError, + } + handlers.Handle(w, r, cfg, http.StatusCreated) +} + +func (h repoFindingHandler) Patch(w http.ResponseWriter, r *http.Request) { + var patch RepoFindingPatchRequest + cfg := &handlers.HandlerConfig{ + Body: &patch, + Validators: []handlers.Validate{}, + Action: func() (interface{}, *errors.ServiceError) { + ctx := r.Context() + id := mux.Vars(r)["id"] + found, err := h.service.Get(ctx, id) + if err != nil { + return nil, err + } + + if patch.Status != nil { + found.Status = *patch.Status + } + if patch.Severity != nil { + found.Severity = patch.Severity + } + if patch.Title != nil { + found.Title = *patch.Title + } + if patch.Body != nil { + found.Body = *patch.Body + } + if patch.ResolvedBy != nil { + found.ResolvedBy = patch.ResolvedBy + } + if patch.ResolvedReason != nil { + found.ResolvedReason = patch.ResolvedReason + } + + model, err := h.service.Replace(ctx, found) + if err != nil { + return nil, err + } + return PresentRepoFinding(model), nil + }, + ErrorHandler: handlers.HandleError, + } + handlers.Handle(w, r, cfg, http.StatusOK) +} + +func (h repoFindingHandler) List(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + ctx := r.Context() + listArgs := services.NewListArguments(r.URL.Query()) + + // Findings must be scoped to an intelligence record to prevent + // cross-tenant data leaks (RepoFinding has no project_id column). 
+ if !strings.Contains(listArgs.Search, "intelligence_id") { + return nil, errors.Validation("intelligence_id filter is required (findings must be scoped to an intelligence record)") + } + + var items []RepoFinding + paging, err := h.generic.List(ctx, "id", listArgs, &items) + if err != nil { + return nil, err + } + + list := RepoFindingListAPI{ + Kind: "RepoFindingList", + Page: int32(paging.Page), + Size: int32(paging.Size), + Total: int32(paging.Total), + Items: []RepoFindingAPI{}, + } + for _, item := range items { + list.Items = append(list.Items, PresentRepoFinding(&item)) + } + return list, nil + }, + } + handlers.HandleList(w, r, cfg) +} + +func (h repoFindingHandler) Get(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + id := mux.Vars(r)["id"] + ctx := r.Context() + rf, err := h.service.Get(ctx, id) + if err != nil { + return nil, err + } + return PresentRepoFinding(rf), nil + }, + } + handlers.HandleGet(w, r, cfg) +} + +func (h repoFindingHandler) Delete(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + id := mux.Vars(r)["id"] + ctx := r.Context() + err := h.service.Delete(ctx, id) + if err != nil { + return nil, err + } + return nil, nil + }, + } + handlers.HandleDelete(w, r, cfg, http.StatusNoContent) +} diff --git a/components/ambient-api-server/plugins/repoFindings/migration.go b/components/ambient-api-server/plugins/repoFindings/migration.go new file mode 100644 index 000000000..68de3b2d1 --- /dev/null +++ b/components/ambient-api-server/plugins/repoFindings/migration.go @@ -0,0 +1,53 @@ +package repoFindings + +import ( + "gorm.io/gorm" + + "github.com/go-gormigrate/gormigrate/v2" + "github.com/openshift-online/rh-trex-ai/pkg/db" +) + +func migration() *gormigrate.Migration { + type RepoFinding struct { + db.Model + IntelligenceID string `gorm:"not null"` + FilePath string `gorm:"not 
null"` + Category string `gorm:"not null"` + Status string `gorm:"not null;default:'active'"` + Title string `gorm:"not null"` + Body string `gorm:"type:text;not null"` + Severity *string + Confidence *float64 + SourceType string `gorm:"not null"` + SourceRef *string + SessionID *string + AgentID *string + ResolvedBy *string + ResolvedReason *string + } + + return &gormigrate.Migration{ + ID: "202604091201", + Migrate: func(tx *gorm.DB) error { + if err := tx.AutoMigrate(&RepoFinding{}); err != nil { + return err + } + stmts := []string{ + `CREATE INDEX IF NOT EXISTS idx_rf_intelligence_id ON repo_findings(intelligence_id)`, + `CREATE INDEX IF NOT EXISTS idx_rf_file_path ON repo_findings(file_path)`, + `CREATE INDEX IF NOT EXISTS idx_rf_category ON repo_findings(category)`, + `CREATE INDEX IF NOT EXISTS idx_rf_status ON repo_findings(status)`, + `CREATE INDEX IF NOT EXISTS idx_rf_session_id ON repo_findings(session_id)`, + } + for _, s := range stmts { + if err := tx.Exec(s).Error; err != nil { + return err + } + } + return nil + }, + Rollback: func(tx *gorm.DB) error { + return tx.Migrator().DropTable("repo_findings") + }, + } +} diff --git a/components/ambient-api-server/plugins/repoFindings/model.go b/components/ambient-api-server/plugins/repoFindings/model.go new file mode 100644 index 000000000..3e5c2deaa --- /dev/null +++ b/components/ambient-api-server/plugins/repoFindings/model.go @@ -0,0 +1,62 @@ +package repoFindings + +import ( + "github.com/openshift-online/rh-trex-ai/pkg/api" + "gorm.io/gorm" +) + +type RepoFinding struct { + api.Meta + + // Parent + IntelligenceID string `json:"intelligence_id" gorm:"not null;index"` + + // Scope + FilePath string `json:"file_path" gorm:"not null;index"` + Category string `json:"category" gorm:"not null;index"` + Status string `json:"status" gorm:"not null;default:'active';index"` + + // Content + Title string `json:"title" gorm:"not null"` + Body string `json:"body" gorm:"type:text;not null"` + Severity *string 
`json:"severity,omitempty"` + Confidence *float64 `json:"confidence,omitempty"` + + // Provenance + SourceType string `json:"source_type" gorm:"not null"` + SourceRef *string `json:"source_ref,omitempty"` + SessionID *string `json:"session_id,omitempty" gorm:"index"` + AgentID *string `json:"agent_id,omitempty"` + + // Resolution + ResolvedBy *string `json:"resolved_by,omitempty"` + ResolvedReason *string `json:"resolved_reason,omitempty"` +} + +type RepoFindingList []*RepoFinding +type RepoFindingIndex map[string]*RepoFinding + +func (l RepoFindingList) Index() RepoFindingIndex { + index := RepoFindingIndex{} + for _, o := range l { + index[o.ID] = o + } + return index +} + +func (d *RepoFinding) BeforeCreate(tx *gorm.DB) error { + d.ID = api.NewID() + if d.Status == "" { + d.Status = "active" + } + return nil +} + +type RepoFindingPatchRequest struct { + Status *string `json:"status,omitempty"` + Severity *string `json:"severity,omitempty"` + ResolvedBy *string `json:"resolved_by,omitempty"` + ResolvedReason *string `json:"resolved_reason,omitempty"` + Title *string `json:"title,omitempty"` + Body *string `json:"body,omitempty"` +} diff --git a/components/ambient-api-server/plugins/repoFindings/plugin.go b/components/ambient-api-server/plugins/repoFindings/plugin.go new file mode 100644 index 000000000..7c467f37e --- /dev/null +++ b/components/ambient-api-server/plugins/repoFindings/plugin.go @@ -0,0 +1,88 @@ +package repoFindings + +import ( + "net/http" + + "github.com/gorilla/mux" + "github.com/openshift-online/rh-trex-ai/pkg/api" + "github.com/openshift-online/rh-trex-ai/pkg/api/presenters" + "github.com/openshift-online/rh-trex-ai/pkg/auth" + "github.com/openshift-online/rh-trex-ai/pkg/controllers" + "github.com/openshift-online/rh-trex-ai/pkg/db" + "github.com/openshift-online/rh-trex-ai/pkg/environments" + "github.com/openshift-online/rh-trex-ai/pkg/registry" + pkgserver "github.com/openshift-online/rh-trex-ai/pkg/server" + 
"github.com/openshift-online/rh-trex-ai/plugins/events" + "github.com/openshift-online/rh-trex-ai/plugins/generic" + + pkgrbac "github.com/ambient-code/platform/components/ambient-api-server/plugins/rbac" + "github.com/ambient-code/platform/components/ambient-api-server/plugins/repoEvents" +) + +type ServiceLocator func() RepoFindingService + +func NewServiceLocator(env *environments.Env) ServiceLocator { + return func() RepoFindingService { + return NewRepoFindingService( + db.NewAdvisoryLockFactory(env.Database.SessionFactory), + NewRepoFindingDao(&env.Database.SessionFactory), + events.Service(&env.Services), + repoEvents.Service(&env.Services), + ) + } +} + +func Service(s *environments.Services) RepoFindingService { + if s == nil { + return nil + } + if obj := s.GetService("RepoFindings"); obj != nil { + locator := obj.(ServiceLocator) + return locator() + } + return nil +} + +func init() { + registry.RegisterService("RepoFindings", func(env interface{}) interface{} { + return NewServiceLocator(env.(*environments.Env)) + }) + + pkgserver.RegisterRoutes("repo_findings", func(apiV1Router *mux.Router, services pkgserver.ServicesInterface, authMiddleware environments.JWTMiddleware, authzMiddleware auth.AuthorizationMiddleware) { + envServices := services.(*environments.Services) + if dbAuthz := pkgrbac.Middleware(envServices); dbAuthz != nil { + authzMiddleware = dbAuthz + } + svc := Service(envServices) + handler := NewRepoFindingHandler(svc, generic.Service(envServices)) + + router := apiV1Router.PathPrefix("/repo_findings").Subrouter() + router.HandleFunc("", handler.List).Methods(http.MethodGet) + router.HandleFunc("", handler.Create).Methods(http.MethodPost) + router.HandleFunc("/{id}", handler.Get).Methods(http.MethodGet) + router.HandleFunc("/{id}", handler.Patch).Methods(http.MethodPatch) + router.HandleFunc("/{id}", handler.Delete).Methods(http.MethodDelete) + router.Use(authMiddleware.AuthenticateAccountJWT) + router.Use(authzMiddleware.AuthorizeApi) + 
}) + + pkgserver.RegisterController("RepoFindings", func(manager *controllers.KindControllerManager, services pkgserver.ServicesInterface) { + svc := Service(services.(*environments.Services)) + + manager.Add(&controllers.ControllerConfig{ + Source: "RepoFindings", + Handlers: map[api.EventType][]controllers.ControllerHandlerFunc{ + api.CreateEventType: {svc.OnUpsert}, + api.UpdateEventType: {svc.OnUpsert}, + api.DeleteEventType: {svc.OnDelete}, + }, + }) + }) + + presenters.RegisterPath(RepoFinding{}, "repo_findings") + presenters.RegisterPath(&RepoFinding{}, "repo_findings") + presenters.RegisterKind(RepoFinding{}, "RepoFinding") + presenters.RegisterKind(&RepoFinding{}, "RepoFinding") + + db.RegisterMigration(migration()) +} diff --git a/components/ambient-api-server/plugins/repoFindings/presenter.go b/components/ambient-api-server/plugins/repoFindings/presenter.go new file mode 100644 index 000000000..b53c5b71a --- /dev/null +++ b/components/ambient-api-server/plugins/repoFindings/presenter.go @@ -0,0 +1,94 @@ +package repoFindings + +import ( + "time" + + "github.com/openshift-online/rh-trex-ai/pkg/api/presenters" +) + +type RepoFindingAPI struct { + ID *string `json:"id,omitempty"` + Kind *string `json:"kind,omitempty"` + Href *string `json:"href,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` + + IntelligenceID string `json:"intelligence_id"` + FilePath string `json:"file_path"` + Category string `json:"category"` + Status string `json:"status"` + Title string `json:"title"` + Body string `json:"body"` + Severity *string `json:"severity,omitempty"` + Confidence *float64 `json:"confidence,omitempty"` + SourceType string `json:"source_type"` + SourceRef *string `json:"source_ref,omitempty"` + SessionID *string `json:"session_id,omitempty"` + AgentID *string `json:"agent_id,omitempty"` + ResolvedBy *string `json:"resolved_by,omitempty"` + ResolvedReason *string 
`json:"resolved_reason,omitempty"` +} + +type RepoFindingListAPI struct { + Kind string `json:"kind"` + Page int32 `json:"page"` + Size int32 `json:"size"` + Total int32 `json:"total"` + Items []RepoFindingAPI `json:"items"` +} + +func ptrTime(v time.Time) *time.Time { return &v } + +func ConvertRepoFinding(a RepoFindingAPI) *RepoFinding { + rf := &RepoFinding{} + if a.ID != nil { + rf.ID = *a.ID + } + rf.IntelligenceID = a.IntelligenceID + rf.FilePath = a.FilePath + rf.Category = a.Category + rf.Status = a.Status + rf.Title = a.Title + rf.Body = a.Body + rf.Severity = a.Severity + rf.Confidence = a.Confidence + rf.SourceType = a.SourceType + rf.SourceRef = a.SourceRef + rf.SessionID = a.SessionID + rf.AgentID = a.AgentID + rf.ResolvedBy = a.ResolvedBy + rf.ResolvedReason = a.ResolvedReason + if a.CreatedAt != nil { + rf.CreatedAt = *a.CreatedAt + } + if a.UpdatedAt != nil { + rf.UpdatedAt = *a.UpdatedAt + } + return rf +} + +func PresentRepoFinding(rf *RepoFinding) RepoFindingAPI { + ref := presenters.PresentReference(rf.ID, rf) + return RepoFindingAPI{ + ID: ref.Id, + Kind: ref.Kind, + Href: ref.Href, + CreatedAt: ptrTime(rf.CreatedAt), + UpdatedAt: ptrTime(rf.UpdatedAt), + + IntelligenceID: rf.IntelligenceID, + FilePath: rf.FilePath, + Category: rf.Category, + Status: rf.Status, + Title: rf.Title, + Body: rf.Body, + Severity: rf.Severity, + Confidence: rf.Confidence, + SourceType: rf.SourceType, + SourceRef: rf.SourceRef, + SessionID: rf.SessionID, + AgentID: rf.AgentID, + ResolvedBy: rf.ResolvedBy, + ResolvedReason: rf.ResolvedReason, + } +} diff --git a/components/ambient-api-server/plugins/repoFindings/service.go b/components/ambient-api-server/plugins/repoFindings/service.go new file mode 100644 index 000000000..ac97506a7 --- /dev/null +++ b/components/ambient-api-server/plugins/repoFindings/service.go @@ -0,0 +1,175 @@ +package repoFindings + +import ( + "context" + + "github.com/openshift-online/rh-trex-ai/pkg/api" + 
"github.com/openshift-online/rh-trex-ai/pkg/db" + "github.com/openshift-online/rh-trex-ai/pkg/errors" + "github.com/openshift-online/rh-trex-ai/pkg/logger" + "github.com/openshift-online/rh-trex-ai/pkg/services" + + "github.com/ambient-code/platform/components/ambient-api-server/plugins/repoEvents" +) + +const repoFindingsLockType db.LockType = "repo_findings" + +type RepoFindingService interface { + Get(ctx context.Context, id string) (*RepoFinding, *errors.ServiceError) + Create(ctx context.Context, rf *RepoFinding) (*RepoFinding, *errors.ServiceError) + Replace(ctx context.Context, rf *RepoFinding) (*RepoFinding, *errors.ServiceError) + Delete(ctx context.Context, id string) *errors.ServiceError + All(ctx context.Context) (RepoFindingList, *errors.ServiceError) + FindByIDs(ctx context.Context, ids []string) (RepoFindingList, *errors.ServiceError) + + OnUpsert(ctx context.Context, id string) error + OnDelete(ctx context.Context, id string) error +} + +func NewRepoFindingService(lockFactory db.LockFactory, dao RepoFindingDao, events services.EventService, auditSvc repoEvents.RepoEventService) RepoFindingService { + return &sqlRepoFindingService{ + lockFactory: lockFactory, + dao: dao, + events: events, + auditSvc: auditSvc, + } +} + +var _ RepoFindingService = &sqlRepoFindingService{} + +type sqlRepoFindingService struct { + lockFactory db.LockFactory + dao RepoFindingDao + events services.EventService + auditSvc repoEvents.RepoEventService +} + +func (s *sqlRepoFindingService) logAuditEvent(ctx context.Context, rf *RepoFinding, action string) { + if s.auditSvc == nil { + return + } + actorType := "system" + actorID := "api-server" + if rf.SessionID != nil { + actorType = "session" + actorID = *rf.SessionID + } + projectID, lookupErr := s.dao.LookupProjectID(ctx, rf.IntelligenceID) + if lookupErr != nil { + projectID = rf.IntelligenceID // fallback to intelligence_id if lookup fails + } + _, _ = s.auditSvc.Create(ctx, &repoEvents.RepoEvent{ + ResourceType: 
"finding", + ResourceID: rf.ID, + Action: action, + ActorType: actorType, + ActorID: actorID, + ProjectID: projectID, + }) +} + +func (s *sqlRepoFindingService) OnUpsert(ctx context.Context, id string) error { + log := logger.NewLogger(ctx) + log.Infof("RepoFinding upserted: %s", id) + return nil +} + +func (s *sqlRepoFindingService) OnDelete(ctx context.Context, id string) error { + log := logger.NewLogger(ctx) + log.Infof("RepoFinding deleted: %s", id) + return nil +} + +func (s *sqlRepoFindingService) Get(ctx context.Context, id string) (*RepoFinding, *errors.ServiceError) { + rf, err := s.dao.Get(ctx, id) + if err != nil { + return nil, services.HandleGetError("RepoFinding", "id", id, err) + } + return rf, nil +} + +func (s *sqlRepoFindingService) Create(ctx context.Context, rf *RepoFinding) (*RepoFinding, *errors.ServiceError) { + rf, err := s.dao.Create(ctx, rf) + if err != nil { + return nil, services.HandleCreateError("RepoFinding", err) + } + + _, evErr := s.events.Create(ctx, &api.Event{ + Source: "RepoFindings", + SourceID: rf.ID, + EventType: api.CreateEventType, + }) + if evErr != nil { + return nil, services.HandleCreateError("RepoFinding", evErr) + } + + s.logAuditEvent(ctx, rf, "created") + + return rf, nil +} + +func (s *sqlRepoFindingService) Replace(ctx context.Context, rf *RepoFinding) (*RepoFinding, *errors.ServiceError) { + lockOwnerID, err := s.lockFactory.NewAdvisoryLock(ctx, rf.ID, repoFindingsLockType) + if err != nil { + return nil, errors.DatabaseAdvisoryLock(err) + } + defer s.lockFactory.Unlock(ctx, lockOwnerID) + + rf, err = s.dao.Replace(ctx, rf) + if err != nil { + return nil, services.HandleUpdateError("RepoFinding", err) + } + + _, evErr := s.events.Create(ctx, &api.Event{ + Source: "RepoFindings", + SourceID: rf.ID, + EventType: api.UpdateEventType, + }) + if evErr != nil { + return nil, services.HandleUpdateError("RepoFinding", evErr) + } + + s.logAuditEvent(ctx, rf, "updated") + + return rf, nil +} + +func (s 
*sqlRepoFindingService) Delete(ctx context.Context, id string) *errors.ServiceError { + rf, getErr := s.dao.Get(ctx, id) + if getErr != nil { + return services.HandleDeleteError("RepoFinding", errors.GeneralError("unable to delete repo finding: %s", getErr)) + } + + if err := s.dao.Delete(ctx, id); err != nil { + return services.HandleDeleteError("RepoFinding", errors.GeneralError("unable to delete repo finding: %s", err)) + } + + _, evErr := s.events.Create(ctx, &api.Event{ + Source: "RepoFindings", + SourceID: id, + EventType: api.DeleteEventType, + }) + if evErr != nil { + return services.HandleDeleteError("RepoFinding", evErr) + } + + s.logAuditEvent(ctx, rf, "deleted") + + return nil +} + +func (s *sqlRepoFindingService) FindByIDs(ctx context.Context, ids []string) (RepoFindingList, *errors.ServiceError) { + items, err := s.dao.FindByIDs(ctx, ids) + if err != nil { + return nil, errors.GeneralError("unable to find repo findings: %s", err) + } + return items, nil +} + +func (s *sqlRepoFindingService) All(ctx context.Context) (RepoFindingList, *errors.ServiceError) { + items, err := s.dao.All(ctx) + if err != nil { + return nil, errors.GeneralError("unable to get all repo findings: %s", err) + } + return items, nil +} diff --git a/components/ambient-api-server/plugins/repoIntelligences/dao.go b/components/ambient-api-server/plugins/repoIntelligences/dao.go new file mode 100644 index 000000000..fa24dda49 --- /dev/null +++ b/components/ambient-api-server/plugins/repoIntelligences/dao.go @@ -0,0 +1,102 @@ +package repoIntelligences + +import ( + "context" + + "gorm.io/gorm/clause" + + "github.com/openshift-online/rh-trex-ai/pkg/api" + "github.com/openshift-online/rh-trex-ai/pkg/db" +) + +type RepoIntelligenceDao interface { + Get(ctx context.Context, id string) (*RepoIntelligence, error) + Create(ctx context.Context, ri *RepoIntelligence) (*RepoIntelligence, error) + Replace(ctx context.Context, ri *RepoIntelligence) (*RepoIntelligence, error) + Delete(ctx 
context.Context, id string) error + FindByIDs(ctx context.Context, ids []string) (RepoIntelligenceList, error) + All(ctx context.Context) (RepoIntelligenceList, error) + GetByProjectAndRepo(ctx context.Context, projectID, repoURL string) (*RepoIntelligence, error) +} + +var _ RepoIntelligenceDao = &sqlRepoIntelligenceDao{} + +type sqlRepoIntelligenceDao struct { + sessionFactory *db.SessionFactory +} + +func NewRepoIntelligenceDao(sessionFactory *db.SessionFactory) RepoIntelligenceDao { + return &sqlRepoIntelligenceDao{sessionFactory: sessionFactory} +} + +func (d *sqlRepoIntelligenceDao) Get(ctx context.Context, id string) (*RepoIntelligence, error) { + g2 := (*d.sessionFactory).New(ctx) + var ri RepoIntelligence + if err := g2.Take(&ri, "id = ?", id).Error; err != nil { + return nil, err + } + return &ri, nil +} + +func (d *sqlRepoIntelligenceDao) Create(ctx context.Context, ri *RepoIntelligence) (*RepoIntelligence, error) { + g2 := (*d.sessionFactory).New(ctx) + if err := g2.Omit(clause.Associations).Create(ri).Error; err != nil { + db.MarkForRollback(ctx, err) + return nil, err + } + return ri, nil +} + +func (d *sqlRepoIntelligenceDao) Replace(ctx context.Context, ri *RepoIntelligence) (*RepoIntelligence, error) { + g2 := (*d.sessionFactory).New(ctx) + if err := g2.Omit(clause.Associations).Save(ri).Error; err != nil { + db.MarkForRollback(ctx, err) + return nil, err + } + return ri, nil +} + +func (d *sqlRepoIntelligenceDao) Delete(ctx context.Context, id string) error { + g2 := (*d.sessionFactory).New(ctx) + + // Cascade soft-delete child findings to prevent orphans. + if err := g2.Exec( + "UPDATE repo_findings SET deleted_at = NOW() WHERE intelligence_id = ? 
AND deleted_at IS NULL", id, + ).Error; err != nil { + db.MarkForRollback(ctx, err) + return err + } + + if err := g2.Omit(clause.Associations).Delete(&RepoIntelligence{Meta: api.Meta{ID: id}}).Error; err != nil { + db.MarkForRollback(ctx, err) + return err + } + return nil +} + +func (d *sqlRepoIntelligenceDao) FindByIDs(ctx context.Context, ids []string) (RepoIntelligenceList, error) { + g2 := (*d.sessionFactory).New(ctx) + items := RepoIntelligenceList{} + if err := g2.Where("id in (?)", ids).Find(&items).Error; err != nil { + return nil, err + } + return items, nil +} + +func (d *sqlRepoIntelligenceDao) All(ctx context.Context) (RepoIntelligenceList, error) { + g2 := (*d.sessionFactory).New(ctx) + items := RepoIntelligenceList{} + if err := g2.Find(&items).Error; err != nil { + return nil, err + } + return items, nil +} + +func (d *sqlRepoIntelligenceDao) GetByProjectAndRepo(ctx context.Context, projectID, repoURL string) (*RepoIntelligence, error) { + g2 := (*d.sessionFactory).New(ctx) + var ri RepoIntelligence + if err := g2.Where("project_id = ? 
AND repo_url = ?", projectID, repoURL).Take(&ri).Error; err != nil { + return nil, err + } + return &ri, nil +} diff --git a/components/ambient-api-server/plugins/repoIntelligences/handler.go b/components/ambient-api-server/plugins/repoIntelligences/handler.go new file mode 100644 index 000000000..a3f8e5aa5 --- /dev/null +++ b/components/ambient-api-server/plugins/repoIntelligences/handler.go @@ -0,0 +1,354 @@ +package repoIntelligences + +import ( + "fmt" + "net/http" + "strconv" + "strings" + + "github.com/gorilla/mux" + + "github.com/openshift-online/rh-trex-ai/pkg/errors" + "github.com/openshift-online/rh-trex-ai/pkg/handlers" + "github.com/openshift-online/rh-trex-ai/pkg/services" + + "github.com/ambient-code/platform/components/ambient-api-server/plugins/common" + "github.com/ambient-code/platform/components/ambient-api-server/plugins/repoFindings" +) + +type repoIntelligenceHandler struct { + service RepoIntelligenceService + generic services.GenericService + findingsGeneric services.GenericService +} + +func NewRepoIntelligenceHandler(svc RepoIntelligenceService, generic services.GenericService, findingsGeneric services.GenericService) *repoIntelligenceHandler { + return &repoIntelligenceHandler{ + service: svc, + generic: generic, + findingsGeneric: findingsGeneric, + } +} + +func (h repoIntelligenceHandler) Create(w http.ResponseWriter, r *http.Request) { + var body RepoIntelligenceAPI + cfg := &handlers.HandlerConfig{ + Body: &body, + Validators: []handlers.Validate{ + handlers.ValidateEmpty(&body, "ID", "id"), + }, + Action: func() (interface{}, *errors.ServiceError) { + ctx := r.Context() + model := ConvertRepoIntelligence(body) + model, err := h.service.Create(ctx, model) + if err != nil { + return nil, err + } + return PresentRepoIntelligence(model), nil + }, + ErrorHandler: handlers.HandleError, + } + handlers.Handle(w, r, cfg, http.StatusCreated) +} + +func (h repoIntelligenceHandler) Patch(w http.ResponseWriter, r *http.Request) { + var patch 
RepoIntelligencePatchRequest + cfg := &handlers.HandlerConfig{ + Body: &patch, + Validators: []handlers.Validate{}, + Action: func() (interface{}, *errors.ServiceError) { + ctx := r.Context() + id := mux.Vars(r)["id"] + found, err := h.service.Get(ctx, id) + if err != nil { + return nil, err + } + + if patch.Summary != nil { + found.Summary = *patch.Summary + } + if patch.Language != nil { + found.Language = *patch.Language + } + if patch.Framework != nil { + found.Framework = patch.Framework + } + if patch.BuildSystem != nil { + found.BuildSystem = patch.BuildSystem + } + if patch.TestStrategy != nil { + found.TestStrategy = patch.TestStrategy + } + if patch.Architecture != nil { + found.Architecture = patch.Architecture + } + if patch.Conventions != nil { + found.Conventions = patch.Conventions + } + if patch.Caveats != nil { + found.Caveats = patch.Caveats + } + if patch.Confidence != nil { + found.Confidence = patch.Confidence + } + if patch.RepoBranch != nil { + found.RepoBranch = *patch.RepoBranch + } + + found.Version++ + + model, err := h.service.Replace(ctx, found) + if err != nil { + return nil, err + } + return PresentRepoIntelligence(model), nil + }, + ErrorHandler: handlers.HandleError, + } + handlers.Handle(w, r, cfg, http.StatusOK) +} + +func (h repoIntelligenceHandler) List(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + ctx := r.Context() + listArgs := services.NewListArguments(r.URL.Query()) + + if serr := common.ApplyProjectScope(r, listArgs); serr != nil { + return nil, serr + } + + var items []RepoIntelligence + paging, err := h.generic.List(ctx, "id", listArgs, &items) + if err != nil { + return nil, err + } + + list := RepoIntelligenceListAPI{ + Kind: "RepoIntelligenceList", + Page: int32(paging.Page), + Size: int32(paging.Size), + Total: int32(paging.Total), + Items: []RepoIntelligenceAPI{}, + } + for _, item := range items { + list.Items = 
append(list.Items, PresentRepoIntelligence(&item)) + } + return list, nil + }, + } + handlers.HandleList(w, r, cfg) +} + +func (h repoIntelligenceHandler) Get(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + id := mux.Vars(r)["id"] + ctx := r.Context() + ri, err := h.service.Get(ctx, id) + if err != nil { + return nil, err + } + return PresentRepoIntelligence(ri), nil + }, + } + handlers.HandleGet(w, r, cfg) +} + +func (h repoIntelligenceHandler) Delete(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + id := mux.Vars(r)["id"] + ctx := r.Context() + err := h.service.Delete(ctx, id) + if err != nil { + return nil, err + } + return nil, nil + }, + } + handlers.HandleDelete(w, r, cfg, http.StatusNoContent) +} + +func (h repoIntelligenceHandler) DeleteByLookup(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + projectID := r.URL.Query().Get("project_id") + repoURL := r.URL.Query().Get("repo_url") + if projectID == "" || repoURL == "" { + return nil, errors.Validation("project_id and repo_url query parameters are required") + } + ctx := r.Context() + ri, err := h.service.GetByProjectAndRepo(ctx, projectID, repoURL) + if err != nil { + return nil, err + } + deleteErr := h.service.Delete(ctx, ri.ID) + if deleteErr != nil { + return nil, deleteErr + } + return nil, nil + }, + } + handlers.HandleDelete(w, r, cfg, http.StatusNoContent) +} + +func (h repoIntelligenceHandler) Lookup(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + projectID := r.URL.Query().Get("project_id") + repoURL := r.URL.Query().Get("repo_url") + if projectID == "" || repoURL == "" { + return nil, errors.Validation("project_id and repo_url query parameters are required") + } + 
ctx := r.Context() + ri, err := h.service.GetByProjectAndRepo(ctx, projectID, repoURL) + if err != nil { + return nil, err + } + return PresentRepoIntelligence(ri), nil + }, + } + handlers.HandleGet(w, r, cfg) +} + +func (h repoIntelligenceHandler) ListFindings(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + id := mux.Vars(r)["id"] + ctx := r.Context() + + // Verify the intelligence record exists + _, err := h.service.Get(ctx, id) + if err != nil { + return nil, err + } + + listArgs := services.NewListArguments(r.URL.Query()) + + // Inject parent filter into search (follows agents/subresource_handler pattern) + filter := fmt.Sprintf("intelligence_id = '%s'", strings.ReplaceAll(id, "'", "''")) + if listArgs.Search != "" { + listArgs.Search = filter + " and (" + listArgs.Search + ")" + } else { + listArgs.Search = filter + } + + var items []repoFindings.RepoFinding + paging, serr := h.findingsGeneric.List(ctx, "id", listArgs, &items) + if serr != nil { + return nil, serr + } + + list := repoFindings.RepoFindingListAPI{ + Kind: "RepoFindingList", + Page: int32(paging.Page), + Size: int32(paging.Size), + Total: int32(paging.Total), + Items: []repoFindings.RepoFindingAPI{}, + } + for _, item := range items { + list.Items = append(list.Items, repoFindings.PresentRepoFinding(&item)) + } + return list, nil + }, + } + handlers.HandleList(w, r, cfg) +} + +func (h repoIntelligenceHandler) Context(w http.ResponseWriter, r *http.Request) { + cfg := &handlers.HandlerConfig{ + Action: func() (interface{}, *errors.ServiceError) { + projectID := r.URL.Query().Get("project_id") + repoURLsParam := r.URL.Query().Get("repo_urls") + if projectID == "" || repoURLsParam == "" { + return nil, errors.Validation("project_id and repo_urls query parameters are required") + } + + repoURLs := strings.Split(repoURLsParam, ",") + maxEntries, _ := strconv.Atoi(r.URL.Query().Get("max_entries")) + if maxEntries <= 0 { + maxEntries = 20 + } + + ctx 
:= r.Context() + var allIntel []RepoIntelligenceAPI + var allFindings []repoFindings.RepoFindingAPI + + for _, repoURL := range repoURLs { + repoURL = strings.TrimSpace(repoURL) + if repoURL == "" { + continue + } + + intel, err := h.service.GetByProjectAndRepo(ctx, projectID, repoURL) + if err != nil { + continue // skip repos without intelligence + } + allIntel = append(allIntel, PresentRepoIntelligence(intel)) + + // Fetch active findings for this intelligence, capped at maxEntries + listArgs := services.NewListArguments(r.URL.Query()) + listArgs.Search = fmt.Sprintf("intelligence_id = '%s' and status = 'active'", intel.ID) + listArgs.Size = int64(maxEntries) + + var findings []repoFindings.RepoFinding + _, serr := h.findingsGeneric.List(ctx, "id", listArgs, &findings) + if serr != nil { + continue + } + for _, f := range findings { + allFindings = append(allFindings, repoFindings.PresentRepoFinding(&f)) + } + } + + if allIntel == nil { + allIntel = []RepoIntelligenceAPI{} + } + if allFindings == nil { + allFindings = []repoFindings.RepoFindingAPI{} + } + + return map[string]interface{}{ + "intelligences": allIntel, + "findings": allFindings, + "injected_context": buildInjectedContext(allIntel, allFindings), + }, nil + }, + ErrorHandler: handlers.HandleError, + } + handlers.HandleGet(w, r, cfg) +} + +func buildInjectedContext(intels []RepoIntelligenceAPI, findings []repoFindings.RepoFindingAPI) string { + if len(intels) == 0 && len(findings) == 0 { + return "" + } + + var sb strings.Builder + sb.WriteString("\n") + sb.WriteString("## Project Intelligence\n\n") + + for _, intel := range intels { + sb.WriteString(fmt.Sprintf("### %s (%s)\n", intel.RepoURL, intel.Language)) + sb.WriteString(fmt.Sprintf("%s\n", intel.Summary)) + if intel.Caveats != nil && *intel.Caveats != "" { + sb.WriteString(fmt.Sprintf("**Caveats:** %s\n", *intel.Caveats)) + } + sb.WriteString("\n") + } + + if len(findings) > 0 { + sb.WriteString("### Active Findings\n\n") + for _, f := range 
findings { + severity := "info" + if f.Severity != nil { + severity = *f.Severity + } + sb.WriteString(fmt.Sprintf("- **[%s]** %s (`%s`): %s\n", severity, f.Title, f.FilePath, f.Body)) + } + } + + sb.WriteString("\n") + return sb.String() +} diff --git a/components/ambient-api-server/plugins/repoIntelligences/migration.go b/components/ambient-api-server/plugins/repoIntelligences/migration.go new file mode 100644 index 000000000..4f0642910 --- /dev/null +++ b/components/ambient-api-server/plugins/repoIntelligences/migration.go @@ -0,0 +1,56 @@ +package repoIntelligences + +import ( + "time" + + "gorm.io/gorm" + + "github.com/go-gormigrate/gormigrate/v2" + "github.com/openshift-online/rh-trex-ai/pkg/db" +) + +func migration() *gormigrate.Migration { + type RepoIntelligence struct { + db.Model + ProjectID string `gorm:"not null"` + RepoURL string `gorm:"not null"` + RepoBranch string `gorm:"not null;default:'main'"` + Summary string `gorm:"type:text;not null"` + Language string `gorm:"not null"` + Framework *string + BuildSystem *string + TestStrategy *string `gorm:"type:text"` + Architecture *string `gorm:"type:text"` + Conventions *string `gorm:"type:text"` + Caveats *string `gorm:"type:text"` + AnalyzedBySessionID *string + AnalyzedByAgentID *string + AnalyzedAt *time.Time + Confidence *float64 + Version int `gorm:"not null;default:1"` + } + + return &gormigrate.Migration{ + ID: "202604091200", + Migrate: func(tx *gorm.DB) error { + if err := tx.AutoMigrate(&RepoIntelligence{}); err != nil { + return err + } + stmts := []string{ + `CREATE UNIQUE INDEX IF NOT EXISTS idx_ri_project_repo ON repo_intelligences(project_id, repo_url)`, + `CREATE INDEX IF NOT EXISTS idx_ri_project_id ON repo_intelligences(project_id)`, + `CREATE INDEX IF NOT EXISTS idx_ri_repo_url ON repo_intelligences(repo_url)`, + `CREATE INDEX IF NOT EXISTS idx_ri_analyzed_by_session ON repo_intelligences(analyzed_by_session_id)`, + } + for _, s := range stmts { + if err := tx.Exec(s).Error; err != nil 
{ + return err + } + } + return nil + }, + Rollback: func(tx *gorm.DB) error { + return tx.Migrator().DropTable("repo_intelligences") + }, + } +} diff --git a/components/ambient-api-server/plugins/repoIntelligences/migration_fix_unique_index.go b/components/ambient-api-server/plugins/repoIntelligences/migration_fix_unique_index.go new file mode 100644 index 000000000..362691bd6 --- /dev/null +++ b/components/ambient-api-server/plugins/repoIntelligences/migration_fix_unique_index.go @@ -0,0 +1,39 @@ +package repoIntelligences + +import ( + "gorm.io/gorm" + + "github.com/go-gormigrate/gormigrate/v2" +) + +func migrationFixUniqueIndex() *gormigrate.Migration { + return &gormigrate.Migration{ + ID: "202604091210", + Migrate: func(tx *gorm.DB) error { + stmts := []string{ + // Drop the old unique index that doesn't exclude soft-deleted rows + `DROP INDEX IF EXISTS idx_ri_project_repo`, + // Create a partial unique index that only applies to non-deleted rows + `CREATE UNIQUE INDEX IF NOT EXISTS idx_ri_project_repo ON repo_intelligences(project_id, repo_url) WHERE deleted_at IS NULL`, + } + for _, s := range stmts { + if err := tx.Exec(s).Error; err != nil { + return err + } + } + return nil + }, + Rollback: func(tx *gorm.DB) error { + stmts := []string{ + `DROP INDEX IF EXISTS idx_ri_project_repo`, + `CREATE UNIQUE INDEX IF NOT EXISTS idx_ri_project_repo ON repo_intelligences(project_id, repo_url)`, + } + for _, s := range stmts { + if err := tx.Exec(s).Error; err != nil { + return err + } + } + return nil + }, + } +} diff --git a/components/ambient-api-server/plugins/repoIntelligences/model.go b/components/ambient-api-server/plugins/repoIntelligences/model.go new file mode 100644 index 000000000..0a2d99378 --- /dev/null +++ b/components/ambient-api-server/plugins/repoIntelligences/model.go @@ -0,0 +1,66 @@ +package repoIntelligences + +import ( + "time" + + "github.com/openshift-online/rh-trex-ai/pkg/api" + "gorm.io/gorm" +) + +type RepoIntelligence struct { + api.Meta 
+ + // Scoping + ProjectID string `json:"project_id" gorm:"not null;index"` + RepoURL string `json:"repo_url" gorm:"not null;index"` + RepoBranch string `json:"repo_branch" gorm:"not null;default:'main'"` + + // Content + Summary string `json:"summary" gorm:"type:text;not null"` + Language string `json:"language" gorm:"not null"` + Framework *string `json:"framework,omitempty"` + BuildSystem *string `json:"build_system,omitempty"` + TestStrategy *string `json:"test_strategy,omitempty" gorm:"type:text"` + Architecture *string `json:"architecture,omitempty" gorm:"type:text"` + Conventions *string `json:"conventions,omitempty" gorm:"type:text"` + Caveats *string `json:"caveats,omitempty" gorm:"type:text"` + + // Metadata + AnalyzedBySessionID *string `json:"analyzed_by_session_id,omitempty" gorm:"index"` + AnalyzedByAgentID *string `json:"analyzed_by_agent_id,omitempty"` + AnalyzedAt *time.Time `json:"analyzed_at,omitempty"` + Confidence *float64 `json:"confidence,omitempty"` + Version int `json:"version" gorm:"not null;default:1"` +} + +type RepoIntelligenceList []*RepoIntelligence +type RepoIntelligenceIndex map[string]*RepoIntelligence + +func (l RepoIntelligenceList) Index() RepoIntelligenceIndex { + index := RepoIntelligenceIndex{} + for _, o := range l { + index[o.ID] = o + } + return index +} + +func (d *RepoIntelligence) BeforeCreate(tx *gorm.DB) error { + d.ID = api.NewID() + if d.Version == 0 { + d.Version = 1 + } + return nil +} + +type RepoIntelligencePatchRequest struct { + Summary *string `json:"summary,omitempty"` + Language *string `json:"language,omitempty"` + Framework *string `json:"framework,omitempty"` + BuildSystem *string `json:"build_system,omitempty"` + TestStrategy *string `json:"test_strategy,omitempty"` + Architecture *string `json:"architecture,omitempty"` + Conventions *string `json:"conventions,omitempty"` + Caveats *string `json:"caveats,omitempty"` + Confidence *float64 `json:"confidence,omitempty"` + RepoBranch *string 
`json:"repo_branch,omitempty"` +} diff --git a/components/ambient-api-server/plugins/repoIntelligences/plugin.go b/components/ambient-api-server/plugins/repoIntelligences/plugin.go new file mode 100644 index 000000000..5305a5262 --- /dev/null +++ b/components/ambient-api-server/plugins/repoIntelligences/plugin.go @@ -0,0 +1,94 @@ +package repoIntelligences + +import ( + "net/http" + + "github.com/gorilla/mux" + "github.com/openshift-online/rh-trex-ai/pkg/api" + "github.com/openshift-online/rh-trex-ai/pkg/api/presenters" + "github.com/openshift-online/rh-trex-ai/pkg/auth" + "github.com/openshift-online/rh-trex-ai/pkg/controllers" + "github.com/openshift-online/rh-trex-ai/pkg/db" + "github.com/openshift-online/rh-trex-ai/pkg/environments" + "github.com/openshift-online/rh-trex-ai/pkg/registry" + pkgserver "github.com/openshift-online/rh-trex-ai/pkg/server" + "github.com/openshift-online/rh-trex-ai/plugins/events" + "github.com/openshift-online/rh-trex-ai/plugins/generic" + + pkgrbac "github.com/ambient-code/platform/components/ambient-api-server/plugins/rbac" + "github.com/ambient-code/platform/components/ambient-api-server/plugins/repoEvents" +) + +type ServiceLocator func() RepoIntelligenceService + +func NewServiceLocator(env *environments.Env) ServiceLocator { + return func() RepoIntelligenceService { + return NewRepoIntelligenceService( + db.NewAdvisoryLockFactory(env.Database.SessionFactory), + NewRepoIntelligenceDao(&env.Database.SessionFactory), + events.Service(&env.Services), + repoEvents.Service(&env.Services), + ) + } +} + +func Service(s *environments.Services) RepoIntelligenceService { + if s == nil { + return nil + } + if obj := s.GetService("RepoIntelligences"); obj != nil { + locator := obj.(ServiceLocator) + return locator() + } + return nil +} + +func init() { + registry.RegisterService("RepoIntelligences", func(env interface{}) interface{} { + return NewServiceLocator(env.(*environments.Env)) + }) + + pkgserver.RegisterRoutes("repo_intelligences", 
func(apiV1Router *mux.Router, services pkgserver.ServicesInterface, authMiddleware environments.JWTMiddleware, authzMiddleware auth.AuthorizationMiddleware) { + envServices := services.(*environments.Services) + if dbAuthz := pkgrbac.Middleware(envServices); dbAuthz != nil { + authzMiddleware = dbAuthz + } + svc := Service(envServices) + genericSvc := generic.Service(envServices) + handler := NewRepoIntelligenceHandler(svc, genericSvc, genericSvc) + + router := apiV1Router.PathPrefix("/repo_intelligences").Subrouter() + router.HandleFunc("", handler.List).Methods(http.MethodGet) + router.HandleFunc("", handler.Create).Methods(http.MethodPost) + router.HandleFunc("/lookup", handler.Lookup).Methods(http.MethodGet) + router.HandleFunc("/lookup", handler.DeleteByLookup).Methods(http.MethodDelete) + router.HandleFunc("/context", handler.Context).Methods(http.MethodGet) + router.HandleFunc("/{id}", handler.Get).Methods(http.MethodGet) + router.HandleFunc("/{id}", handler.Patch).Methods(http.MethodPatch) + router.HandleFunc("/{id}", handler.Delete).Methods(http.MethodDelete) + router.HandleFunc("/{id}/findings", handler.ListFindings).Methods(http.MethodGet) + router.Use(authMiddleware.AuthenticateAccountJWT) + router.Use(authzMiddleware.AuthorizeApi) + }) + + pkgserver.RegisterController("RepoIntelligences", func(manager *controllers.KindControllerManager, services pkgserver.ServicesInterface) { + svc := Service(services.(*environments.Services)) + + manager.Add(&controllers.ControllerConfig{ + Source: "RepoIntelligences", + Handlers: map[api.EventType][]controllers.ControllerHandlerFunc{ + api.CreateEventType: {svc.OnUpsert}, + api.UpdateEventType: {svc.OnUpsert}, + api.DeleteEventType: {svc.OnDelete}, + }, + }) + }) + + presenters.RegisterPath(RepoIntelligence{}, "repo_intelligences") + presenters.RegisterPath(&RepoIntelligence{}, "repo_intelligences") + presenters.RegisterKind(RepoIntelligence{}, "RepoIntelligence") + presenters.RegisterKind(&RepoIntelligence{}, 
"RepoIntelligence") + + db.RegisterMigration(migration()) + db.RegisterMigration(migrationFixUniqueIndex()) +} diff --git a/components/ambient-api-server/plugins/repoIntelligences/presenter.go b/components/ambient-api-server/plugins/repoIntelligences/presenter.go new file mode 100644 index 000000000..a40936252 --- /dev/null +++ b/components/ambient-api-server/plugins/repoIntelligences/presenter.go @@ -0,0 +1,109 @@ +package repoIntelligences + +import ( + "time" + + "github.com/openshift-online/rh-trex-ai/pkg/api/presenters" +) + +// RepoIntelligenceAPI is the API representation returned to clients. +// Defined here rather than in the generated openapi package so the plugin +// is self-contained and does not require running `make generate`. +type RepoIntelligenceAPI struct { + ID *string `json:"id,omitempty"` + Kind *string `json:"kind,omitempty"` + Href *string `json:"href,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` + + ProjectID string `json:"project_id"` + RepoURL string `json:"repo_url"` + RepoBranch string `json:"repo_branch"` + + Summary string `json:"summary"` + Language string `json:"language"` + Framework *string `json:"framework,omitempty"` + BuildSystem *string `json:"build_system,omitempty"` + TestStrategy *string `json:"test_strategy,omitempty"` + Architecture *string `json:"architecture,omitempty"` + Conventions *string `json:"conventions,omitempty"` + Caveats *string `json:"caveats,omitempty"` + + AnalyzedBySessionID *string `json:"analyzed_by_session_id,omitempty"` + AnalyzedByAgentID *string `json:"analyzed_by_agent_id,omitempty"` + AnalyzedAt *time.Time `json:"analyzed_at,omitempty"` + Confidence *float64 `json:"confidence,omitempty"` + Version int `json:"version"` +} + +type RepoIntelligenceListAPI struct { + Kind string `json:"kind"` + Page int32 `json:"page"` + Size int32 `json:"size"` + Total int32 `json:"total"` + Items []RepoIntelligenceAPI `json:"items"` +} + +func 
ptrTime(v time.Time) *time.Time { return &v } + +func ConvertRepoIntelligence(a RepoIntelligenceAPI) *RepoIntelligence { + ri := &RepoIntelligence{} + if a.ID != nil { + ri.ID = *a.ID + } + ri.ProjectID = a.ProjectID + ri.RepoURL = a.RepoURL + ri.RepoBranch = a.RepoBranch + ri.Summary = a.Summary + ri.Language = a.Language + ri.Framework = a.Framework + ri.BuildSystem = a.BuildSystem + ri.TestStrategy = a.TestStrategy + ri.Architecture = a.Architecture + ri.Conventions = a.Conventions + ri.Caveats = a.Caveats + ri.AnalyzedBySessionID = a.AnalyzedBySessionID + ri.AnalyzedByAgentID = a.AnalyzedByAgentID + ri.AnalyzedAt = a.AnalyzedAt + ri.Confidence = a.Confidence + if a.Version > 0 { + ri.Version = a.Version + } + if a.CreatedAt != nil { + ri.CreatedAt = *a.CreatedAt + } + if a.UpdatedAt != nil { + ri.UpdatedAt = *a.UpdatedAt + } + return ri +} + +func PresentRepoIntelligence(ri *RepoIntelligence) RepoIntelligenceAPI { + ref := presenters.PresentReference(ri.ID, ri) + return RepoIntelligenceAPI{ + ID: ref.Id, + Kind: ref.Kind, + Href: ref.Href, + CreatedAt: ptrTime(ri.CreatedAt), + UpdatedAt: ptrTime(ri.UpdatedAt), + + ProjectID: ri.ProjectID, + RepoURL: ri.RepoURL, + RepoBranch: ri.RepoBranch, + + Summary: ri.Summary, + Language: ri.Language, + Framework: ri.Framework, + BuildSystem: ri.BuildSystem, + TestStrategy: ri.TestStrategy, + Architecture: ri.Architecture, + Conventions: ri.Conventions, + Caveats: ri.Caveats, + + AnalyzedBySessionID: ri.AnalyzedBySessionID, + AnalyzedByAgentID: ri.AnalyzedByAgentID, + AnalyzedAt: ri.AnalyzedAt, + Confidence: ri.Confidence, + Version: ri.Version, + } +} diff --git a/components/ambient-api-server/plugins/repoIntelligences/service.go b/components/ambient-api-server/plugins/repoIntelligences/service.go new file mode 100644 index 000000000..af1857e16 --- /dev/null +++ b/components/ambient-api-server/plugins/repoIntelligences/service.go @@ -0,0 +1,181 @@ +package repoIntelligences + +import ( + "context" + + 
"github.com/openshift-online/rh-trex-ai/pkg/api" + "github.com/openshift-online/rh-trex-ai/pkg/db" + "github.com/openshift-online/rh-trex-ai/pkg/errors" + "github.com/openshift-online/rh-trex-ai/pkg/logger" + "github.com/openshift-online/rh-trex-ai/pkg/services" + + "github.com/ambient-code/platform/components/ambient-api-server/plugins/repoEvents" +) + +const repoIntelligencesLockType db.LockType = "repo_intelligences" + +type RepoIntelligenceService interface { + Get(ctx context.Context, id string) (*RepoIntelligence, *errors.ServiceError) + Create(ctx context.Context, ri *RepoIntelligence) (*RepoIntelligence, *errors.ServiceError) + Replace(ctx context.Context, ri *RepoIntelligence) (*RepoIntelligence, *errors.ServiceError) + Delete(ctx context.Context, id string) *errors.ServiceError + All(ctx context.Context) (RepoIntelligenceList, *errors.ServiceError) + FindByIDs(ctx context.Context, ids []string) (RepoIntelligenceList, *errors.ServiceError) + GetByProjectAndRepo(ctx context.Context, projectID, repoURL string) (*RepoIntelligence, *errors.ServiceError) + + OnUpsert(ctx context.Context, id string) error + OnDelete(ctx context.Context, id string) error +} + +func NewRepoIntelligenceService(lockFactory db.LockFactory, dao RepoIntelligenceDao, events services.EventService, auditSvc repoEvents.RepoEventService) RepoIntelligenceService { + return &sqlRepoIntelligenceService{ + lockFactory: lockFactory, + dao: dao, + events: events, + auditSvc: auditSvc, + } +} + +var _ RepoIntelligenceService = &sqlRepoIntelligenceService{} + +type sqlRepoIntelligenceService struct { + lockFactory db.LockFactory + dao RepoIntelligenceDao + events services.EventService + auditSvc repoEvents.RepoEventService +} + +func (s *sqlRepoIntelligenceService) logAuditEvent(ctx context.Context, ri *RepoIntelligence, action string) { + if s.auditSvc == nil { + return + } + actorType := "system" + actorID := "api-server" + if ri.AnalyzedBySessionID != nil { + actorType = "session" + actorID = 
*ri.AnalyzedBySessionID + } + _, _ = s.auditSvc.Create(ctx, &repoEvents.RepoEvent{ + ResourceType: "intelligence", + ResourceID: ri.ID, + Action: action, + ActorType: actorType, + ActorID: actorID, + ProjectID: ri.ProjectID, + }) +} + +func (s *sqlRepoIntelligenceService) OnUpsert(ctx context.Context, id string) error { + log := logger.NewLogger(ctx) + log.Infof("RepoIntelligence upserted: %s", id) + return nil +} + +func (s *sqlRepoIntelligenceService) OnDelete(ctx context.Context, id string) error { + log := logger.NewLogger(ctx) + log.Infof("RepoIntelligence deleted: %s", id) + return nil +} + +func (s *sqlRepoIntelligenceService) Get(ctx context.Context, id string) (*RepoIntelligence, *errors.ServiceError) { + ri, err := s.dao.Get(ctx, id) + if err != nil { + return nil, services.HandleGetError("RepoIntelligence", "id", id, err) + } + return ri, nil +} + +func (s *sqlRepoIntelligenceService) Create(ctx context.Context, ri *RepoIntelligence) (*RepoIntelligence, *errors.ServiceError) { + ri, err := s.dao.Create(ctx, ri) + if err != nil { + return nil, services.HandleCreateError("RepoIntelligence", err) + } + + _, evErr := s.events.Create(ctx, &api.Event{ + Source: "RepoIntelligences", + SourceID: ri.ID, + EventType: api.CreateEventType, + }) + if evErr != nil { + return nil, services.HandleCreateError("RepoIntelligence", evErr) + } + + s.logAuditEvent(ctx, ri, "created") + + return ri, nil +} + +func (s *sqlRepoIntelligenceService) Replace(ctx context.Context, ri *RepoIntelligence) (*RepoIntelligence, *errors.ServiceError) { + lockOwnerID, err := s.lockFactory.NewAdvisoryLock(ctx, ri.ID, repoIntelligencesLockType) + if err != nil { + return nil, errors.DatabaseAdvisoryLock(err) + } + defer s.lockFactory.Unlock(ctx, lockOwnerID) + + ri, err = s.dao.Replace(ctx, ri) + if err != nil { + return nil, services.HandleUpdateError("RepoIntelligence", err) + } + + _, evErr := s.events.Create(ctx, &api.Event{ + Source: "RepoIntelligences", + SourceID: ri.ID, + EventType: 
api.UpdateEventType, + }) + if evErr != nil { + return nil, services.HandleUpdateError("RepoIntelligence", evErr) + } + + s.logAuditEvent(ctx, ri, "updated") + + return ri, nil +} + +func (s *sqlRepoIntelligenceService) Delete(ctx context.Context, id string) *errors.ServiceError { + // Fetch before delete to get project_id for audit + ri, getErr := s.dao.Get(ctx, id) + if getErr != nil { + return services.HandleDeleteError("RepoIntelligence", errors.GeneralError("unable to delete repo intelligence: %s", getErr)) + } + + if err := s.dao.Delete(ctx, id); err != nil { + return services.HandleDeleteError("RepoIntelligence", errors.GeneralError("unable to delete repo intelligence: %s", err)) + } + + _, evErr := s.events.Create(ctx, &api.Event{ + Source: "RepoIntelligences", + SourceID: id, + EventType: api.DeleteEventType, + }) + if evErr != nil { + return services.HandleDeleteError("RepoIntelligence", evErr) + } + + s.logAuditEvent(ctx, ri, "deleted") + + return nil +} + +func (s *sqlRepoIntelligenceService) FindByIDs(ctx context.Context, ids []string) (RepoIntelligenceList, *errors.ServiceError) { + items, err := s.dao.FindByIDs(ctx, ids) + if err != nil { + return nil, errors.GeneralError("unable to find repo intelligences: %s", err) + } + return items, nil +} + +func (s *sqlRepoIntelligenceService) All(ctx context.Context) (RepoIntelligenceList, *errors.ServiceError) { + items, err := s.dao.All(ctx) + if err != nil { + return nil, errors.GeneralError("unable to get all repo intelligences: %s", err) + } + return items, nil +} + +func (s *sqlRepoIntelligenceService) GetByProjectAndRepo(ctx context.Context, projectID, repoURL string) (*RepoIntelligence, *errors.ServiceError) { + ri, err := s.dao.GetByProjectAndRepo(ctx, projectID, repoURL) + if err != nil { + return nil, services.HandleGetError("RepoIntelligence", "project_id+repo_url", projectID+"/"+repoURL, err) + } + return ri, nil +} diff --git a/components/backend/Dockerfile b/components/backend/Dockerfile index 
009f1a3e9..d131a0cde 100755 --- a/components/backend/Dockerfile +++ b/components/backend/Dockerfile @@ -8,8 +8,8 @@ USER 0 # Copy go mod and sum files COPY go.mod go.sum ./ -# Download dependencies -RUN go mod download +# Download dependencies (GOPROXY=direct works around HTTP/2 stream errors with proxy.golang.org inside Docker) +RUN GOPROXY=direct go mod download # Copy the source code COPY . . diff --git a/components/backend/handlers/sessions.go b/components/backend/handlers/sessions.go index bb899e4ab..148178d5a 100755 --- a/components/backend/handlers/sessions.go +++ b/components/backend/handlers/sessions.go @@ -961,6 +961,9 @@ func CreateSession(c *gin.Context) { if req.StopOnRunFinished != nil && *req.StopOnRunFinished { spec["stopOnRunFinished"] = true } + if req.DisableIntelligence != nil && *req.DisableIntelligence { + spec["disableIntelligence"] = true + } session := map[string]interface{}{ "apiVersion": "vteam.ambient-code/v1alpha1", @@ -2222,6 +2225,7 @@ func RemoveRepo(c *gin.Context) { filteredRepos := []interface{}{} foundInSpec := false + removedRepoURL := "" for _, r := range repos { rm, _ := r.(map[string]interface{}) url, _ := rm["url"].(string) @@ -2229,6 +2233,7 @@ func RemoveRepo(c *gin.Context) { filteredRepos = append(filteredRepos, r) } else { foundInSpec = true + removedRepoURL = url } } @@ -2256,6 +2261,11 @@ func RemoveRepo(c *gin.Context) { name, found, err := unstructured.NestedString(rm, "name") if found && err == nil && name == repoName { foundInReconciled = true + if removedRepoURL == "" { + if u, f, e := unstructured.NestedString(rm, "url"); f && e == nil { + removedRepoURL = u + } + } break } @@ -2263,6 +2273,9 @@ func RemoveRepo(c *gin.Context) { url, found, err := unstructured.NestedString(rm, "url") if found && err == nil && DeriveRepoFolderFromURL(url) == repoName { foundInReconciled = true + if removedRepoURL == "" { + removedRepoURL = url + } break } } @@ -2270,21 +2283,30 @@ func RemoveRepo(c *gin.Context) { // Always call 
runner to remove from filesystem (if session is running) // Do this BEFORE checking if repo exists in CR, because it might only be on filesystem phase, _, _ := unstructured.NestedString(status, "phase") + deleteIntelligence := c.Query("delete_intelligence") == "true" runnerRemoved := false + runnerDeletedIntelligence := false if phase == "Running" { runnerURL := fmt.Sprintf("http://session-%s.%s.svc.cluster.local:8001/repos/remove", sessionName, project) - runnerReq := map[string]string{"name": repoName} + runnerReq := map[string]interface{}{"name": repoName, "delete_intelligence": deleteIntelligence} reqBody, _ := json.Marshal(runnerReq) resp, err := http.Post(runnerURL, "application/json", bytes.NewReader(reqBody)) if err != nil { log.Printf("Warning: failed to call runner /repos/remove: %v", err) } else { defer resp.Body.Close() + body, _ := io.ReadAll(resp.Body) if resp.StatusCode == http.StatusOK { runnerRemoved = true log.Printf("Runner successfully removed repo %s from filesystem", repoName) + // Check if the runner actually deleted the intelligence record + var runnerResp map[string]interface{} + if json.Unmarshal(body, &runnerResp) == nil { + if deleted, ok := runnerResp["intelligence_deleted"].(bool); ok && deleted { + runnerDeletedIntelligence = true + } + } } else { - body, _ := io.ReadAll(resp.Body) log.Printf("Runner failed to remove repo %s (status %d): %s", repoName, resp.StatusCode, string(body)) } } @@ -2306,6 +2328,11 @@ func RemoveRepo(c *gin.Context) { return } + // Delete intelligence after successful CR update to avoid partial state + if deleteIntelligence && removedRepoURL != "" && !runnerDeletedIntelligence { + deleteIntelligenceFromAPIServer(project, removedRepoURL) + } + session := types.AgenticSession{ APIVersion: updated.GetAPIVersion(), Kind: updated.GetKind(), @@ -2336,6 +2363,72 @@ func sanitizeK8sName(name string) (string, error) { return name, nil } +// deleteIntelligenceFromAPIServer calls the API server directly to delete the +// 
intelligence record for a repo. Used as fallback when the runner pod is +// unreachable (session stopped/failed). +func deleteIntelligenceFromAPIServer(projectID, repoURL string) { + apiServerURL := os.Getenv("API_SERVER_URL") + if apiServerURL == "" { + apiServerURL = "http://ambient-api-server:8000" + } + apiServerURL = strings.TrimRight(apiServerURL, "/") + + params := url.Values{ + "project_id": {projectID}, + "repo_url": {repoURL}, + } + deleteURL := fmt.Sprintf("%s/api/ambient/v1/repo_intelligences/lookup?%s", apiServerURL, params.Encode()) + + req, err := http.NewRequest(http.MethodDelete, deleteURL, nil) + if err != nil { + log.Printf("Warning: failed to create intelligence delete request: %v", err) + return + } + + client := &http.Client{Timeout: 10 * time.Second} + resp, err := client.Do(req) + if err != nil { + log.Printf("Warning: failed to delete intelligence from API server: %v", err) + return + } + defer resp.Body.Close() + + switch resp.StatusCode { + case http.StatusNoContent: + log.Printf("Deleted intelligence for %s from API server (backend fallback)", repoURL) + case http.StatusNotFound: + log.Printf("No intelligence found to delete for %s on API server", repoURL) + default: + body, _ := io.ReadAll(resp.Body) + log.Printf("Warning: API server intelligence delete returned %d: %s", resp.StatusCode, string(body)) + } +} + +// ReanalyzeRepo triggers re-analysis of a repository's intelligence. 
+// POST /api/projects/:projectName/agentic-sessions/:sessionName/repos/reanalyze +func ReanalyzeRepo(c *gin.Context) { + project := c.GetString("project") + sessionName := c.Param("sessionName") + + var body map[string]string + if err := c.BindJSON(&body); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"}) + return + } + + runnerURL := fmt.Sprintf("http://session-%s.%s.svc.cluster.local:8001/repos/reanalyze", sessionName, project) + reqBody, _ := json.Marshal(body) + resp, err := http.Post(runnerURL, "application/json", bytes.NewReader(reqBody)) + if err != nil { + log.Printf("Failed to call runner /repos/reanalyze: %v", err) + c.JSON(http.StatusServiceUnavailable, gin.H{"error": "Runner not available"}) + return + } + defer resp.Body.Close() + respBody, _ := io.ReadAll(resp.Body) + c.Data(resp.StatusCode, "application/json", respBody) +} + // getRunnerServiceName returns the K8s Service name for a session's runner. // The runner serves both AG-UI and content endpoints on port 8001. 
func getRunnerServiceName(session string) string { diff --git a/components/backend/routes.go b/components/backend/routes.go index 23abad9de..d512c515b 100644 --- a/components/backend/routes.go +++ b/components/backend/routes.go @@ -68,6 +68,7 @@ func registerRoutes(r *gin.Engine) { // NOTE: /repos/status must come BEFORE /repos/:repoName to avoid wildcard matching projectGroup.GET("/agentic-sessions/:sessionName/repos/status", handlers.GetReposStatus) projectGroup.DELETE("/agentic-sessions/:sessionName/repos/:repoName", handlers.RemoveRepo) + projectGroup.POST("/agentic-sessions/:sessionName/repos/reanalyze", handlers.ReanalyzeRepo) projectGroup.PUT("/agentic-sessions/:sessionName/displayname", handlers.UpdateSessionDisplayName) projectGroup.POST("/agentic-sessions/:sessionName/model", handlers.SwitchModel) diff --git a/components/backend/types/session.go b/components/backend/types/session.go index a1fcb6bbe..78b3fe9ed 100755 --- a/components/backend/types/session.go +++ b/components/backend/types/session.go @@ -27,6 +27,7 @@ type AgenticSessionSpec struct { Timeout int `json:"timeout"` InactivityTimeout *int `json:"inactivityTimeout,omitempty"` StopOnRunFinished bool `json:"stopOnRunFinished,omitempty"` + DisableIntelligence bool `json:"disableIntelligence,omitempty"` UserContext *UserContext `json:"userContext,omitempty"` BotAccount *BotAccountRef `json:"botAccount,omitempty"` ResourceOverrides *ResourceOverrides `json:"resourceOverrides,omitempty"` @@ -70,6 +71,7 @@ type CreateAgenticSessionRequest struct { Timeout *int `json:"timeout,omitempty"` InactivityTimeout *int `json:"inactivityTimeout,omitempty"` StopOnRunFinished *bool `json:"stopOnRunFinished,omitempty"` + DisableIntelligence *bool `json:"disableIntelligence,omitempty"` ParentSessionID string `json:"parent_session_id,omitempty"` Repos []SimpleRepo `json:"repos,omitempty"` ActiveWorkflow *WorkflowSelection `json:"activeWorkflow,omitempty"` diff --git 
a/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/repos/[repoName]/route.ts b/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/repos/[repoName]/route.ts index f5ae370dc..ca4b7b6c4 100644 --- a/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/repos/[repoName]/route.ts +++ b/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/repos/[repoName]/route.ts @@ -7,9 +7,12 @@ export async function DELETE( ) { const { name, sessionName, repoName } = await params; const headers = await buildForwardHeadersAsync(request); + const { searchParams } = new URL(request.url); + const qs = searchParams.toString(); + const suffix = qs ? `?${qs}` : ''; const resp = await fetch( - `${BACKEND_URL}/projects/${encodeURIComponent(name)}/agentic-sessions/${encodeURIComponent(sessionName)}/repos/${encodeURIComponent(repoName)}`, + `${BACKEND_URL}/projects/${encodeURIComponent(name)}/agentic-sessions/${encodeURIComponent(sessionName)}/repos/${encodeURIComponent(repoName)}${suffix}`, { method: 'DELETE', headers, diff --git a/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/repos/reanalyze/route.ts b/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/repos/reanalyze/route.ts new file mode 100644 index 000000000..a23bd73e2 --- /dev/null +++ b/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/repos/reanalyze/route.ts @@ -0,0 +1,26 @@ +import { BACKEND_URL } from '@/lib/config'; +import { buildForwardHeadersAsync } from '@/lib/auth'; + +export async function POST( + request: Request, + { params }: { params: Promise<{ name: string; sessionName: string }> }, +) { + const { name, sessionName } = await params; + const headers = await buildForwardHeadersAsync(request); + const body = await request.text(); + + const resp = await fetch( + 
`${BACKEND_URL}/projects/${encodeURIComponent(name)}/agentic-sessions/${encodeURIComponent(sessionName)}/repos/reanalyze`, + { + method: 'POST', + headers, + body, + } + ); + + const data = await resp.text(); + return new Response(data, { + status: resp.status, + headers: { 'Content-Type': 'application/json' } + }); +} diff --git a/components/frontend/src/app/api/projects/[name]/intelligence/route.ts b/components/frontend/src/app/api/projects/[name]/intelligence/route.ts new file mode 100644 index 000000000..545f46294 --- /dev/null +++ b/components/frontend/src/app/api/projects/[name]/intelligence/route.ts @@ -0,0 +1,46 @@ +import { NextRequest, NextResponse } from "next/server"; +import { API_SERVER_URL } from "@/lib/config"; +import { buildForwardHeadersAsync } from "@/lib/auth"; + +export async function GET( + request: NextRequest, + { params }: { params: Promise<{ name: string }> } +) { + try { + const { name: projectName } = await params; + const repoUrl = request.nextUrl.searchParams.get("repo_url"); + + if (!repoUrl) { + return NextResponse.json( + { error: "repo_url query parameter is required" }, + { status: 400 } + ); + } + + const searchParams = new URLSearchParams({ + project_id: projectName, + repo_url: repoUrl, + }); + + const headers = await buildForwardHeadersAsync(request); + const response = await fetch( + `${API_SERVER_URL}/api/ambient/v1/repo_intelligences/lookup?${searchParams}`, + { + method: "GET", + headers, + } + ); + + const data = await response.text(); + return new NextResponse(data, { + status: response.status, + headers: { "Content-Type": "application/json" }, + }); + } catch (error) { + console.error("Failed to fetch repo intelligence:", error); + return NextResponse.json( + { error: "Failed to fetch repo intelligence" }, + { status: 500 } + ); + } +} diff --git a/components/frontend/src/app/layout.tsx b/components/frontend/src/app/layout.tsx index b30787eac..1cbb0f95b 100755 --- a/components/frontend/src/app/layout.tsx +++ 
b/components/frontend/src/app/layout.tsx @@ -36,6 +36,7 @@ export default function RootLayout({ // between server-rendered content and client-side theme application +