From a11ff36932798264bc18dbd1b2541d75bbef1902 Mon Sep 17 00:00:00 2001 From: "tamas.albert" Date: Wed, 4 Mar 2026 11:02:20 +0200 Subject: [PATCH 01/39] upstream | chore: Collect teams and display per team metrics --- .../20260227_add_team_tables.go | 43 + .../models/migrationscripts/register.go | 1 + backend/plugins/gh-copilot/models/models.go | 3 + .../plugins/gh-copilot/models/models_test.go | 36 +- backend/plugins/gh-copilot/models/team.go | 47 + .../plugins/gh-copilot/models/team_user.go | 38 + .../plugins/gh-copilot/tasks/pagination.go | 55 + backend/plugins/gh-copilot/tasks/register.go | 9 +- .../gh-copilot/tasks/team_collector.go | 103 ++ .../gh-copilot/tasks/team_extractor.go | 118 ++ .../gh-copilot/tasks/team_user_collector.go | 125 ++ .../gh-copilot/tasks/team_user_extractor.go | 114 ++ .../GithubCopilotAdoptionByTeam.json | 1568 +++++++++++++++++ 13 files changed, 2241 insertions(+), 19 deletions(-) create mode 100644 backend/plugins/gh-copilot/models/migrationscripts/20260227_add_team_tables.go create mode 100644 backend/plugins/gh-copilot/models/team.go create mode 100644 backend/plugins/gh-copilot/models/team_user.go create mode 100644 backend/plugins/gh-copilot/tasks/pagination.go create mode 100644 backend/plugins/gh-copilot/tasks/team_collector.go create mode 100644 backend/plugins/gh-copilot/tasks/team_extractor.go create mode 100644 backend/plugins/gh-copilot/tasks/team_user_collector.go create mode 100644 backend/plugins/gh-copilot/tasks/team_user_extractor.go create mode 100644 grafana/dashboards/GithubCopilotAdoptionByTeam.json diff --git a/backend/plugins/gh-copilot/models/migrationscripts/20260227_add_team_tables.go b/backend/plugins/gh-copilot/models/migrationscripts/20260227_add_team_tables.go new file mode 100644 index 00000000000..5b792ca1e89 --- /dev/null +++ b/backend/plugins/gh-copilot/models/migrationscripts/20260227_add_team_tables.go @@ -0,0 +1,43 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more 
+contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/helpers/migrationhelper" + "github.com/apache/incubator-devlake/plugins/gh-copilot/models" +) + +type addTeamTables struct{} + +func (script *addTeamTables) Up(basicRes context.BasicRes) errors.Error { + return migrationhelper.AutoMigrateTables( + basicRes, + &models.GhCopilotTeam{}, + &models.GhCopilotTeamUser{}, + ) +} + +func (*addTeamTables) Version() uint64 { + return 20260227000000 +} + +func (*addTeamTables) Name() string { + return "copilot add team and team_user tables" +} diff --git a/backend/plugins/gh-copilot/models/migrationscripts/register.go b/backend/plugins/gh-copilot/models/migrationscripts/register.go index 5e275f943bf..3acf34f3788 100644 --- a/backend/plugins/gh-copilot/models/migrationscripts/register.go +++ b/backend/plugins/gh-copilot/models/migrationscripts/register.go @@ -29,5 +29,6 @@ func All() []plugin.MigrationScript { new(addScopeConfig20260121), new(migrateToUsageMetricsV2), new(addPRFieldsToEnterpriseMetrics), + new(addTeamTables), } } diff --git a/backend/plugins/gh-copilot/models/models.go b/backend/plugins/gh-copilot/models/models.go index f223c821827..d9f0f02b3bf 
100644 --- a/backend/plugins/gh-copilot/models/models.go +++ b/backend/plugins/gh-copilot/models/models.go @@ -45,5 +45,8 @@ func GetTablesInfo() []dal.Tabler { &GhCopilotUserMetricsByModelFeature{}, // Seat assignments &GhCopilotSeat{}, + // Team / team-member tables (org-level) + &GhCopilotTeam{}, + &GhCopilotTeamUser{}, } } diff --git a/backend/plugins/gh-copilot/models/models_test.go b/backend/plugins/gh-copilot/models/models_test.go index 72ead8a65e9..428402460b2 100644 --- a/backend/plugins/gh-copilot/models/models_test.go +++ b/backend/plugins/gh-copilot/models/models_test.go @@ -22,24 +22,26 @@ import "testing" func TestGetTablesInfo(t *testing.T) { tables := GetTablesInfo() expected := map[string]bool{ - (&GhCopilotConnection{}).TableName(): false, - (&GhCopilotScope{}).TableName(): false, - (&GhCopilotScopeConfig{}).TableName(): false, - (&GhCopilotOrgMetrics{}).TableName(): false, - (&GhCopilotLanguageMetrics{}).TableName(): false, - (&GhCopilotEnterpriseDailyMetrics{}).TableName(): false, - (&GhCopilotMetricsByIde{}).TableName(): false, - (&GhCopilotMetricsByFeature{}).TableName(): false, - (&GhCopilotMetricsByLanguageFeature{}).TableName(): false, - (&GhCopilotMetricsByLanguageModel{}).TableName(): false, - (&GhCopilotMetricsByModelFeature{}).TableName(): false, - (&GhCopilotUserDailyMetrics{}).TableName(): false, - (&GhCopilotUserMetricsByIde{}).TableName(): false, - (&GhCopilotUserMetricsByFeature{}).TableName(): false, + (&GhCopilotConnection{}).TableName(): false, + (&GhCopilotScope{}).TableName(): false, + (&GhCopilotScopeConfig{}).TableName(): false, + (&GhCopilotOrgMetrics{}).TableName(): false, + (&GhCopilotLanguageMetrics{}).TableName(): false, + (&GhCopilotEnterpriseDailyMetrics{}).TableName(): false, + (&GhCopilotMetricsByIde{}).TableName(): false, + (&GhCopilotMetricsByFeature{}).TableName(): false, + (&GhCopilotMetricsByLanguageFeature{}).TableName(): false, + (&GhCopilotMetricsByLanguageModel{}).TableName(): false, + 
(&GhCopilotMetricsByModelFeature{}).TableName(): false, + (&GhCopilotUserDailyMetrics{}).TableName(): false, + (&GhCopilotUserMetricsByIde{}).TableName(): false, + (&GhCopilotUserMetricsByFeature{}).TableName(): false, (&GhCopilotUserMetricsByLanguageFeature{}).TableName(): false, - (&GhCopilotUserMetricsByLanguageModel{}).TableName(): false, - (&GhCopilotUserMetricsByModelFeature{}).TableName(): false, - (&GhCopilotSeat{}).TableName(): false, + (&GhCopilotUserMetricsByLanguageModel{}).TableName(): false, + (&GhCopilotUserMetricsByModelFeature{}).TableName(): false, + (&GhCopilotSeat{}).TableName(): false, + (&GhCopilotTeam{}).TableName(): false, + (&GhCopilotTeamUser{}).TableName(): false, } if len(tables) != len(expected) { diff --git a/backend/plugins/gh-copilot/models/team.go b/backend/plugins/gh-copilot/models/team.go new file mode 100644 index 00000000000..42574e74f90 --- /dev/null +++ b/backend/plugins/gh-copilot/models/team.go @@ -0,0 +1,47 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/common" +) + +// GhCopilotTeam stores a GitHub organization team in the tool layer. 
+type GhCopilotTeam struct { + ConnectionId uint64 `gorm:"primaryKey"` + Id int `json:"id" gorm:"primaryKey;autoIncrement:false"` + OrgId *int `json:"organization_id" gorm:"index"` + OrgLogin string `json:"org_login" gorm:"type:varchar(255);index"` + Name string `json:"name" gorm:"type:varchar(255)"` + Slug string `json:"slug" gorm:"type:varchar(255);index"` + Description string `json:"description"` + Privacy string `json:"privacy" gorm:"type:varchar(100)"` + Permission string `json:"permission" gorm:"type:varchar(100)"` + NotificationSetting string `json:"notification_setting" gorm:"type:varchar(100)"` + ParentTeamId *int `json:"parent_team_id" gorm:"index"` + ParentTeamSlug string `json:"parent_team_slug" gorm:"type:varchar(255);index"` + GithubCreatedAt *time.Time + GithubUpdatedAt *time.Time + common.NoPKModel +} + +func (GhCopilotTeam) TableName() string { + return "_tool_copilot_teams" +} diff --git a/backend/plugins/gh-copilot/models/team_user.go b/backend/plugins/gh-copilot/models/team_user.go new file mode 100644 index 00000000000..260af089ffc --- /dev/null +++ b/backend/plugins/gh-copilot/models/team_user.go @@ -0,0 +1,38 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package models + +import "github.com/apache/incubator-devlake/core/models/common" + +// GhCopilotTeamUser stores a team-member relationship in the tool layer. +type GhCopilotTeamUser struct { + ConnectionId uint64 `gorm:"primaryKey"` + TeamId int `gorm:"primaryKey;autoIncrement:false"` + UserId int `gorm:"primaryKey;autoIncrement:false"` + OrgLogin string `gorm:"type:varchar(255);index"` + TeamSlug string `gorm:"type:varchar(255);index"` + UserLogin string `json:"login" gorm:"type:varchar(255);index"` + Type string `json:"type" gorm:"type:varchar(100)"` + ViewType string `json:"user_view_type" gorm:"type:varchar(100)"` + IsSiteAdmin bool `json:"site_admin"` + common.NoPKModel +} + +func (GhCopilotTeamUser) TableName() string { + return "_tool_copilot_team_users" +} diff --git a/backend/plugins/gh-copilot/tasks/pagination.go b/backend/plugins/gh-copilot/tasks/pagination.go new file mode 100644 index 00000000000..54454984214 --- /dev/null +++ b/backend/plugins/gh-copilot/tasks/pagination.go @@ -0,0 +1,55 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "net/http" + "regexp" + "strconv" + "strings" + + "github.com/apache/incubator-devlake/core/errors" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" +) + +// getTotalPagesFromResponse parses the GitHub Link header to determine the last page number. +// This is used for paginated list endpoints (teams, team members). +func getTotalPagesFromResponse(res *http.Response, _ *helper.ApiCollectorArgs) (int, errors.Error) { + link := res.Header.Get("Link") + if link == "" { + return 0, nil + } + pagePattern := regexp.MustCompile(`page=(\d+)`) + relPattern := regexp.MustCompile(`rel="([a-z]+)"`) + for _, part := range strings.Split(link, ",") { + relMatch := relPattern.FindStringSubmatch(part) + if len(relMatch) < 2 || relMatch[1] != "last" { + continue + } + pageMatch := pagePattern.FindStringSubmatch(part) + if len(pageMatch) < 2 { + continue + } + last, err := strconv.Atoi(pageMatch[1]) + if err != nil { + return 0, errors.Default.Wrap(err, "failed to parse last page") + } + return last, nil + } + return 0, nil +} diff --git a/backend/plugins/gh-copilot/tasks/register.go b/backend/plugins/gh-copilot/tasks/register.go index ee1dcc797fc..3286e23fa9a 100644 --- a/backend/plugins/gh-copilot/tasks/register.go +++ b/backend/plugins/gh-copilot/tasks/register.go @@ -22,12 +22,17 @@ import "github.com/apache/incubator-devlake/core/plugin" // GetSubTaskMetas returns the ordered list of Copilot subtasks. 
func GetSubTaskMetas() []plugin.SubTaskMeta { return []plugin.SubTaskMeta{ - // Collectors + // Collectors – metrics CollectOrgMetricsMeta, CollectCopilotSeatAssignmentsMeta, CollectEnterpriseMetricsMeta, CollectUserMetricsMeta, - // Extractors + // Collectors – teams (extract teams before collecting team users) + CollectTeamsMeta, + ExtractTeamsMeta, + CollectTeamUsersMeta, + ExtractTeamUsersMeta, + // Extractors – metrics ExtractSeatsMeta, ExtractOrgMetricsMeta, ExtractEnterpriseMetricsMeta, diff --git a/backend/plugins/gh-copilot/tasks/team_collector.go b/backend/plugins/gh-copilot/tasks/team_collector.go new file mode 100644 index 00000000000..686ae1a9fcb --- /dev/null +++ b/backend/plugins/gh-copilot/tasks/team_collector.go @@ -0,0 +1,103 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + "fmt" + "net/http" + "net/url" + "strings" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" +) + +const rawCopilotTeamTable = "copilot_api_teams" + +var CollectTeamsMeta = plugin.SubTaskMeta{ + Name: "collectTeams", + EntryPoint: CollectTeams, + EnabledByDefault: true, + Description: "Collect teams data from GitHub API for the configured organization.", + DomainTypes: []string{plugin.DOMAIN_TYPE_CROSS}, + DependencyTables: []string{}, + ProductTables: []string{rawCopilotTeamTable}, +} + +func CollectTeams(taskCtx plugin.SubTaskContext) errors.Error { + data, ok := taskCtx.TaskContext().GetData().(*GhCopilotTaskData) + if !ok { + return errors.Default.New("task data is not GhCopilotTaskData") + } + connection := data.Connection + connection.Normalize() + + org := strings.TrimSpace(connection.Organization) + if org == "" { + taskCtx.GetLogger().Warn(nil, "skipping team collection: no organization configured on connection %d", connection.ID) + return nil + } + + apiClient, err := CreateApiClient(taskCtx.TaskContext(), connection) + if err != nil { + return err + } + + collector, cErr := helper.NewStatefulApiCollector(helper.RawDataSubTaskArgs{ + Ctx: taskCtx, + Table: rawCopilotTeamTable, + Options: copilotRawParams{ + ConnectionId: data.Options.ConnectionId, + ScopeId: data.Options.ScopeId, + Organization: org, + Endpoint: connection.Endpoint, + }, + }) + if cErr != nil { + return cErr + } + + cErr = collector.InitCollector(helper.ApiCollectorArgs{ + ApiClient: apiClient, + PageSize: 100, + UrlTemplate: fmt.Sprintf("orgs/%s/teams", org), + Query: func(reqData *helper.RequestData) (url.Values, errors.Error) { + query := url.Values{} + query.Set("page", fmt.Sprintf("%v", reqData.Pager.Page)) + query.Set("per_page", fmt.Sprintf("%v", reqData.Pager.Size)) + return query, nil + }, + 
GetTotalPages: getTotalPagesFromResponse, + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var items []json.RawMessage + e := helper.UnmarshalResponse(res, &items) + if e != nil { + return nil, e + } + return items, nil + }, + AfterResponse: ignore404, + }) + if cErr != nil { + return cErr + } + + return collector.Execute() +} diff --git a/backend/plugins/gh-copilot/tasks/team_extractor.go b/backend/plugins/gh-copilot/tasks/team_extractor.go new file mode 100644 index 00000000000..a4e26b2d214 --- /dev/null +++ b/backend/plugins/gh-copilot/tasks/team_extractor.go @@ -0,0 +1,118 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + "strings" + "time" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/gh-copilot/models" +) + +var ExtractTeamsMeta = plugin.SubTaskMeta{ + Name: "extractTeams", + EntryPoint: ExtractTeams, + EnabledByDefault: true, + Description: "Extract raw team data into tool layer table _tool_copilot_teams", + DomainTypes: []string{plugin.DOMAIN_TYPE_CROSS}, + DependencyTables: []string{rawCopilotTeamTable}, + ProductTables: []string{models.GhCopilotTeam{}.TableName()}, +} + +// githubTeamParentResponse represents the parent field in the team API response. +type githubTeamParentResponse struct { + Id int `json:"id"` + Slug string `json:"slug"` +} + +// githubTeamResponse represents a single team object from GET /orgs/{org}/teams. +type githubTeamResponse struct { + Id int `json:"id"` + Url string `json:"url"` + HtmlUrl string `json:"html_url"` + Name string `json:"name"` + Type string `json:"type"` + Slug string `json:"slug"` + Description string `json:"description"` + Privacy string `json:"privacy"` + NotificationSetting string `json:"notification_setting"` + Permission string `json:"permission"` + MembersUrl string `json:"members_url"` + RepositoriesUrl string `json:"repositories_url"` + Parent *githubTeamParentResponse `json:"parent"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` + OrgId *int `json:"organization_id"` +} + +func ExtractTeams(taskCtx plugin.SubTaskContext) errors.Error { + data, ok := taskCtx.TaskContext().GetData().(*GhCopilotTaskData) + if !ok { + return errors.Default.New("task data is not GhCopilotTaskData") + } + connection := data.Connection + connection.Normalize() + org := strings.TrimSpace(connection.Organization) + + extractor, err := helper.NewApiExtractor(helper.ApiExtractorArgs{ + 
RawDataSubTaskArgs: helper.RawDataSubTaskArgs{ + Ctx: taskCtx, + Table: rawCopilotTeamTable, + Options: copilotRawParams{ + ConnectionId: data.Options.ConnectionId, + ScopeId: data.Options.ScopeId, + Organization: org, + Endpoint: connection.Endpoint, + }, + }, + Extract: func(row *helper.RawData) ([]interface{}, errors.Error) { + apiTeam := &githubTeamResponse{} + if e := json.Unmarshal(row.Data, apiTeam); e != nil { + return nil, errors.Convert(e) + } + + team := &models.GhCopilotTeam{ + ConnectionId: data.Options.ConnectionId, + Id: apiTeam.Id, + Name: apiTeam.Name, + Slug: apiTeam.Slug, + Description: apiTeam.Description, + Privacy: apiTeam.Privacy, + Permission: apiTeam.Permission, + NotificationSetting: apiTeam.NotificationSetting, + GithubCreatedAt: apiTeam.CreatedAt, + GithubUpdatedAt: apiTeam.UpdatedAt, + OrgId: apiTeam.OrgId, + OrgLogin: org, + } + if apiTeam.Parent != nil { + team.ParentTeamId = &apiTeam.Parent.Id + team.ParentTeamSlug = apiTeam.Parent.Slug + } + return []interface{}{team}, nil + }, + }) + if err != nil { + return err + } + return extractor.Execute() +} diff --git a/backend/plugins/gh-copilot/tasks/team_user_collector.go b/backend/plugins/gh-copilot/tasks/team_user_collector.go new file mode 100644 index 00000000000..234ed09bd2f --- /dev/null +++ b/backend/plugins/gh-copilot/tasks/team_user_collector.go @@ -0,0 +1,125 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "fmt" + "net/http" + "net/url" + "reflect" + "strings" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/gh-copilot/models" +) + +const rawCopilotTeamUserTable = "copilot_api_team_users" + +// simpleCopilotTeam is the minimal struct used as input for the team-member +// collector iterator. Fields must be exported for DalCursorIterator. 
+type simpleCopilotTeam struct { + Id int + Slug string + OrgLogin string +} + +var CollectTeamUsersMeta = plugin.SubTaskMeta{ + Name: "collectTeamUsers", + EntryPoint: CollectTeamUsers, + EnabledByDefault: true, + Description: "Collect team members data from GitHub API.", + DomainTypes: []string{plugin.DOMAIN_TYPE_CROSS}, + DependencyTables: []string{models.GhCopilotTeam{}.TableName()}, + ProductTables: []string{rawCopilotTeamUserTable}, +} + +func CollectTeamUsers(taskCtx plugin.SubTaskContext) errors.Error { + data, ok := taskCtx.TaskContext().GetData().(*GhCopilotTaskData) + if !ok { + return errors.Default.New("task data is not GhCopilotTaskData") + } + connection := data.Connection + connection.Normalize() + + org := strings.TrimSpace(connection.Organization) + if org == "" { + taskCtx.GetLogger().Warn(nil, "skipping team-user collection: no organization configured on connection %d", connection.ID) + return nil + } + + db := taskCtx.GetDal() + cursor, err := db.Cursor( + dal.Select("id, slug, org_login"), + dal.From(models.GhCopilotTeam{}.TableName()), + dal.Where("connection_id = ? 
AND org_login = ?", data.Options.ConnectionId, org), + ) + if err != nil { + return err + } + iterator, err := helper.NewDalCursorIterator(db, cursor, reflect.TypeOf(simpleCopilotTeam{})) + if err != nil { + return err + } + + apiClient, aErr := CreateApiClient(taskCtx.TaskContext(), connection) + if aErr != nil { + return aErr + } + + collector, cErr := helper.NewApiCollector(helper.ApiCollectorArgs{ + RawDataSubTaskArgs: helper.RawDataSubTaskArgs{ + Ctx: taskCtx, + Table: rawCopilotTeamUserTable, + Options: copilotRawParams{ + ConnectionId: data.Options.ConnectionId, + ScopeId: data.Options.ScopeId, + Organization: org, + Endpoint: connection.Endpoint, + }, + }, + ApiClient: apiClient, + Input: iterator, + PageSize: 100, + UrlTemplate: "/orgs/{{ .Input.OrgLogin }}/teams/{{ .Input.Slug }}/members", + Query: func(reqData *helper.RequestData) (url.Values, errors.Error) { + query := url.Values{} + query.Set("page", fmt.Sprintf("%v", reqData.Pager.Page)) + query.Set("per_page", fmt.Sprintf("%v", reqData.Pager.Size)) + return query, nil + }, + GetTotalPages: getTotalPagesFromResponse, + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var item json.RawMessage + e := helper.UnmarshalResponse(res, &item) + if e != nil { + return nil, e + } + return []json.RawMessage{item}, nil + }, + AfterResponse: ignore404, + }) + if cErr != nil { + return cErr + } + + return collector.Execute() +} diff --git a/backend/plugins/gh-copilot/tasks/team_user_extractor.go b/backend/plugins/gh-copilot/tasks/team_user_extractor.go new file mode 100644 index 00000000000..1e54eeae945 --- /dev/null +++ b/backend/plugins/gh-copilot/tasks/team_user_extractor.go @@ -0,0 +1,114 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "strings" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/gh-copilot/models" +) + +var ExtractTeamUsersMeta = plugin.SubTaskMeta{ + Name: "extractTeamUsers", + EntryPoint: ExtractTeamUsers, + EnabledByDefault: true, + Description: "Extract raw team member data into tool layer table _tool_copilot_team_users", + DomainTypes: []string{plugin.DOMAIN_TYPE_CROSS}, + DependencyTables: []string{rawCopilotTeamUserTable}, + ProductTables: []string{models.GhCopilotTeamUser{}.TableName()}, +} + +// githubTeamUserResponse represents a user object from the team members API. 
+type githubTeamUserResponse struct { + Id int `json:"id"` + Login string `json:"login"` + NodeId string `json:"node_id"` + Type string `json:"type"` + ViewType string `json:"view_type"` + IsSiteAdmin bool `json:"site_admin"` +} + +func ExtractTeamUsers(taskCtx plugin.SubTaskContext) errors.Error { + data, ok := taskCtx.TaskContext().GetData().(*GhCopilotTaskData) + if !ok { + return errors.Default.New("task data is not GhCopilotTaskData") + } + connection := data.Connection + connection.Normalize() + org := strings.TrimSpace(connection.Organization) + if org == "" { + return errors.BadInput.New("no organization configured on connection") + } + + db := taskCtx.GetDal() + // Delete existing team-user records for this connection/org before re-extracting. + if dErr := db.Delete( + &models.GhCopilotTeamUser{}, + dal.Where("connection_id = ? AND org_login = ?", data.Options.ConnectionId, org), + ); dErr != nil { + return dErr + } + + extractor, err := helper.NewApiExtractor(helper.ApiExtractorArgs{ + RawDataSubTaskArgs: helper.RawDataSubTaskArgs{ + Ctx: taskCtx, + Table: rawCopilotTeamUserTable, + Options: copilotRawParams{ + ConnectionId: data.Options.ConnectionId, + ScopeId: data.Options.ScopeId, + Organization: org, + Endpoint: connection.Endpoint, + }, + }, + Extract: func(row *helper.RawData) ([]interface{}, errors.Error) { + apiUsers := &[]githubTeamUserResponse{} + if e := json.Unmarshal(row.Data, apiUsers); e != nil { + return nil, errors.Convert(e) + } + team := &simpleCopilotTeam{} + if e := json.Unmarshal(row.Input, team); e != nil { + return nil, errors.Convert(e) + } + + results := make([]interface{}, 0, len(*apiUsers)) + for _, u := range *apiUsers { + results = append(results, &models.GhCopilotTeamUser{ + ConnectionId: data.Options.ConnectionId, + TeamId: team.Id, + UserId: u.Id, + OrgLogin: team.OrgLogin, + TeamSlug: team.Slug, + UserLogin: u.Login, + Type: u.Type, + ViewType: u.ViewType, + IsSiteAdmin: u.IsSiteAdmin, + }) + } + return results, nil + }, 
+ }) + if err != nil { + return err + } + return extractor.Execute() +} diff --git a/grafana/dashboards/GithubCopilotAdoptionByTeam.json b/grafana/dashboards/GithubCopilotAdoptionByTeam.json new file mode 100644 index 00000000000..06db211fce1 --- /dev/null +++ b/grafana/dashboards/GithubCopilotAdoptionByTeam.json @@ -0,0 +1,1568 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": "mysql", + "description": "Daily active users in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "yellow", + "value": 50 + }, + { + "color": "red", + "value": 100 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 0, + "y": 0 + }, + "id": 1, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Daily Active Users\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND m.day = (\n SELECT MAX(day) FROM 
_tool_copilot_user_daily_metrics\n WHERE connection_id = ${connection_id} AND scope_id = '${scope_id}'\n )\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "refId": "A" + } + ], + "title": "Daily Active Users", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Code acceptance rate for the selected team(s) in the time range", + "fieldConfig": { + "defaults": { + "mappings": [], + "min": 0, + "max": 100, + "unit": "percent", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": null + }, + { + "color": "yellow", + "value": 20 + }, + { + "color": "green", + "value": 40 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 6, + "y": 0 + }, + "id": 2, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"Acceptance Rate %\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "refId": "A" + } + ], + "title": "Code Acceptance Rate", + "type": "gauge" + }, + { + "datasource": "mysql", + "description": "Total lines of code added by team members in the time range", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": 
"green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 12, + "y": 0 + }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT SUM(m.loc_added_sum) as \"Lines Added\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "refId": "A" + } + ], + "title": "Lines of Code Added", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Total user-initiated interactions in the time range", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "purple", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 18, + "y": 0 + }, + "id": 4, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT SUM(m.user_initiated_interaction_count) as \"User-Initiated Interactions\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN 
_tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "refId": "A" + } + ], + "title": "User-Initiated Interactions", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Weekly active users in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 0, + "y": 4 + }, + "id": 15, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Weekly Active Users\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE m.day >= DATE_SUB(CURDATE(), INTERVAL 7 DAY)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "refId": "A" + } + ], + "title": "Weekly Active Users (WAU)", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Monthly active users in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + 
"color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 6, + "y": 4 + }, + "id": 16, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Monthly Active Users\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE m.day >= DATE_SUB(CURDATE(), INTERVAL 28 DAY)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "refId": "A" + } + ], + "title": "Monthly Active Users (MAU)", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Unique users with Copilot activity in the time range", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 12, + "y": 4 + }, + "id": 11, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Unique Users\"\nFROM 
_tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "refId": "A" + } + ], + "title": "Unique Users (Period)", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Seat utilization for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "min": 0, + "max": 100, + "unit": "percent", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": null + }, + { + "color": "yellow", + "value": 50 + }, + { + "color": "green", + "value": 75 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 18, + "y": 4 + }, + "id": 14, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n COUNT(CASE WHEN s.last_activity_at IS NOT NULL THEN 1 END) * 100.0 / NULLIF(COUNT(*), 0) as \"Utilization %\"\nFROM _tool_copilot_seats s\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = s.connection_id AND tu.user_login = s.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE s.connection_id = ${connection_id}\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "refId": "A" + } + ], + "title": "Seat Utilization", + "type": "gauge" + }, + { + "datasource": "mysql", + "description": "Active users over time for the selected team(s)", + "fieldConfig": 
{ + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Users", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 8 + }, + "id": 5, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n COUNT(DISTINCT m.user_login) as \"Active Users\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "Active Users Over Time", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Code suggestions generated vs accepted over time for team members", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + 
"custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Count", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 8 + }, + "id": 6, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n SUM(m.code_generation_activity_count) as \"Suggestions\",\n SUM(m.code_acceptance_activity_count) as \"Acceptances\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "Code Suggestions & Acceptances", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Lines of code suggested vs added over time for team members", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, 
+ "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Lines of Code", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 16 + }, + "id": 19, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n SUM(m.loc_suggested_to_add_sum) as \"LOC Suggested\",\n SUM(m.loc_added_sum) as \"LOC Added\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "LOC Suggested vs LOC Added Over Time", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Daily acceptance rate trend for team members", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "mappings": [], + 
"max": 1, + "min": 0, + "unit": "percentunit", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 16 + }, + "id": 23, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n SUM(m.code_acceptance_activity_count) / NULLIF(SUM(m.code_generation_activity_count), 0) as \"Acceptance Rate\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "Overall Acceptance Rate Trend", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 24 + }, + "id": 40, + "panels": [], + "title": "Team Comparison", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Side-by-side comparison of key metrics across all teams", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 25 + }, + "id": 41, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", 
+ "format": "table", + "rawQuery": true, + "rawSql": "SELECT t.name as \"Team\",\n COUNT(DISTINCT m.user_login) as \"Active Users\",\n SUM(m.code_generation_activity_count) as \"Suggestions\",\n SUM(m.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"Acceptance %\",\n SUM(m.loc_added_sum) as \"LOC Added\",\n SUM(m.user_initiated_interaction_count) as \"Interactions\",\n COUNT(DISTINCT CASE WHEN m.used_agent = 1 THEN m.user_login END) as \"Agent Adopters\",\n COUNT(DISTINCT CASE WHEN m.used_chat = 1 THEN m.user_login END) as \"Chat Adopters\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY t.name\nORDER BY SUM(m.code_generation_activity_count) DESC", + "refId": "A" + } + ], + "title": "Team Metrics Comparison", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Weekly active users by team over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Users", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "bars", + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 0, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "mode": "normal", + "group": "A" + }, + "thresholdsStyle": { + "mode": "off" + } 
+ }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 35 + }, + "id": 42, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + }, + "stacking": { + "mode": "normal", + "group": "A" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT DATE(DATE_SUB(m.day, INTERVAL WEEKDAY(m.day) DAY)) as time,\n t.name as metric,\n COUNT(DISTINCT m.user_login) as value\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY time, t.name\nORDER BY time", + "refId": "A" + } + ], + "title": "Weekly Active Users by Team", + "type": "timeseries", + "transformations": [ + { + "id": "prepareTimeSeries", + "options": { + "format": "many" + } + }, + { + "id": "renameByRegex", + "options": { + "regex": "^value (.+)$", + "renamePattern": "$1" + } + } + ] + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 43 + }, + "id": 43, + "panels": [], + "title": "Quality & Efficiency Ratios", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Acceptance ratio by language for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 44 + }, + "id": 9, 
+ "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT lf.language as \"Language\",\n SUM(lf.code_generation_activity_count) as \"Suggestions\",\n SUM(lf.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(lf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(lf.code_generation_activity_count), 0), 1) as \"Acceptance Rate %\"\nFROM _tool_copilot_user_metrics_by_language_feature lf\nINNER JOIN _tool_copilot_user_daily_metrics udm\n ON udm.connection_id = lf.connection_id AND udm.user_id = lf.user_id AND udm.day = lf.day AND udm.scope_id = lf.scope_id\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = lf.connection_id AND tu.user_login = udm.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(lf.day)\n AND lf.connection_id = ${connection_id}\n AND lf.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY lf.language\nHAVING SUM(lf.code_generation_activity_count) > 0\nORDER BY ROUND(SUM(lf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(lf.code_generation_activity_count), 0), 1) DESC\nLIMIT 15", + "refId": "A" + } + ], + "title": "Top Languages by Acceptance Rate", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Acceptance ratio by model for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 44 + }, + "id": 10, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true 
+ }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT mf.model as \"Model\",\n SUM(mf.code_generation_activity_count) as \"Suggestions\",\n SUM(mf.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(mf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(mf.code_generation_activity_count), 0), 1) as \"Acceptance Rate %\"\nFROM _tool_copilot_user_metrics_by_model_feature mf\nINNER JOIN _tool_copilot_user_daily_metrics udm\n ON udm.connection_id = mf.connection_id AND udm.user_id = mf.user_id AND udm.day = mf.day AND udm.scope_id = mf.scope_id\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = mf.connection_id AND tu.user_login = udm.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(mf.day)\n AND mf.connection_id = ${connection_id}\n AND mf.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY mf.model\nHAVING SUM(mf.code_generation_activity_count) > 0\nORDER BY ROUND(SUM(mf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(mf.code_generation_activity_count), 0), 1) DESC\nLIMIT 10", + "refId": "A" + } + ], + "title": "Top Models by Acceptance Rate", + "type": "table" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 52 + }, + "id": 45, + "panels": [], + "title": "User Behavior", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Top users by code generation activity in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 53 + }, + "id": 29, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + 
"showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.user_login as \"user_login\",\n t.name as \"team\",\n SUM(m.code_generation_activity_count) as \"suggestions\",\n SUM(m.code_acceptance_activity_count) as \"acceptances\",\n ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"acceptance %\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY m.user_login, t.name\nHAVING SUM(m.code_generation_activity_count) > 0\nORDER BY SUM(m.code_generation_activity_count) DESC\nLIMIT 25", + "refId": "A" + } + ], + "title": "Top Users by Code Generations", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Agent vs chat user trend for team members", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Users", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + 
] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 53 + }, + "id": 30, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n COUNT(DISTINCT CASE WHEN m.used_agent = 1 THEN m.user_login END) as \"Agent Users\",\n COUNT(DISTINCT CASE WHEN m.used_chat = 1 THEN m.user_login END) as \"Chat Users\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "Agent Users vs Chat Users Trend", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "New active users per day for team members", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Users", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + 
"color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 61 + }, + "id": 31, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT first_day as time,\n COUNT(*) as \"New Active Users\"\nFROM (\n SELECT m.user_login,\n MIN(m.day) as first_day\n FROM _tool_copilot_user_daily_metrics m\n INNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\n GROUP BY m.user_login\n) first_seen\nWHERE $__timeFilter(first_day)\nGROUP BY first_day\nORDER BY 1", + "refId": "A" + } + ], + "title": "New Active Users per Day", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 69 + }, + "id": 46, + "panels": [], + "title": "Seat Effectiveness", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Seat activity by editor for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 0, + "y": 70 + }, + "id": 33, + "options": { + "displayLabels": [ + "name", + "percent" + ], + "legend": { + "displayMode": "list", + "placement": "right", + "showLegend": true + }, + "pieType": "pie", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + 
"pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COALESCE(NULLIF(s.last_activity_editor, ''), 'unknown') as \"Editor\",\n COUNT(*) as \"Seats\"\nFROM _tool_copilot_seats s\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = s.connection_id AND tu.user_login = s.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE s.connection_id = ${connection_id}\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY COALESCE(NULLIF(s.last_activity_editor, ''), 'unknown')\nORDER BY COUNT(*) DESC", + "refId": "A" + } + ], + "title": "Seat Activity by Editor", + "type": "piechart" + }, + { + "datasource": "mysql", + "description": "Total, active, and inactive seats for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 8, + "y": 70 + }, + "id": 34, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(*) as \"total_seats\",\n SUM(CASE WHEN s.last_activity_at IS NOT NULL AND s.last_activity_at >= DATE_SUB(UTC_TIMESTAMP(), INTERVAL 30 DAY) THEN 1 ELSE 0 END) as \"active_seats\",\n SUM(CASE WHEN s.last_activity_at IS NULL OR s.last_activity_at < DATE_SUB(UTC_TIMESTAMP(), INTERVAL 30 DAY) THEN 1 ELSE 0 END) as \"inactive_seats\"\nFROM _tool_copilot_seats s\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = s.connection_id AND tu.user_login = s.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE s.connection_id = 
${connection_id}\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "refId": "A" + } + ], + "title": "Seats Summary", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Inactive seats for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 16, + "y": 70 + }, + "id": 35, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT s.user_login as \"user_login\",\n t.name as \"team\",\n s.last_activity_at as \"last_activity_at\",\n s.plan_type as \"plan_type\"\nFROM _tool_copilot_seats s\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = s.connection_id AND tu.user_login = s.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE s.connection_id = ${connection_id}\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\n AND (s.last_activity_at IS NULL OR s.last_activity_at < DATE_SUB(UTC_TIMESTAMP(), INTERVAL 30 DAY))\nORDER BY s.last_activity_at IS NULL DESC, s.last_activity_at ASC\nLIMIT 100", + "refId": "A" + } + ], + "title": "Inactive Seats", + "type": "table" + } + ], + "refresh": "", + "schemaVersion": 38, + "tags": [ + "copilot", + "devlake", + "teams" + ], + "templating": { + "list": [ + { + "current": { + "selected": false, + "text": "", + "value": "" + }, + "datasource": "mysql", + "definition": "SELECT DISTINCT connection_id FROM _tool_copilot_scopes ORDER BY connection_id DESC", + "hide": 0, + "includeAll": false, + "label": "Connection ID", + "multi": false, + "name": "connection_id", + "options": [], + "query": "SELECT DISTINCT 
connection_id FROM _tool_copilot_scopes ORDER BY connection_id DESC", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": { + "selected": false, + "text": "", + "value": "" + }, + "datasource": "mysql", + "definition": "SELECT DISTINCT id as scope_id FROM _tool_copilot_scopes WHERE connection_id = ${connection_id} ORDER BY 1", + "hide": 0, + "includeAll": false, + "label": "Scope ID", + "multi": false, + "name": "scope_id", + "options": [], + "query": "SELECT DISTINCT id as scope_id FROM _tool_copilot_scopes WHERE connection_id = ${connection_id} ORDER BY 1", + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": { + "selected": false, + "text": "All", + "value": "All" + }, + "datasource": "mysql", + "definition": "SELECT slug as __value, name as __text FROM _tool_copilot_teams WHERE connection_id = ${connection_id} ORDER BY name", + "hide": 0, + "includeAll": true, + "label": "Team", + "multi": false, + "name": "team", + "options": [], + "query": "SELECT slug as __value, name as __text FROM _tool_copilot_teams WHERE connection_id = ${connection_id} ORDER BY name", + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query", + "allValue": "All" + } + ] + }, + "time": { + "from": "now-90d", + "to": "now" + }, + "timepicker": {}, + "timezone": "utc", + "title": "GitHub Copilot Adoption by Team", + "uid": "copilot_adoption_by_team", + "version": 1, + "weekStart": "" +} \ No newline at end of file From 6cddb800d0fba7219a09f2b3b6ff8ec163ef6f0d Mon Sep 17 00:00:00 2001 From: "tamas.albert" Date: Wed, 4 Mar 2026 16:02:53 +0200 Subject: [PATCH 02/39] upstream | XPL-496: Fix queries and add missing panels to dashboard --- .../GithubCopilotAdoptionByTeam.json | 208 ++++++++++++++++-- 1 file changed, 185 insertions(+), 23 deletions(-) diff --git a/grafana/dashboards/GithubCopilotAdoptionByTeam.json 
b/grafana/dashboards/GithubCopilotAdoptionByTeam.json index 06db211fce1..a5efc0df349 100644 --- a/grafana/dashboards/GithubCopilotAdoptionByTeam.json +++ b/grafana/dashboards/GithubCopilotAdoptionByTeam.json @@ -194,7 +194,7 @@ "datasource": "mysql", "format": "table", "rawQuery": true, - "rawSql": "SELECT SUM(m.loc_added_sum) as \"Lines Added\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "rawSql": "SELECT SUM(m.loc_added_sum) as \"Lines Added\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )", "refId": "A" } ], @@ -248,7 +248,7 @@ "datasource": "mysql", "format": "table", "rawQuery": true, - "rawSql": "SELECT SUM(m.user_initiated_interaction_count) as \"User-Initiated Interactions\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", + "rawSql": "SELECT SUM(m.user_initiated_interaction_count) as \"User-Initiated Interactions\"\nFROM 
_tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )", "refId": "A" } ], @@ -481,6 +481,114 @@ "title": "Seat Utilization", "type": "gauge" }, + { + "datasource": "mysql", + "description": "Users who have used Copilot agent mode in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "purple", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 0, + "y": 8 + }, + "id": 47, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Agent Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND m.used_agent = 1\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )", + "refId": "A" + } + ], + "title": "Agent Mode Adopters", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Users who have used Copilot chat 
in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "orange", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 6, + "y": 8 + }, + "id": 48, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Chat Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND m.used_chat = 1\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )", + "refId": "A" + } + ], + "title": "Chat Adopters", + "type": "stat" + }, { "datasource": "mysql", "description": "Active users over time for the selected team(s)", @@ -538,7 +646,7 @@ "h": 8, "w": 12, "x": 0, - "y": 8 + "y": 12 }, "id": 5, "options": { @@ -623,7 +731,7 @@ "h": 8, "w": 12, "x": 12, - "y": 8 + "y": 12 }, "id": 6, "options": { @@ -708,7 +816,7 @@ "h": 8, "w": 12, "x": 0, - "y": 16 + "y": 20 }, "id": 19, "options": { @@ -764,7 +872,7 @@ "h": 8, "w": 12, "x": 12, - "y": 16 + "y": 20 }, "id": 23, "options": { @@ -792,13 +900,67 @@ "title": "Overall Acceptance Rate Trend", "type": "timeseries" }, + { + "datasource": "mysql", + "description": "Daily LOC yield trend for team members (added LOC / suggested LOC)", + "fieldConfig": { + "defaults": { + "color": { 
+ "mode": "palette-classic" + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 28 + }, + "id": 49, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n ROUND(SUM(m.loc_added_sum) / NULLIF(SUM(m.loc_suggested_to_add_sum), 0), 2) as \"LOC Yield\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "LOC Yield Trend", + "type": "timeseries" + }, { "collapsed": false, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 24 + "y": 36 }, "id": 40, "panels": [], @@ -827,7 +989,7 @@ "h": 10, "w": 24, "x": 0, - "y": 25 + "y": 37 }, "id": 41, "options": { @@ -912,7 +1074,7 @@ "h": 8, "w": 24, "x": 0, - "y": 35 + "y": 47 }, "id": 42, "options": { @@ -965,7 +1127,7 @@ "h": 1, "w": 24, "x": 0, - "y": 43 + "y": 55 }, "id": 43, "panels": [], @@ -994,7 +1156,7 @@ "h": 8, "w": 12, "x": 0, - "y": 44 + "y": 56 }, "id": 9, "options": { @@ -1044,7 +1206,7 @@ "h": 8, "w": 12, "x": 12, - "y": 44 + "y": 56 }, "id": 10, "options": { @@ -1078,7 +1240,7 @@ "h": 1, "w": 24, "x": 0, - "y": 52 + "y": 64 }, "id": 45, "panels": [], @@ -1107,7 +1269,7 @@ "h": 8, "w": 12, "x": 0, - "y": 53 + "y": 65 }, "id": 29, 
"options": { @@ -1128,7 +1290,7 @@ "datasource": "mysql", "format": "table", "rawQuery": true, - "rawSql": "SELECT m.user_login as \"user_login\",\n t.name as \"team\",\n SUM(m.code_generation_activity_count) as \"suggestions\",\n SUM(m.code_acceptance_activity_count) as \"acceptances\",\n ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"acceptance %\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY m.user_login, t.name\nHAVING SUM(m.code_generation_activity_count) > 0\nORDER BY SUM(m.code_generation_activity_count) DESC\nLIMIT 25", + "rawSql": "SELECT m.user_login as \"user_login\",\n (SELECT GROUP_CONCAT(DISTINCT t.name ORDER BY t.name SEPARATOR ', ')\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n ) as \"teams\",\n SUM(m.code_generation_activity_count) as \"suggestions\",\n SUM(m.code_acceptance_activity_count) as \"acceptances\",\n ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"acceptance %\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = 
'${team:raw}'\n )\n )\nGROUP BY m.user_login\nHAVING SUM(m.code_generation_activity_count) > 0\nORDER BY SUM(m.code_generation_activity_count) DESC\nLIMIT 25", "refId": "A" } ], @@ -1192,7 +1354,7 @@ "h": 8, "w": 12, "x": 12, - "y": 53 + "y": 65 }, "id": 30, "options": { @@ -1277,7 +1439,7 @@ "h": 8, "w": 24, "x": 0, - "y": 61 + "y": 73 }, "id": 31, "options": { @@ -1311,7 +1473,7 @@ "h": 1, "w": 24, "x": 0, - "y": 69 + "y": 81 }, "id": 46, "panels": [], @@ -1340,7 +1502,7 @@ "h": 8, "w": 8, "x": 0, - "y": 70 + "y": 82 }, "id": 33, "options": { @@ -1401,7 +1563,7 @@ "h": 8, "w": 8, "x": 8, - "y": 70 + "y": 82 }, "id": 34, "options": { @@ -1451,7 +1613,7 @@ "h": 8, "w": 8, "x": 16, - "y": 70 + "y": 82 }, "id": 35, "options": { @@ -1472,7 +1634,7 @@ "datasource": "mysql", "format": "table", "rawQuery": true, - "rawSql": "SELECT s.user_login as \"user_login\",\n t.name as \"team\",\n s.last_activity_at as \"last_activity_at\",\n s.plan_type as \"plan_type\"\nFROM _tool_copilot_seats s\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = s.connection_id AND tu.user_login = s.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE s.connection_id = ${connection_id}\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\n AND (s.last_activity_at IS NULL OR s.last_activity_at < DATE_SUB(UTC_TIMESTAMP(), INTERVAL 30 DAY))\nORDER BY s.last_activity_at IS NULL DESC, s.last_activity_at ASC\nLIMIT 100", + "rawSql": "SELECT s.user_login as \"user_login\",\n (SELECT GROUP_CONCAT(DISTINCT t.name ORDER BY t.name SEPARATOR ', ')\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = s.connection_id\n AND tu.user_login = s.user_login\n ) as \"teams\",\n s.last_activity_at as \"last_activity_at\",\n s.plan_type as \"plan_type\"\nFROM _tool_copilot_seats s\nWHERE s.connection_id = ${connection_id}\n AND (\n 
'${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = s.connection_id\n AND tu.user_login = s.user_login\n AND t.slug = '${team:raw}'\n )\n )\n AND (s.last_activity_at IS NULL OR s.last_activity_at < DATE_SUB(UTC_TIMESTAMP(), INTERVAL 30 DAY))\nORDER BY s.last_activity_at IS NULL DESC, s.last_activity_at ASC\nLIMIT 100", "refId": "A" } ], @@ -1565,4 +1727,4 @@ "uid": "copilot_adoption_by_team", "version": 1, "weekStart": "" -} \ No newline at end of file +} From de77d55784dd95d7b0e98ccd1ce4ad6d12d3d842 Mon Sep 17 00:00:00 2001 From: "tamas.albert" Date: Fri, 6 Mar 2026 11:33:03 +0200 Subject: [PATCH 03/39] XPL-496: Create new dashboard filterable by team and user --- .../GithubCopilotAdoptionByTeamAndUser.json | 2535 +++++++++++++++++ 1 file changed, 2535 insertions(+) create mode 100644 grafana/dashboards/GithubCopilotAdoptionByTeamAndUser.json diff --git a/grafana/dashboards/GithubCopilotAdoptionByTeamAndUser.json b/grafana/dashboards/GithubCopilotAdoptionByTeamAndUser.json new file mode 100644 index 00000000000..bcd664c9e20 --- /dev/null +++ b/grafana/dashboards/GithubCopilotAdoptionByTeamAndUser.json @@ -0,0 +1,2535 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": "mysql", + "description": "Daily active users in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "yellow", + "value": 50 + }, + { + "color": "red", + "value": 100 + 
} + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 0, + "y": 0 + }, + "id": 1, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Daily Active Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND m.day = (\n SELECT MAX(day) FROM _tool_copilot_user_daily_metrics\n WHERE connection_id = ${connection_id} AND scope_id = '${scope_id}'\n )\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "Daily Active Users", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Code acceptance rate for the selected team(s) in the time range", + "fieldConfig": { + "defaults": { + "mappings": [], + "min": 0, + "max": 100, + "unit": "percent", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": null + }, + { + "color": "yellow", + "value": 20 + }, + { + "color": "green", + "value": 40 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 6, + "y": 0 + }, + "id": 2, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + 
"showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"Acceptance Rate %\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "Code Acceptance Rate", + "type": "gauge" + }, + { + "datasource": "mysql", + "description": "Total lines of code added by team members in the time range", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 12, + "y": 0 + }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT SUM(m.loc_added_sum) as \"Lines Added\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM 
_tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "Lines of Code Added", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Total user-initiated interactions in the time range", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "purple", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 18, + "y": 0 + }, + "id": 4, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT SUM(m.user_initiated_interaction_count) as \"User-Initiated Interactions\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "User-Initiated Interactions", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Weekly active users in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": 
[], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 0, + "y": 4 + }, + "id": 15, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Weekly Active Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE m.day >= DATE_SUB(CURDATE(), INTERVAL 7 DAY)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "Weekly Active Users (WAU)", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Monthly active users in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 6, + "y": 4 + }, + "id": 16, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + 
"datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Monthly Active Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE m.day >= DATE_SUB(CURDATE(), INTERVAL 28 DAY)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "Monthly Active Users (MAU)", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Unique users with Copilot activity in the time range", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 12, + "y": 4 + }, + "id": 11, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Unique Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = 
m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "Unique Users (Period)", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Seat utilization for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "min": 0, + "max": 100, + "unit": "percent", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": null + }, + { + "color": "yellow", + "value": 50 + }, + { + "color": "green", + "value": 75 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 18, + "y": 4 + }, + "id": 14, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n COUNT(CASE WHEN s.last_activity_at IS NOT NULL THEN 1 END) * 100.0 / NULLIF(COUNT(*), 0) as \"Utilization %\"\nFROM _tool_copilot_seats s\nWHERE s.connection_id = ${connection_id}\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = s.connection_id\n AND tu.user_login = s.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR s.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "Seat Utilization", + "type": "gauge" + }, + { + "datasource": "mysql", + "description": "Users who have used Copilot agent mode in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "purple", + "value": null + } + ] + } + }, + 
"overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 0, + "y": 8 + }, + "id": 47, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Agent Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND m.used_agent = 1\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "Agent Mode Adopters", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Users who have used Copilot chat in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "orange", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 6, + "y": 8 + }, + "id": 48, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT m.user_login) as 
\"Chat Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND m.used_chat = 1\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "Chat Adopters", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Active users over time for the selected team(s)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Users", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 12 + }, + "id": 5, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n COUNT(DISTINCT m.user_login) 
as \"Active Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "Active Users Over Time", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Code suggestions generated vs accepted over time for team members", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Count", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 12 + }, + "id": 6, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT 
m.day as time,\n SUM(m.code_generation_activity_count) as \"Suggestions\",\n SUM(m.code_acceptance_activity_count) as \"Acceptances\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "Code Suggestions & Acceptances", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Lines of code suggested vs added over time for team members", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Lines of Code", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 20 + }, + "id": 19, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + 
"pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n SUM(m.loc_suggested_to_add_sum) as \"LOC Suggested\",\n SUM(m.loc_added_sum) as \"LOC Added\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "LOC Suggested vs LOC Added Over Time", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Daily acceptance rate trend for team members", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "mappings": [], + "max": 1, + "min": 0, + "unit": "percentunit", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 20 + }, + "id": 23, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n SUM(m.code_acceptance_activity_count) / NULLIF(SUM(m.code_generation_activity_count), 0) as \"Acceptance Rate\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM 
_tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "Overall Acceptance Rate Trend", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Daily LOC yield trend for team members (added LOC / suggested LOC)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 28 + }, + "id": 49, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n ROUND(SUM(m.loc_added_sum) / NULLIF(SUM(m.loc_suggested_to_add_sum), 0), 2) as \"LOC Yield\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "LOC Yield Trend", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 
0, + "y": 36 + }, + "id": 40, + "panels": [], + "title": "Team Comparison", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Side-by-side comparison of key metrics across all teams (users in multiple teams are counted once per team)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 37 + }, + "id": 41, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT t.name as \"Team\",\n COUNT(DISTINCT m.user_login) as \"Active Users\",\n SUM(m.code_generation_activity_count) as \"Suggestions\",\n SUM(m.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"Acceptance %\",\n SUM(m.loc_added_sum) as \"LOC Added\",\n SUM(m.user_initiated_interaction_count) as \"Interactions\",\n COUNT(DISTINCT CASE WHEN m.used_agent = 1 THEN m.user_login END) as \"Agent Adopters\",\n COUNT(DISTINCT CASE WHEN m.used_chat = 1 THEN m.user_login END) as \"Chat Adopters\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN (\n SELECT DISTINCT connection_id, team_id, user_login\n FROM _tool_copilot_team_users\n) tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR t.slug IN (${team:sqlstring}))\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY t.name\nORDER BY 
SUM(m.code_generation_activity_count) DESC", + "refId": "A" + } + ], + "title": "Team Metrics Comparison", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Weekly active users by team over time (users in multiple teams appear in each team they belong to)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Users", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "bars", + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 0, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "mode": "normal", + "group": "A" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 47 + }, + "id": 42, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + }, + "stacking": { + "mode": "normal", + "group": "A" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT DATE(DATE_SUB(m.day, INTERVAL WEEKDAY(m.day) DAY)) as time,\n t.name as metric,\n COUNT(DISTINCT m.user_login) as value\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN (\n SELECT DISTINCT connection_id, team_id, user_login\n FROM _tool_copilot_team_users\n) tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = 
tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR t.slug IN (${team:sqlstring}))\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY time, t.name\nORDER BY time", + "refId": "A" + } + ], + "title": "Weekly Active Users by Team", + "type": "timeseries", + "transformations": [ + { + "id": "prepareTimeSeries", + "options": { + "format": "many" + } + }, + { + "id": "renameByRegex", + "options": { + "regex": "^value (.+)$", + "renamePattern": "$1" + } + } + ] + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 55 + }, + "id": 43, + "panels": [], + "title": "Quality & Efficiency Ratios", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Acceptance ratio by language for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 56 + }, + "id": 9, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT lf.language as \"Language\",\n SUM(lf.code_generation_activity_count) as \"Suggestions\",\n SUM(lf.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(lf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(lf.code_generation_activity_count), 0), 1) as \"Acceptance Rate %\"\nFROM _tool_copilot_user_metrics_by_language_feature lf\nINNER JOIN _tool_copilot_user_daily_metrics udm\n ON udm.connection_id = lf.connection_id AND udm.user_id = lf.user_id AND udm.day = lf.day AND udm.scope_id = lf.scope_id\nWHERE $__timeFilter(lf.day)\n AND lf.connection_id = ${connection_id}\n 
AND lf.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = udm.connection_id\n AND tu.user_login = udm.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR udm.user_login IN (${user:sqlstring}))\nGROUP BY lf.language\nHAVING SUM(lf.code_generation_activity_count) > 0\nORDER BY ROUND(SUM(lf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(lf.code_generation_activity_count), 0), 1) DESC\nLIMIT 15", + "refId": "A" + } + ], + "title": "Top Languages by Acceptance Rate", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Acceptance ratio by model for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 56 + }, + "id": 10, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT mf.model as \"Model\",\n SUM(mf.code_generation_activity_count) as \"Suggestions\",\n SUM(mf.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(mf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(mf.code_generation_activity_count), 0), 1) as \"Acceptance Rate %\"\nFROM _tool_copilot_user_metrics_by_model_feature mf\nINNER JOIN _tool_copilot_user_daily_metrics udm\n ON udm.connection_id = mf.connection_id AND udm.user_id = mf.user_id AND udm.day = mf.day AND udm.scope_id = mf.scope_id\nWHERE $__timeFilter(mf.day)\n AND mf.connection_id = ${connection_id}\n AND mf.scope_id = '${scope_id}'\n AND (\n 
'${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = udm.connection_id\n AND tu.user_login = udm.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR udm.user_login IN (${user:sqlstring}))\nGROUP BY mf.model\nHAVING SUM(mf.code_generation_activity_count) > 0\nORDER BY ROUND(SUM(mf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(mf.code_generation_activity_count), 0), 1) DESC\nLIMIT 10", + "refId": "A" + } + ], + "title": "Top Models by Acceptance Rate", + "type": "table" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 64 + }, + "id": 45, + "panels": [], + "title": "User Behavior", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Top users by code generation activity in the selected team(s)", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 65 + }, + "id": 29, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.user_login as \"user_login\",\n (SELECT GROUP_CONCAT(DISTINCT t.name ORDER BY t.name SEPARATOR ', ')\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n ) as \"teams\",\n SUM(m.code_generation_activity_count) as \"suggestions\",\n SUM(m.code_acceptance_activity_count) as \"acceptances\",\n ROUND(SUM(m.code_acceptance_activity_count) * 
100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"acceptance %\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY m.user_login\nHAVING SUM(m.code_generation_activity_count) > 0\nORDER BY SUM(m.code_generation_activity_count) DESC\nLIMIT 25", + "refId": "A" + } + ], + "title": "Top Users by Code Generations", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Agent vs chat user trend for team members", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Users", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 65 + }, + "id": 30, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": 
"none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT m.day as time,\n COUNT(DISTINCT CASE WHEN m.used_agent = 1 THEN m.user_login END) as \"Agent Users\",\n COUNT(DISTINCT CASE WHEN m.used_chat = 1 THEN m.user_login END) as \"Chat Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY m.day\nORDER BY 1", + "refId": "A" + } + ], + "title": "Agent Users vs Chat Users Trend", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "New active users per day for team members", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Users", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 73 + }, + "id": 31, + "options": { 
+ "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT first_day as time,\n COUNT(*) as \"New Active Users\"\nFROM (\n SELECT m.user_login,\n MIN(m.day) as first_day\n FROM _tool_copilot_user_daily_metrics m\n WHERE m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\n GROUP BY m.user_login\n) first_seen\nWHERE $__timeFilter(first_day)\nGROUP BY first_day\nORDER BY 1", + "refId": "A" + } + ], + "title": "New Active Users per Day", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 81 + }, + "id": 46, + "panels": [], + "title": "Seat Effectiveness", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Seat activity by editor for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 0, + "y": 82 + }, + "id": 33, + "options": { + "displayLabels": [ + "name", + "percent" + ], + "legend": { + "displayMode": "list", + "placement": "right", + "showLegend": true + }, + "pieType": "pie", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + 
"datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COALESCE(NULLIF(s.last_activity_editor, ''), 'unknown') as \"Editor\",\n COUNT(*) as \"Seats\"\nFROM _tool_copilot_seats s\nWHERE s.connection_id = ${connection_id}\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = s.connection_id\n AND tu.user_login = s.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR s.user_login IN (${user:sqlstring}))\nGROUP BY COALESCE(NULLIF(s.last_activity_editor, ''), 'unknown')\nORDER BY COUNT(*) DESC", + "refId": "A" + } + ], + "title": "Seat Activity by Editor", + "type": "piechart" + }, + { + "datasource": "mysql", + "description": "Total, active, and inactive seats for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 8, + "y": 82 + }, + "id": 34, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(*) as \"total_seats\",\n SUM(CASE WHEN s.last_activity_at IS NOT NULL AND s.last_activity_at >= DATE_SUB(UTC_TIMESTAMP(), INTERVAL 30 DAY) THEN 1 ELSE 0 END) as \"active_seats\",\n SUM(CASE WHEN s.last_activity_at IS NULL OR s.last_activity_at < DATE_SUB(UTC_TIMESTAMP(), INTERVAL 30 DAY) THEN 1 ELSE 0 END) as \"inactive_seats\"\nFROM _tool_copilot_seats s\nWHERE s.connection_id = ${connection_id}\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON 
t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = s.connection_id\n AND tu.user_login = s.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR s.user_login IN (${user:sqlstring}))", + "refId": "A" + } + ], + "title": "Seats Summary", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Inactive seats for team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 16, + "y": 82 + }, + "id": 35, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT s.user_login as \"user_login\",\n (SELECT GROUP_CONCAT(DISTINCT t.name ORDER BY t.name SEPARATOR ', ')\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = s.connection_id\n AND tu.user_login = s.user_login\n ) as \"teams\",\n s.last_activity_at as \"last_activity_at\",\n s.plan_type as \"plan_type\"\nFROM _tool_copilot_seats s\nWHERE s.connection_id = ${connection_id}\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = s.connection_id\n AND tu.user_login = s.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND (s.last_activity_at IS NULL OR s.last_activity_at < DATE_SUB(UTC_TIMESTAMP(), INTERVAL 30 DAY))\n AND ('${user:csv}' = '0' OR s.user_login IN (${user:sqlstring}))\nORDER BY s.last_activity_at IS NULL DESC, s.last_activity_at ASC\nLIMIT 100", + 
"refId": "A" + } + ], + "title": "Inactive Seats", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Total PR summaries created by Copilot for selected team(s) in the time range Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 12, + "y": 8 + }, + "id": 50, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT SUM(total_pr_summaries_created) as \"PRs by Copilot\"\nFROM _tool_copilot_team_dotcom_prs\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'", + "refId": "A" + } + ], + "title": "PRs Created by Copilot", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Proxy metric for PR review engagement in selected team(s) (team API exposes engaged users, not reviewed count) Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 18, + "y": 8 + }, + "id": 51, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + 
"showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT SUM(total_engaged_users) as \"PRs Reviewed by Copilot\"\nFROM _tool_copilot_team_dotcom_prs\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'", + "refId": "A" + } + ], + "title": "PRs Reviewed by Copilot", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "PR summary activity over time for selected team(s) Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Pull Requests", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 28 + }, + "id": 52, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT date as time,\n 
SUM(total_pr_summaries_created) as \"PRs Created by Copilot\",\n SUM(total_engaged_users) as \"PR Engaged Users\"\nFROM _tool_copilot_team_dotcom_prs\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\nGROUP BY date\nORDER BY 1", + "refId": "A" + } + ], + "title": "PR Activity Over Time", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Weekly activity mix by Copilot capability for selected team(s) Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Interactions", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "bars", + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 0, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "mode": "normal", + "group": "A" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 90 + }, + "id": 53, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + }, + "stacking": { + "mode": "normal", + "group": "A" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT time, metric, SUM(value) as value\nFROM (\n SELECT DATE(DATE_SUB(date, 
INTERVAL WEEKDAY(date) DAY)) as time,\n 'Code Completions' as metric,\n SUM(total_code_suggestions) as value\n FROM _tool_copilot_team_completions\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY))\n\n UNION ALL\n\n SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n 'IDE Chat' as metric,\n SUM(total_chats) as value\n FROM _tool_copilot_team_ide_chat\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY))\n\n UNION ALL\n\n SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n 'Dotcom Chat' as metric,\n SUM(total_chats) as value\n FROM _tool_copilot_team_dotcom_chat\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY))\n\n UNION ALL\n\n SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n 'Dotcom Pull Requests' as metric,\n SUM(total_pr_summaries_created) as value\n FROM _tool_copilot_team_dotcom_prs\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY))\n) feature_mix\nGROUP BY time, metric\nORDER BY time", + "refId": "A" + } + ], + "title": "Feature Mix Over Time", + "type": "timeseries", + "transformations": [ + { + "id": "prepareTimeSeries", + "options": { + "format": "many" + } + }, + { + "id": "renameByRegex", + "options": { + "regex": "^value (.+)$", + 
"renamePattern": "$1" + } + } + ] + }, + { + "datasource": "mysql", + "description": "Weekly IDE activity for selected team(s) Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Weekly Interactions", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "bars", + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 0, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "mode": "normal", + "group": "A" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 90 + }, + "id": 54, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + }, + "stacking": { + "mode": "normal", + "group": "A" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT time, metric, SUM(value) as value\nFROM (\n SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n editor as metric,\n SUM(total_code_suggestions + total_code_acceptances) as value\n FROM _tool_copilot_team_completions\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)), editor\n\n UNION ALL\n\n 
SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n editor as metric,\n SUM(total_chats) as value\n FROM _tool_copilot_team_ide_chat\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)), editor\n) ide_adoption\nGROUP BY time, metric\nORDER BY time", + "refId": "A" + } + ], + "title": "IDE Adoption Over Time", + "type": "timeseries", + "transformations": [ + { + "id": "prepareTimeSeries", + "options": { + "format": "many" + } + }, + { + "id": "renameByRegex", + "options": { + "regex": "^value (.+)$", + "renamePattern": "$1" + } + } + ] + }, + { + "datasource": "mysql", + "description": "Matrix-style breakdown of completion suggestions and acceptances by language for selected team(s) Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 98 + }, + "id": 55, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT language as \"Language\",\n 'code_completions' as \"Feature\",\n SUM(total_code_suggestions) as \"Suggestions\",\n SUM(total_code_acceptances) as \"Acceptances\"\nFROM _tool_copilot_team_completions\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\nGROUP BY language\nHAVING SUM(total_code_suggestions) > 0\nORDER BY language", + 
"refId": "A" + } + ], + "title": "Feature x Language Matrix", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Acceptance ratio by Copilot feature for selected team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 0, + "y": 106 + }, + "id": 56, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT f.feature as \"Feature\",\n SUM(f.code_generation_activity_count) as \"Suggestions\",\n SUM(f.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(f.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(f.code_generation_activity_count), 0), 1) as \"Acceptance Rate %\"\nFROM _tool_copilot_user_metrics_by_feature f\nINNER JOIN _tool_copilot_user_daily_metrics udm\n ON udm.connection_id = f.connection_id AND udm.user_id = f.user_id AND udm.day = f.day AND udm.scope_id = f.scope_id\nWHERE $__timeFilter(f.day)\n AND f.connection_id = ${connection_id}\n AND f.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = udm.connection_id\n AND tu.user_login = udm.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR udm.user_login IN (${user:sqlstring}))\nGROUP BY f.feature\nHAVING SUM(f.code_generation_activity_count) > 0\nORDER BY ROUND(SUM(f.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(f.code_generation_activity_count), 0), 1) DESC, SUM(f.code_generation_activity_count) DESC\nLIMIT 20", + "refId": "A" + } + 
], + "title": "Acceptance Rate by Feature", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Acceptance ratio by programming language for selected team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 8, + "y": 106 + }, + "id": 57, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT lf.language as \"Language\",\n SUM(lf.code_generation_activity_count) as \"Suggestions\",\n SUM(lf.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(lf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(lf.code_generation_activity_count), 0), 1) as \"Acceptance Rate %\"\nFROM _tool_copilot_user_metrics_by_language_feature lf\nINNER JOIN _tool_copilot_user_daily_metrics udm\n ON udm.connection_id = lf.connection_id AND udm.user_id = lf.user_id AND udm.day = lf.day AND udm.scope_id = lf.scope_id\nWHERE $__timeFilter(lf.day)\n AND lf.connection_id = ${connection_id}\n AND lf.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = udm.connection_id\n AND tu.user_login = udm.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR udm.user_login IN (${user:sqlstring}))\nGROUP BY lf.language\nHAVING SUM(lf.code_generation_activity_count) > 0\nORDER BY ROUND(SUM(lf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(lf.code_generation_activity_count), 0), 1) DESC, SUM(lf.code_generation_activity_count) DESC\nLIMIT 20", + 
"refId": "A" + } + ], + "title": "Acceptance Rate by Language", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Acceptance ratio by model for selected team members", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 16, + "y": 106 + }, + "id": 58, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT mf.model as \"Model\",\n SUM(mf.code_generation_activity_count) as \"Suggestions\",\n SUM(mf.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(mf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(mf.code_generation_activity_count), 0), 1) as \"Acceptance Rate %\"\nFROM _tool_copilot_user_metrics_by_model_feature mf\nINNER JOIN _tool_copilot_user_daily_metrics udm\n ON udm.connection_id = mf.connection_id AND udm.user_id = mf.user_id AND udm.day = mf.day AND udm.scope_id = mf.scope_id\nWHERE $__timeFilter(mf.day)\n AND mf.connection_id = ${connection_id}\n AND mf.scope_id = '${scope_id}'\n AND (\n '${team:csv}' = '0'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = udm.connection_id\n AND tu.user_login = udm.user_login\n AND t.slug IN (${team:sqlstring})\n )\n )\n AND ('${user:csv}' = '0' OR udm.user_login IN (${user:sqlstring}))\nGROUP BY mf.model\nHAVING SUM(mf.code_generation_activity_count) > 0\nORDER BY ROUND(SUM(mf.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(mf.code_generation_activity_count), 0), 1) DESC, SUM(mf.code_generation_activity_count) DESC\nLIMIT 20", + 
"refId": "A" + } + ], + "title": "Acceptance Rate by Model", + "type": "table" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 114 + }, + "id": 59, + "panels": [], + "title": "Diagnostics", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Latest available team metrics day freshness Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 0, + "y": 115 + }, + "id": 60, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COALESCE(TIMESTAMPDIFF(DAY, MAX(date), UTC_DATE()), 9999) as \"Days Since Latest Data\"\nFROM _tool_copilot_team_daily_metrics\nWHERE connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'", + "refId": "A" + } + ], + "title": "Data Freshness", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Share of PR summaries tied to unknown model taxonomy in selected team(s) Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "mappings": [], + "min": 0, + "max": 100, + "unit": "percent", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "yellow", + "value": 5 + }, + { + "color": "red", + "value": 20 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 6, 
+ "y": 115 + }, + "id": 61, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(\n SUM(CASE WHEN LOWER(COALESCE(model, '')) = 'unknown' THEN total_pr_summaries_created ELSE 0 END) * 100.0\n / NULLIF(SUM(total_pr_summaries_created), 0),\n 2\n) as \"Unknown Taxonomy %\"\nFROM _tool_copilot_team_dotcom_prs\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'", + "refId": "A" + } + ], + "title": "Unknown Taxonomy Share", + "type": "gauge" + }, + { + "datasource": "mysql", + "description": "Row counts for team Copilot metrics tables Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 115 + }, + "id": 62, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT *\nFROM (\n SELECT '_tool_copilot_team_daily_metrics' as table_name,\n COUNT(*) as row_count\n FROM _tool_copilot_team_daily_metrics\n WHERE connection_id = ${connection_id} AND scope_id = '${scope_id}' AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring})) AND '${user:csv}' = '0'\n\n UNION ALL\n\n SELECT '_tool_copilot_team_completions' as 
table_name,\n COUNT(*) as row_count\n FROM _tool_copilot_team_completions\n WHERE connection_id = ${connection_id} AND scope_id = '${scope_id}' AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring})) AND '${user:csv}' = '0'\n\n UNION ALL\n\n SELECT '_tool_copilot_team_ide_chat' as table_name,\n COUNT(*) as row_count\n FROM _tool_copilot_team_ide_chat\n WHERE connection_id = ${connection_id} AND scope_id = '${scope_id}' AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring})) AND '${user:csv}' = '0'\n\n UNION ALL\n\n SELECT '_tool_copilot_team_dotcom_chat' as table_name,\n COUNT(*) as row_count\n FROM _tool_copilot_team_dotcom_chat\n WHERE connection_id = ${connection_id} AND scope_id = '${scope_id}' AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring})) AND '${user:csv}' = '0'\n\n UNION ALL\n\n SELECT '_tool_copilot_team_dotcom_prs' as table_name,\n COUNT(*) as row_count\n FROM _tool_copilot_team_dotcom_prs\n WHERE connection_id = ${connection_id} AND scope_id = '${scope_id}' AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring})) AND '${user:csv}' = '0'\n) data_volume\nORDER BY row_count DESC", + "refId": "A" + } + ], + "title": "Data Volume by Table", + "type": "table" + } + ], + "refresh": "", + "schemaVersion": 38, + "tags": [ + "copilot", + "devlake", + "teams", + "users" + ], + "templating": { + "list": [ + { + "current": { + "selected": false, + "text": "", + "value": "" + }, + "datasource": "mysql", + "definition": "SELECT DISTINCT connection_id FROM _tool_copilot_scopes ORDER BY connection_id DESC", + "hide": 0, + "includeAll": false, + "label": "Connection ID", + "multi": false, + "name": "connection_id", + "options": [], + "query": "SELECT DISTINCT connection_id FROM _tool_copilot_scopes ORDER BY connection_id DESC", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": { + "selected": false, + "text": "", + "value": "" + }, + "datasource": "mysql", + "definition": "SELECT 
DISTINCT id as scope_id FROM _tool_copilot_scopes WHERE connection_id = ${connection_id} ORDER BY 1", + "hide": 0, + "includeAll": false, + "label": "Scope ID", + "multi": false, + "name": "scope_id", + "options": [], + "query": "SELECT DISTINCT id as scope_id FROM _tool_copilot_scopes WHERE connection_id = ${connection_id} ORDER BY 1", + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": { + "selected": false, + "text": "All", + "value": "0" + }, + "datasource": "mysql", + "definition": "SELECT slug as __value, name as __text FROM _tool_copilot_teams WHERE connection_id = ${connection_id} ORDER BY name", + "hide": 0, + "includeAll": true, + "label": "Team", + "multi": true, + "name": "team", + "options": [], + "query": "SELECT slug as __value, name as __text FROM _tool_copilot_teams WHERE connection_id = ${connection_id} ORDER BY name", + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query", + "allValue": "0" + }, + { + "current": { + "selected": false, + "text": "All", + "value": "0" + }, + "datasource": "mysql", + "definition": "SELECT DISTINCT m.user_login as __value, m.user_login as __text FROM _tool_copilot_user_daily_metrics m WHERE m.connection_id = ${connection_id} AND m.scope_id = '${scope_id}' AND ('${team:csv}' = '0' OR EXISTS (SELECT 1 FROM _tool_copilot_team_users tu INNER JOIN _tool_copilot_teams t ON t.connection_id = tu.connection_id AND t.id = tu.team_id WHERE tu.connection_id = m.connection_id AND tu.user_login = m.user_login AND t.slug IN (${team:sqlstring}))) ORDER BY m.user_login", + "hide": 0, + "includeAll": true, + "label": "User", + "multi": true, + "name": "user", + "options": [], + "query": "SELECT DISTINCT m.user_login as __value, m.user_login as __text FROM _tool_copilot_user_daily_metrics m WHERE m.connection_id = ${connection_id} AND m.scope_id = '${scope_id}' AND ('${team:csv}' = '0' OR EXISTS (SELECT 1 FROM _tool_copilot_team_users tu INNER JOIN 
_tool_copilot_teams t ON t.connection_id = tu.connection_id AND t.id = tu.team_id WHERE tu.connection_id = m.connection_id AND tu.user_login = m.user_login AND t.slug IN (${team:sqlstring}))) ORDER BY m.user_login", + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query", + "allValue": "0" + } + ] + }, + "time": { + "from": "now-90d", + "to": "now" + }, + "timepicker": {}, + "timezone": "utc", + "title": "GitHub Copilot Adoption by Team and User", + "uid": "copilot_adoption_by_team_and_user", + "version": 1, + "weekStart": "" +} From ce2a9a5b63fa67602fbe2de2eb2464f32eb29608 Mon Sep 17 00:00:00 2001 From: "tamas.albert" Date: Mon, 9 Mar 2026 11:35:30 +0200 Subject: [PATCH 04/39] XPL-496: Fix partially functional charts --- .../GithubCopilotAdoptionByTeamAndUser.json | 1018 ++++++++++------- 1 file changed, 579 insertions(+), 439 deletions(-) diff --git a/grafana/dashboards/GithubCopilotAdoptionByTeamAndUser.json b/grafana/dashboards/GithubCopilotAdoptionByTeamAndUser.json index bcd664c9e20..00de984fa6b 100644 --- a/grafana/dashboards/GithubCopilotAdoptionByTeamAndUser.json +++ b/grafana/dashboards/GithubCopilotAdoptionByTeamAndUser.json @@ -18,8 +18,8 @@ "editable": true, "fiscalYearStartMonth": 0, "graphTooltip": 0, + "id": 49, "links": [], - "liveNow": false, "panels": [ { "datasource": "mysql", @@ -31,8 +31,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" }, { "color": "yellow", @@ -59,6 +58,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -70,7 +70,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -89,15 +89,13 @@ "fieldConfig": { "defaults": { "mappings": [], - "min": 0, "max": 100, - "unit": "percent", + "min": 0, "thresholds": { "mode": "absolute", "steps": [ { - "color": "red", - 
"value": null + "color": "red" }, { "color": "yellow", @@ -108,7 +106,8 @@ "value": 40 } ] - } + }, + "unit": "percent" }, "overrides": [] }, @@ -134,7 +133,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -157,8 +156,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -177,6 +175,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -188,7 +187,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -211,8 +210,7 @@ "mode": "absolute", "steps": [ { - "color": "purple", - "value": null + "color": "purple" } ] } @@ -231,6 +229,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -242,7 +241,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -265,8 +264,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -285,6 +283,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -296,7 +295,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -319,8 +318,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -339,6 +337,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -350,7 +349,7 @@ "textMode": "auto", "wideLayout": true }, - 
"pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -373,8 +372,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -393,6 +391,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -404,7 +403,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -423,15 +422,13 @@ "fieldConfig": { "defaults": { "mappings": [], - "min": 0, "max": 100, - "unit": "percent", + "min": 0, "thresholds": { "mode": "absolute", "steps": [ { - "color": "red", - "value": null + "color": "red" }, { "color": "yellow", @@ -442,7 +439,8 @@ "value": 75 } ] - } + }, + "unit": "percent" }, "overrides": [] }, @@ -468,7 +466,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -491,8 +489,7 @@ "mode": "absolute", "steps": [ { - "color": "purple", - "value": null + "color": "purple" } ] } @@ -511,6 +508,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -522,7 +520,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -545,8 +543,7 @@ "mode": "absolute", "steps": [ { - "color": "orange", - "value": null + "color": "orange" } ] } @@ -565,6 +562,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -576,7 +574,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -589,6 +587,114 @@ "title": "Chat Adopters", "type": "stat" }, + { 
+ "datasource": "mysql", + "description": "Total PR summaries created by Copilot for selected team(s) in the time range Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 12, + "y": 8 + }, + "id": 50, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT SUM(total_pr_summaries_created) as \"PRs by Copilot\"\nFROM _tool_copilot_team_dotcom_prs\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'", + "refId": "A" + } + ], + "title": "PRs Created by Copilot", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Proxy metric for PR review engagement in selected team(s) (team API exposes engaged users, not reviewed count) Team-level aggregate; returns no data when User filter is applied.", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 18, + "y": 8 + }, + "id": 51, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + 
"textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT SUM(total_engaged_users) as \"PRs Reviewed by Copilot\"\nFROM _tool_copilot_team_dotcom_prs\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'", + "refId": "A" + } + ], + "title": "PRs Reviewed by Copilot", + "type": "stat" + }, { "datasource": "mysql", "description": "Active users over time for the selected team(s)", @@ -604,6 +710,7 @@ "axisLabel": "Users", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -634,8 +741,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -657,11 +763,12 @@ "showLegend": true }, "tooltip": { + "hideZeros": false, "mode": "single", "sort": "none" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -689,6 +796,7 @@ "axisLabel": "Count", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -719,8 +827,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -742,11 +849,12 @@ "showLegend": true }, "tooltip": { + "hideZeros": false, "mode": "single", "sort": "none" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -774,6 +882,7 @@ "axisLabel": "Lines of Code", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -804,8 +913,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -827,11 +935,12 @@ "showLegend": true }, "tooltip": { + 
"hideZeros": false, "mode": "single", "sort": "none" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -852,19 +961,51 @@ "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, "mappings": [], "max": 1, "min": 0, - "unit": "percentunit", "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] - } + }, + "unit": "percentunit" }, "overrides": [] }, @@ -883,11 +1024,12 @@ "showLegend": true }, "tooltip": { + "hideZeros": false, "mode": "single", "sort": "none" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -908,14 +1050,46 @@ "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, "mappings": [], "min": 0, 
"thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -937,11 +1111,12 @@ "showLegend": true }, "tooltip": { + "hideZeros": false, "mode": "single", "sort": "none" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -954,72 +1129,9 @@ "title": "LOC Yield Trend", "type": "timeseries" }, - { - "collapsed": false, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 36 - }, - "id": 40, - "panels": [], - "title": "Team Comparison", - "type": "row" - }, - { - "datasource": "mysql", - "description": "Side-by-side comparison of key metrics across all teams (users in multiple teams are counted once per team)", - "fieldConfig": { - "defaults": { - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 10, - "w": 24, - "x": 0, - "y": 37 - }, - "id": 41, - "options": { - "cellHeight": "sm", - "footer": { - "countRows": false, - "fields": "", - "reducer": [ - "sum" - ], - "show": false - }, - "showHeader": true - }, - "pluginVersion": "11.0.0", - "targets": [ - { - "datasource": "mysql", - "format": "table", - "rawQuery": true, - "rawSql": "SELECT t.name as \"Team\",\n COUNT(DISTINCT m.user_login) as \"Active Users\",\n SUM(m.code_generation_activity_count) as \"Suggestions\",\n SUM(m.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"Acceptance %\",\n SUM(m.loc_added_sum) as \"LOC Added\",\n SUM(m.user_initiated_interaction_count) as \"Interactions\",\n COUNT(DISTINCT CASE WHEN m.used_agent = 1 THEN m.user_login END) as \"Agent Adopters\",\n COUNT(DISTINCT CASE WHEN m.used_chat = 1 THEN m.user_login END) as \"Chat Adopters\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN (\n SELECT DISTINCT connection_id, team_id, user_login\n FROM 
_tool_copilot_team_users\n) tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR t.slug IN (${team:sqlstring}))\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY t.name\nORDER BY SUM(m.code_generation_activity_count) DESC", - "refId": "A" - } - ], - "title": "Team Metrics Comparison", - "type": "table" - }, { "datasource": "mysql", - "description": "Weekly active users by team over time (users in multiple teams appear in each team they belong to)", + "description": "PR summary activity over time for selected team(s) Team-level aggregate; returns no data when User filter is applied.", "fieldConfig": { "defaults": { "color": { @@ -1029,11 +1141,12 @@ "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", - "axisLabel": "Users", + "axisLabel": "Pull Requests", "axisPlacement": "auto", "barAlignment": 0, - "drawStyle": "bars", - "fillOpacity": 80, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, "gradientMode": "none", "hideFrom": { "legend": false, @@ -1042,7 +1155,7 @@ }, "insertNulls": false, "lineInterpolation": "linear", - "lineWidth": 0, + "lineWidth": 2, "pointSize": 5, "scaleDistribution": { "type": "linear" @@ -1050,8 +1163,8 @@ "showPoints": "never", "spanNulls": false, "stacking": { - "mode": "normal", - "group": "A" + "group": "A", + "mode": "none" }, "thresholdsStyle": { "mode": "off" @@ -1062,8 +1175,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -1072,11 +1184,11 @@ }, "gridPos": { "h": 8, - "w": 24, - "x": 0, - "y": 47 + "w": 12, + "x": 12, + "y": 28 }, - "id": 42, + "id": 52, "options": { "legend": { "calcs": [], @@ -1085,15 +1197,171 @@ "showLegend": true }, "tooltip": { + 
"hideZeros": false, "mode": "single", "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT date as time,\n SUM(total_pr_summaries_created) as \"PRs Created by Copilot\",\n SUM(total_engaged_users) as \"PR Engaged Users\"\nFROM _tool_copilot_team_dotcom_prs\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\nGROUP BY date\nORDER BY 1", + "refId": "A" + } + ], + "title": "PR Activity Over Time", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 36 + }, + "id": 40, + "panels": [], + "title": "Team Comparison", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Side-by-side comparison of key metrics across all teams (users in multiple teams are counted once per team)", + "fieldConfig": { + "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 37 + }, + "id": 41, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT t.name as \"Team\",\n COUNT(DISTINCT m.user_login) as \"Active Users\",\n SUM(m.code_generation_activity_count) as \"Suggestions\",\n SUM(m.code_acceptance_activity_count) as \"Acceptances\",\n ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"Acceptance %\",\n SUM(m.loc_added_sum) as \"LOC Added\",\n 
SUM(m.user_initiated_interaction_count) as \"Interactions\",\n COUNT(DISTINCT CASE WHEN m.used_agent = 1 THEN m.user_login END) as \"Agent Adopters\",\n COUNT(DISTINCT CASE WHEN m.used_chat = 1 THEN m.user_login END) as \"Chat Adopters\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN (\n SELECT DISTINCT connection_id, team_id, user_login\n FROM _tool_copilot_team_users\n) tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR t.slug IN (${team:sqlstring}))\n AND ('${user:csv}' = '0' OR m.user_login IN (${user:sqlstring}))\nGROUP BY t.name\nORDER BY SUM(m.code_generation_activity_count) DESC", + "refId": "A" + } + ], + "title": "Team Metrics Comparison", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Weekly active users by team over time (users in multiple teams appear in each team they belong to)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Users", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "bars", + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 0, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 47 + }, + 
"id": 42, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true }, "stacking": { - "mode": "normal", - "group": "A" + "group": "A", + "mode": "normal" + }, + "tooltip": { + "hideZeros": false, + "mode": "single", + "sort": "none" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -1104,7 +1372,6 @@ } ], "title": "Weekly Active Users by Team", - "type": "timeseries", "transformations": [ { "id": "prepareTimeSeries", @@ -1119,7 +1386,8 @@ "renamePattern": "$1" } } - ] + ], + "type": "timeseries" }, { "collapsed": false, @@ -1139,13 +1407,19 @@ "description": "Acceptance ratio by language for team members", "fieldConfig": { "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -1171,7 +1445,7 @@ }, "showHeader": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -1189,13 +1463,19 @@ "description": "Acceptance ratio by model for team members", "fieldConfig": { "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -1221,7 +1501,7 @@ }, "showHeader": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -1252,13 +1532,19 @@ "description": "Top users by code generation activity in the selected team(s)", "fieldConfig": { "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -1284,7 +1570,7 @@ }, 
"showHeader": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -1312,6 +1598,7 @@ "axisLabel": "Users", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1342,8 +1629,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -1365,11 +1651,12 @@ "showLegend": true }, "tooltip": { + "hideZeros": false, "mode": "single", "sort": "none" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -1397,6 +1684,7 @@ "axisLabel": "Users", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1427,8 +1715,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -1450,11 +1737,12 @@ "showLegend": true }, "tooltip": { + "hideZeros": false, "mode": "single", "sort": "none" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -1485,16 +1773,17 @@ "description": "Seat activity by editor for team members", "fieldConfig": { "defaults": { - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - } - ] - } + "color": { + "mode": "palette-classic" + }, + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + } + }, + "mappings": [] }, "overrides": [] }, @@ -1524,11 +1813,12 @@ "values": false }, "tooltip": { + "hideZeros": false, "mode": "single", "sort": "none" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -1546,13 +1836,19 @@ "description": "Total, active, and inactive seats for team members", "fieldConfig": { "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, "mappings": [], 
"thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -1578,7 +1874,7 @@ }, "showHeader": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -1596,13 +1892,19 @@ "description": "Inactive seats for team members", "fieldConfig": { "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -1628,7 +1930,7 @@ }, "showHeader": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -1643,200 +1945,7 @@ }, { "datasource": "mysql", - "description": "Total PR summaries created by Copilot for selected team(s) in the time range Team-level aggregate; returns no data when User filter is applied.", - "fieldConfig": { - "defaults": { - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "blue", - "value": null - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 12, - "y": 8 - }, - "id": 50, - "options": { - "colorMode": "value", - "graphMode": "none", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "showPercentChange": false, - "textMode": "auto", - "wideLayout": true - }, - "pluginVersion": "11.0.0", - "targets": [ - { - "datasource": "mysql", - "format": "table", - "rawQuery": true, - "rawSql": "SELECT SUM(total_pr_summaries_created) as \"PRs by Copilot\"\nFROM _tool_copilot_team_dotcom_prs\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'", - "refId": "A" - } - ], - "title": "PRs Created by Copilot", - "type": "stat" - }, - { - "datasource": 
"mysql", - "description": "Proxy metric for PR review engagement in selected team(s) (team API exposes engaged users, not reviewed count) Team-level aggregate; returns no data when User filter is applied.", - "fieldConfig": { - "defaults": { - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "blue", - "value": null - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 18, - "y": 8 - }, - "id": 51, - "options": { - "colorMode": "value", - "graphMode": "none", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "showPercentChange": false, - "textMode": "auto", - "wideLayout": true - }, - "pluginVersion": "11.0.0", - "targets": [ - { - "datasource": "mysql", - "format": "table", - "rawQuery": true, - "rawSql": "SELECT SUM(total_engaged_users) as \"PRs Reviewed by Copilot\"\nFROM _tool_copilot_team_dotcom_prs\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'", - "refId": "A" - } - ], - "title": "PRs Reviewed by Copilot", - "type": "stat" - }, - { - "datasource": "mysql", - "description": "PR summary activity over time for selected team(s) Team-level aggregate; returns no data when User filter is applied.", - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "Pull Requests", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 2, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": 
false, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 12, - "y": 28 - }, - "id": 52, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "11.0.0", - "targets": [ - { - "datasource": "mysql", - "format": "table", - "rawQuery": true, - "rawSql": "SELECT date as time,\n SUM(total_pr_summaries_created) as \"PRs Created by Copilot\",\n SUM(total_engaged_users) as \"PR Engaged Users\"\nFROM _tool_copilot_team_dotcom_prs\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\nGROUP BY date\nORDER BY 1", - "refId": "A" - } - ], - "title": "PR Activity Over Time", - "type": "timeseries" - }, - { - "datasource": "mysql", - "description": "Weekly activity mix by Copilot capability for selected team(s) Team-level aggregate; returns no data when User filter is applied.", + "description": "Weekly user-initiated interactions by top Copilot features for the selected team(s) and user(s), using per-user metrics as the source of truth.", "fieldConfig": { "defaults": { "color": { @@ -1849,6 +1958,7 @@ "axisLabel": "Interactions", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "bars", "fillOpacity": 80, "gradientMode": "none", @@ -1867,8 +1977,8 @@ "showPoints": "never", "spanNulls": false, "stacking": { - "mode": "normal", - "group": "A" + "group": "A", + "mode": "normal" }, "thresholdsStyle": { "mode": "off" @@ -1879,8 +1989,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ 
-1901,27 +2010,27 @@ "placement": "bottom", "showLegend": true }, + "stacking": { + "group": "A", + "mode": "normal" + }, "tooltip": { + "hideZeros": false, "mode": "single", "sort": "none" - }, - "stacking": { - "mode": "normal", - "group": "A" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", "format": "table", "rawQuery": true, - "rawSql": "SELECT time, metric, SUM(value) as value\nFROM (\n SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n 'Code Completions' as metric,\n SUM(total_code_suggestions) as value\n FROM _tool_copilot_team_completions\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY))\n\n UNION ALL\n\n SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n 'IDE Chat' as metric,\n SUM(total_chats) as value\n FROM _tool_copilot_team_ide_chat\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY))\n\n UNION ALL\n\n SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n 'Dotcom Chat' as metric,\n SUM(total_chats) as value\n FROM _tool_copilot_team_dotcom_chat\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY))\n\n UNION ALL\n\n SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n 'Dotcom Pull Requests' as metric,\n SUM(total_pr_summaries_created) as value\n FROM _tool_copilot_team_dotcom_prs\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = 
'${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY))\n) feature_mix\nGROUP BY time, metric\nORDER BY time", + "rawSql": "SELECT DATE(DATE_SUB(f.day, INTERVAL WEEKDAY(f.day) DAY)) as time,\n CASE\n WHEN f.feature IN (\n SELECT feature FROM (\n SELECT f2.feature\n FROM _tool_copilot_user_metrics_by_feature f2\n INNER JOIN _tool_copilot_user_daily_metrics udm2\n ON udm2.connection_id = f2.connection_id AND udm2.user_id = f2.user_id AND udm2.day = f2.day AND udm2.scope_id = f2.scope_id\n LEFT JOIN (\n SELECT DISTINCT connection_id, user_login\n FROM _tool_copilot_team_users\n WHERE '${team:csv}' = '0' OR team_slug IN (${team:sqlstring})\n ) tu2\n ON tu2.connection_id = udm2.connection_id AND tu2.user_login = udm2.user_login\n WHERE $__timeFilter(f2.day)\n AND f2.connection_id = ${connection_id}\n AND f2.scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR tu2.user_login IS NOT NULL)\n AND ('${user:csv}' = '0' OR udm2.user_login IN (${user:sqlstring}))\n GROUP BY f2.feature\n HAVING SUM(f2.user_initiated_interaction_count) > 0\n ORDER BY SUM(f2.user_initiated_interaction_count) DESC\n LIMIT 4\n ) top_features\n ) THEN REPLACE(REPLACE(f.feature, 'chat_panel_', ''), '_mode', '')\n ELSE 'Other'\n END as metric,\n SUM(f.user_initiated_interaction_count) as value\nFROM _tool_copilot_user_metrics_by_feature f\nINNER JOIN _tool_copilot_user_daily_metrics udm\n ON udm.connection_id = f.connection_id AND udm.user_id = f.user_id AND udm.day = f.day AND udm.scope_id = f.scope_id\nLEFT JOIN (\n SELECT DISTINCT connection_id, user_login\n FROM _tool_copilot_team_users\n WHERE '${team:csv}' = '0' OR team_slug IN (${team:sqlstring})\n) tu\n ON tu.connection_id = udm.connection_id AND tu.user_login = udm.user_login\nWHERE $__timeFilter(f.day)\n AND f.connection_id = ${connection_id}\n AND f.scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR tu.user_login IS NOT NULL)\n 
AND ('${user:csv}' = '0' OR udm.user_login IN (${user:sqlstring}))\nGROUP BY time, metric\nORDER BY time", "refId": "A" } ], "title": "Feature Mix Over Time", - "type": "timeseries", "transformations": [ { "id": "prepareTimeSeries", @@ -1936,11 +2045,12 @@ "renamePattern": "$1" } } - ] + ], + "type": "timeseries" }, { "datasource": "mysql", - "description": "Weekly IDE activity for selected team(s) Team-level aggregate; returns no data when User filter is applied.", + "description": "Weekly IDE activity for selected team(s) and user(s), using per-user IDE metrics as the source of truth.", "fieldConfig": { "defaults": { "color": { @@ -1953,6 +2063,7 @@ "axisLabel": "Weekly Interactions", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "bars", "fillOpacity": 80, "gradientMode": "none", @@ -1971,8 +2082,8 @@ "showPoints": "never", "spanNulls": false, "stacking": { - "mode": "normal", - "group": "A" + "group": "A", + "mode": "normal" }, "thresholdsStyle": { "mode": "off" @@ -1983,8 +2094,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -2005,27 +2115,27 @@ "placement": "bottom", "showLegend": true }, + "stacking": { + "group": "A", + "mode": "normal" + }, "tooltip": { + "hideZeros": false, "mode": "single", "sort": "none" - }, - "stacking": { - "mode": "normal", - "group": "A" } }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", "format": "table", "rawQuery": true, - "rawSql": "SELECT time, metric, SUM(value) as value\nFROM (\n SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n editor as metric,\n SUM(total_code_suggestions + total_code_acceptances) as value\n FROM _tool_copilot_team_completions\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL 
WEEKDAY(date) DAY)), editor\n\n UNION ALL\n\n SELECT DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as time,\n editor as metric,\n SUM(total_chats) as value\n FROM _tool_copilot_team_ide_chat\n WHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\n GROUP BY DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)), editor\n) ide_adoption\nGROUP BY time, metric\nORDER BY time", + "rawSql": "SELECT DATE(DATE_SUB(ui.day, INTERVAL WEEKDAY(ui.day) DAY)) as time,\n ui.ide as metric,\n SUM(ui.user_initiated_interaction_count + ui.code_generation_activity_count + ui.code_acceptance_activity_count) as value\nFROM _tool_copilot_user_metrics_by_ide ui\nINNER JOIN _tool_copilot_user_daily_metrics udm\n ON udm.connection_id = ui.connection_id AND udm.user_id = ui.user_id AND udm.day = ui.day AND udm.scope_id = ui.scope_id\nLEFT JOIN (\n SELECT DISTINCT connection_id, user_login\n FROM _tool_copilot_team_users\n WHERE '${team:csv}' = '0' OR team_slug IN (${team:sqlstring})\n) tu\n ON tu.connection_id = udm.connection_id AND tu.user_login = udm.user_login\nWHERE $__timeFilter(ui.day)\n AND ui.connection_id = ${connection_id}\n AND ui.scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR tu.user_login IS NOT NULL)\n AND ('${user:csv}' = '0' OR udm.user_login IN (${user:sqlstring}))\nGROUP BY time, ui.ide\nORDER BY time", "refId": "A" } ], "title": "IDE Adoption Over Time", - "type": "timeseries", "transformations": [ { "id": "prepareTimeSeries", @@ -2040,20 +2150,27 @@ "renamePattern": "$1" } } - ] + ], + "type": "timeseries" }, { "datasource": "mysql", - "description": "Matrix-style breakdown of completion suggestions and acceptances by language for selected team(s) Team-level aggregate; returns no data when User filter is applied.", + "description": "Matrix-style breakdown of completion suggestions and acceptances by language for selected team(s) and 
user(s), using per-user language metrics as the source of truth.", "fieldConfig": { "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -2079,14 +2196,32 @@ }, "showHeader": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", + "editorMode": "code", "format": "table", "rawQuery": true, - "rawSql": "SELECT language as \"Language\",\n 'code_completions' as \"Feature\",\n SUM(total_code_suggestions) as \"Suggestions\",\n SUM(total_code_acceptances) as \"Acceptances\"\nFROM _tool_copilot_team_completions\nWHERE $__timeFilter(date)\n AND connection_id = ${connection_id}\n AND scope_id = '${scope_id}'\n AND ('${team:csv}' = '0' OR team_slug IN (${team:sqlstring}))\n AND '${user:csv}' = '0'\nGROUP BY language\nHAVING SUM(total_code_suggestions) > 0\nORDER BY language", - "refId": "A" + "rawSql": "SELECT lf.language as \"Language\",\n 'code_completions' as \"Feature\",\n SUM(lf.code_generation_activity_count) as \"Suggestions\",\n SUM(lf.code_acceptance_activity_count) as \"Acceptances\"\nFROM _tool_copilot_user_metrics_by_language_feature lf\nINNER JOIN _tool_copilot_user_daily_metrics udm\n ON udm.connection_id = lf.connection_id AND udm.user_id = lf.user_id AND udm.day = lf.day AND udm.scope_id = lf.scope_id\nLEFT JOIN (\n SELECT DISTINCT connection_id, user_login\n FROM _tool_copilot_team_users\n WHERE '${team:csv}' = '0' OR team_slug IN (${team:sqlstring})\n) tu\n ON tu.connection_id = udm.connection_id AND tu.user_login = udm.user_login\nWHERE $__timeFilter(lf.day)\n AND lf.connection_id = ${connection_id}\n AND lf.scope_id = '${scope_id}'\n AND ('${user:csv}' = '0' OR udm.user_login IN (${user:sqlstring}))\nGROUP BY lf.language\nHAVING SUM(lf.code_generation_activity_count) > 0\nORDER BY lf.language", + "refId": "A", + "sql": { + 
"columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + } } ], "title": "Feature x Language Matrix", @@ -2097,13 +2232,19 @@ "description": "Acceptance ratio by Copilot feature for selected team members", "fieldConfig": { "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -2129,7 +2270,7 @@ }, "showHeader": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -2147,13 +2288,19 @@ "description": "Acceptance ratio by programming language for selected team members", "fieldConfig": { "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -2179,7 +2326,7 @@ }, "showHeader": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -2197,13 +2344,19 @@ "description": "Acceptance ratio by model for selected team members", "fieldConfig": { "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -2229,7 +2382,7 @@ }, "showHeader": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -2265,8 +2418,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -2285,6 +2437,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ 
"lastNotNull" @@ -2296,7 +2449,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -2315,15 +2468,13 @@ "fieldConfig": { "defaults": { "mappings": [], - "min": 0, "max": 100, - "unit": "percent", + "min": 0, "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" }, { "color": "yellow", @@ -2334,7 +2485,8 @@ "value": 20 } ] - } + }, + "unit": "percent" }, "overrides": [] }, @@ -2360,7 +2512,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -2378,13 +2530,19 @@ "description": "Row counts for team Copilot metrics tables Team-level aggregate; returns no data when User filter is applied.", "fieldConfig": { "defaults": { + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" } ] } @@ -2410,7 +2568,7 @@ }, "showHeader": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.6.2", "targets": [ { "datasource": "mysql", @@ -2424,8 +2582,9 @@ "type": "table" } ], + "preload": false, "refresh": "", - "schemaVersion": 38, + "schemaVersion": 41, "tags": [ "copilot", "devlake", @@ -2436,55 +2595,44 @@ "list": [ { "current": { - "selected": false, "text": "", "value": "" }, "datasource": "mysql", "definition": "SELECT DISTINCT connection_id FROM _tool_copilot_scopes ORDER BY connection_id DESC", - "hide": 0, "includeAll": false, "label": "Connection ID", - "multi": false, "name": "connection_id", "options": [], "query": "SELECT DISTINCT connection_id FROM _tool_copilot_scopes ORDER BY connection_id DESC", "refresh": 1, "regex": "", - "skipUrlSync": false, - "sort": 0, "type": "query" }, { "current": { - "selected": false, "text": "", "value": "" }, "datasource": "mysql", 
"definition": "SELECT DISTINCT id as scope_id FROM _tool_copilot_scopes WHERE connection_id = ${connection_id} ORDER BY 1", - "hide": 0, "includeAll": false, "label": "Scope ID", - "multi": false, "name": "scope_id", "options": [], "query": "SELECT DISTINCT id as scope_id FROM _tool_copilot_scopes WHERE connection_id = ${connection_id} ORDER BY 1", "refresh": 2, "regex": "", - "skipUrlSync": false, - "sort": 0, "type": "query" }, { + "allValue": "0", "current": { - "selected": false, "text": "All", "value": "0" }, "datasource": "mysql", "definition": "SELECT slug as __value, name as __text FROM _tool_copilot_teams WHERE connection_id = ${connection_id} ORDER BY name", - "hide": 0, "includeAll": true, "label": "Team", "multi": true, @@ -2493,20 +2641,16 @@ "query": "SELECT slug as __value, name as __text FROM _tool_copilot_teams WHERE connection_id = ${connection_id} ORDER BY name", "refresh": 2, "regex": "", - "skipUrlSync": false, - "sort": 0, - "type": "query", - "allValue": "0" + "type": "query" }, { + "allValue": "0", "current": { - "selected": false, "text": "All", "value": "0" }, "datasource": "mysql", "definition": "SELECT DISTINCT m.user_login as __value, m.user_login as __text FROM _tool_copilot_user_daily_metrics m WHERE m.connection_id = ${connection_id} AND m.scope_id = '${scope_id}' AND ('${team:csv}' = '0' OR EXISTS (SELECT 1 FROM _tool_copilot_team_users tu INNER JOIN _tool_copilot_teams t ON t.connection_id = tu.connection_id AND t.id = tu.team_id WHERE tu.connection_id = m.connection_id AND tu.user_login = m.user_login AND t.slug IN (${team:sqlstring}))) ORDER BY m.user_login", - "hide": 0, "includeAll": true, "label": "User", "multi": true, @@ -2515,10 +2659,7 @@ "query": "SELECT DISTINCT m.user_login as __value, m.user_login as __text FROM _tool_copilot_user_daily_metrics m WHERE m.connection_id = ${connection_id} AND m.scope_id = '${scope_id}' AND ('${team:csv}' = '0' OR EXISTS (SELECT 1 FROM _tool_copilot_team_users tu INNER JOIN 
_tool_copilot_teams t ON t.connection_id = tu.connection_id AND t.id = tu.team_id WHERE tu.connection_id = m.connection_id AND tu.user_login = m.user_login AND t.slug IN (${team:sqlstring}))) ORDER BY m.user_login", "refresh": 2, "regex": "", - "skipUrlSync": false, - "sort": 0, - "type": "query", - "allValue": "0" + "type": "query" } ] }, @@ -2530,6 +2671,5 @@ "timezone": "utc", "title": "GitHub Copilot Adoption by Team and User", "uid": "copilot_adoption_by_team_and_user", - "version": 1, - "weekStart": "" -} + "version": 7 +} \ No newline at end of file From 63c809e90082a2a2b424d521b8db056c4a7dc117 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tam=C3=A1s=20Laczk=C3=B3-Albert?= <76678478+la-tamas@users.noreply.github.com> Date: Thu, 26 Mar 2026 12:23:31 +0200 Subject: [PATCH 05/39] Update URL for consistency across the plugin Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- backend/plugins/gh-copilot/tasks/team_user_collector.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/plugins/gh-copilot/tasks/team_user_collector.go b/backend/plugins/gh-copilot/tasks/team_user_collector.go index 234ed09bd2f..7dffb8934fe 100644 --- a/backend/plugins/gh-copilot/tasks/team_user_collector.go +++ b/backend/plugins/gh-copilot/tasks/team_user_collector.go @@ -99,7 +99,7 @@ func CollectTeamUsers(taskCtx plugin.SubTaskContext) errors.Error { ApiClient: apiClient, Input: iterator, PageSize: 100, - UrlTemplate: "/orgs/{{ .Input.OrgLogin }}/teams/{{ .Input.Slug }}/members", + UrlTemplate: "orgs/{{ .Input.OrgLogin }}/teams/{{ .Input.Slug }}/members", Query: func(reqData *helper.RequestData) (url.Values, errors.Error) { query := url.Values{} query.Set("page", fmt.Sprintf("%v", reqData.Pager.Page)) From db2a9215ed3bc7524523eef24488e4904ac0a095 Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sat, 28 Feb 2026 19:32:43 +0800 Subject: [PATCH 06/39] fix(q_dev): prevent data duplication in user_report and user_data tables (#8737) * fix(q_dev): 
prevent data duplication in user_report and user_data tables Replace auto-increment ID with composite primary keys so that CreateOrUpdate can properly deduplicate rows on re-extraction. - user_report PK: (connection_id, scope_id, user_id, date, client_type) - user_data PK: (connection_id, scope_id, user_id, date) - Switch db.Create() to db.CreateOrUpdate() in s3_data_extractor - Migration drops old tables, rebuilds with new PKs, resets s3_file_meta processed flag to trigger re-extraction * fix(q_dev): gofmt archived user_data_v2 model --- .../20260228_fix_dedup_user_tables.go | 60 ++++++++++++++ .../20260228_reset_s3_file_meta_processed.go | 51 ++++++++++++ .../migrationscripts/archived/user_data_v2.go | 81 +++++++++++++++++++ .../archived/user_report_v2.go | 46 +++++++++++ .../q_dev/models/migrationscripts/register.go | 2 + backend/plugins/q_dev/models/user_data.go | 10 +-- backend/plugins/q_dev/models/user_report.go | 10 +-- .../plugins/q_dev/tasks/s3_data_extractor.go | 5 +- 8 files changed, 252 insertions(+), 13 deletions(-) create mode 100644 backend/plugins/q_dev/models/migrationscripts/20260228_fix_dedup_user_tables.go create mode 100644 backend/plugins/q_dev/models/migrationscripts/20260228_reset_s3_file_meta_processed.go create mode 100644 backend/plugins/q_dev/models/migrationscripts/archived/user_data_v2.go create mode 100644 backend/plugins/q_dev/models/migrationscripts/archived/user_report_v2.go diff --git a/backend/plugins/q_dev/models/migrationscripts/20260228_fix_dedup_user_tables.go b/backend/plugins/q_dev/models/migrationscripts/20260228_fix_dedup_user_tables.go new file mode 100644 index 00000000000..94b7666a9a5 --- /dev/null +++ b/backend/plugins/q_dev/models/migrationscripts/20260228_fix_dedup_user_tables.go @@ -0,0 +1,60 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/helpers/migrationhelper" + "github.com/apache/incubator-devlake/plugins/q_dev/models/migrationscripts/archived" +) + +type fixDedupUserTables struct{} + +func (*fixDedupUserTables) Up(basicRes context.BasicRes) errors.Error { + db := basicRes.GetDal() + + // Drop old tables that used auto-increment ID (which caused data duplication) + err := db.DropTables( + "_tool_q_dev_user_report", + "_tool_q_dev_user_data", + ) + if err != nil { + return errors.Default.Wrap(err, "failed to drop old user tables") + } + + // Recreate tables with composite primary keys for proper deduplication + err = migrationhelper.AutoMigrateTables( + basicRes, + &archived.QDevUserReportV2{}, + &archived.QDevUserDataV2{}, + ) + if err != nil { + return errors.Default.Wrap(err, "failed to recreate user tables") + } + + return nil +} + +func (*fixDedupUserTables) Version() uint64 { + return 20260228000001 +} + +func (*fixDedupUserTables) Name() string { + return "Rebuild user_report and user_data tables with composite primary keys to fix data duplication" +} diff --git a/backend/plugins/q_dev/models/migrationscripts/20260228_reset_s3_file_meta_processed.go b/backend/plugins/q_dev/models/migrationscripts/20260228_reset_s3_file_meta_processed.go new file mode 100644 
index 00000000000..7d0b8ba1f4f --- /dev/null +++ b/backend/plugins/q_dev/models/migrationscripts/20260228_reset_s3_file_meta_processed.go @@ -0,0 +1,51 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" +) + +type resetS3FileMetaProcessed struct{} + +func (*resetS3FileMetaProcessed) Up(basicRes context.BasicRes) errors.Error { + db := basicRes.GetDal() + + // Reset processed flag so data will be re-extracted with the new + // dedup-safe composite-PK schema on next pipeline run + err := db.UpdateColumn( + "_tool_q_dev_s3_file_meta", + "processed", false, + dal.Where("1 = 1"), + ) + if err != nil { + return errors.Default.Wrap(err, "failed to reset s3_file_meta processed flag") + } + + return nil +} + +func (*resetS3FileMetaProcessed) Version() uint64 { + return 20260228000002 +} + +func (*resetS3FileMetaProcessed) Name() string { + return "Reset s3_file_meta processed flag to re-extract data with dedup-safe schema" +} diff --git a/backend/plugins/q_dev/models/migrationscripts/archived/user_data_v2.go 
b/backend/plugins/q_dev/models/migrationscripts/archived/user_data_v2.go new file mode 100644 index 00000000000..8d5db8496a3 --- /dev/null +++ b/backend/plugins/q_dev/models/migrationscripts/archived/user_data_v2.go @@ -0,0 +1,81 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package archived + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/migrationscripts/archived" +) + +type QDevUserDataV2 struct { + archived.NoPKModel + ConnectionId uint64 `gorm:"primaryKey"` + ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` + UserId string `gorm:"primaryKey;type:varchar(255)" json:"userId"` + Date time.Time `gorm:"primaryKey;type:date" json:"date"` + DisplayName string `gorm:"type:varchar(255)" json:"displayName"` + CodeReview_FindingsCount int + CodeReview_SucceededEventCount int + InlineChat_AcceptanceEventCount int + InlineChat_AcceptedLineAdditions int + InlineChat_AcceptedLineDeletions int + InlineChat_DismissalEventCount int + InlineChat_DismissedLineAdditions int + InlineChat_DismissedLineDeletions int + InlineChat_RejectedLineAdditions int + InlineChat_RejectedLineDeletions int + InlineChat_RejectionEventCount int + InlineChat_TotalEventCount int + Inline_AICodeLines int + Inline_AcceptanceCount int + 
Inline_SuggestionsCount int + Chat_AICodeLines int + Chat_MessagesInteracted int + Chat_MessagesSent int + CodeFix_AcceptanceEventCount int + CodeFix_AcceptedLines int + CodeFix_GeneratedLines int + CodeFix_GenerationEventCount int + CodeReview_FailedEventCount int + Dev_AcceptanceEventCount int + Dev_AcceptedLines int + Dev_GeneratedLines int + Dev_GenerationEventCount int + DocGeneration_AcceptedFileUpdates int + DocGeneration_AcceptedFilesCreations int + DocGeneration_AcceptedLineAdditions int + DocGeneration_AcceptedLineUpdates int + DocGeneration_EventCount int + DocGeneration_RejectedFileCreations int + DocGeneration_RejectedFileUpdates int + DocGeneration_RejectedLineAdditions int + DocGeneration_RejectedLineUpdates int + TestGeneration_AcceptedLines int + TestGeneration_AcceptedTests int + TestGeneration_EventCount int + TestGeneration_GeneratedLines int + TestGeneration_GeneratedTests int + Transformation_EventCount int + Transformation_LinesGenerated int + Transformation_LinesIngested int +} + +func (QDevUserDataV2) TableName() string { + return "_tool_q_dev_user_data" +} diff --git a/backend/plugins/q_dev/models/migrationscripts/archived/user_report_v2.go b/backend/plugins/q_dev/models/migrationscripts/archived/user_report_v2.go new file mode 100644 index 00000000000..7045874851c --- /dev/null +++ b/backend/plugins/q_dev/models/migrationscripts/archived/user_report_v2.go @@ -0,0 +1,46 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package archived + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/migrationscripts/archived" +) + +type QDevUserReportV2 struct { + archived.NoPKModel + ConnectionId uint64 `gorm:"primaryKey"` + ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` + UserId string `gorm:"primaryKey;type:varchar(255)" json:"userId"` + Date time.Time `gorm:"primaryKey;type:date" json:"date"` + ClientType string `gorm:"primaryKey;type:varchar(50)" json:"clientType"` + DisplayName string `gorm:"type:varchar(255)" json:"displayName"` + SubscriptionTier string `gorm:"type:varchar(50)" json:"subscriptionTier"` + ProfileId string `gorm:"type:varchar(512)" json:"profileId"` + ChatConversations int `json:"chatConversations"` + CreditsUsed float64 `json:"creditsUsed"` + OverageCap float64 `json:"overageCap"` + OverageCreditsUsed float64 `json:"overageCreditsUsed"` + OverageEnabled bool `json:"overageEnabled"` + TotalMessages int `json:"totalMessages"` +} + +func (QDevUserReportV2) TableName() string { + return "_tool_q_dev_user_report" +} diff --git a/backend/plugins/q_dev/models/migrationscripts/register.go b/backend/plugins/q_dev/models/migrationscripts/register.go index 825ab5658a7..9c68ae8f815 100644 --- a/backend/plugins/q_dev/models/migrationscripts/register.go +++ b/backend/plugins/q_dev/models/migrationscripts/register.go @@ -33,5 +33,7 @@ func All() []plugin.MigrationScript { new(addScopeIdFields), new(addUserReportTable), new(addAccountIdToS3Slice), + new(fixDedupUserTables), + new(resetS3FileMetaProcessed), } } diff --git 
a/backend/plugins/q_dev/models/user_data.go b/backend/plugins/q_dev/models/user_data.go index a668db78bdd..3d59f965ade 100644 --- a/backend/plugins/q_dev/models/user_data.go +++ b/backend/plugins/q_dev/models/user_data.go @@ -25,12 +25,12 @@ import ( // QDevUserData 存储从CSV中提取的原始数据 type QDevUserData struct { - common.Model + common.NoPKModel ConnectionId uint64 `gorm:"primaryKey"` - UserId string `gorm:"index" json:"userId"` - Date time.Time `gorm:"index" json:"date"` - DisplayName string `gorm:"type:varchar(255)" json:"displayName"` // New field for user display name - ScopeId string `gorm:"index;type:varchar(255)" json:"scopeId"` + ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` + UserId string `gorm:"primaryKey;type:varchar(255)" json:"userId"` + Date time.Time `gorm:"primaryKey;type:date" json:"date"` + DisplayName string `gorm:"type:varchar(255)" json:"displayName"` CodeReview_FindingsCount int CodeReview_SucceededEventCount int diff --git a/backend/plugins/q_dev/models/user_report.go b/backend/plugins/q_dev/models/user_report.go index f64090e8980..17c4ac07daa 100644 --- a/backend/plugins/q_dev/models/user_report.go +++ b/backend/plugins/q_dev/models/user_report.go @@ -24,13 +24,13 @@ import ( ) type QDevUserReport struct { - common.Model + common.NoPKModel ConnectionId uint64 `gorm:"primaryKey"` - UserId string `gorm:"index" json:"userId"` - Date time.Time `gorm:"index" json:"date"` + ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` + UserId string `gorm:"primaryKey;type:varchar(255)" json:"userId"` + Date time.Time `gorm:"primaryKey;type:date" json:"date"` + ClientType string `gorm:"primaryKey;type:varchar(50)" json:"clientType"` DisplayName string `gorm:"type:varchar(255)" json:"displayName"` - ScopeId string `gorm:"index;type:varchar(255)" json:"scopeId"` - ClientType string `gorm:"type:varchar(50)" json:"clientType"` SubscriptionTier string `gorm:"type:varchar(50)" json:"subscriptionTier"` ProfileId string 
`gorm:"type:varchar(512)" json:"profileId"` ChatConversations int `json:"chatConversations"` diff --git a/backend/plugins/q_dev/tasks/s3_data_extractor.go b/backend/plugins/q_dev/tasks/s3_data_extractor.go index 919c4dbe60a..1cf2a9f2ef5 100644 --- a/backend/plugins/q_dev/tasks/s3_data_extractor.go +++ b/backend/plugins/q_dev/tasks/s3_data_extractor.go @@ -147,7 +147,7 @@ func processCSVData(taskCtx plugin.SubTaskContext, db dal.Dal, reader io.ReadClo if err != nil { return errors.Default.Wrap(err, "failed to create user report data") } - err = db.Create(reportData) + err = db.CreateOrUpdate(reportData) if err != nil { return errors.Default.Wrap(err, "failed to save user report data") } @@ -158,8 +158,7 @@ func processCSVData(taskCtx plugin.SubTaskContext, db dal.Dal, reader io.ReadClo return errors.Default.Wrap(err, "failed to create user data") } - // Save to database - no need to check for duplicates since we're processing each file only once - err = db.Create(userData) + err = db.CreateOrUpdate(userData) if err != nil { return errors.Default.Wrap(err, "failed to save user data") } From 4bb62dde7ed4dcf37c2e3184cfaf373f3b6cf278 Mon Sep 17 00:00:00 2001 From: Ema Abitante Date: Mon, 2 Mar 2026 13:33:59 +0100 Subject: [PATCH 07/39] feat(github): Extend exclusion of file extensions to github plugin (#8719) * feat(github): extend PR size exclusion for specified file extension to github plugin * fix: register migration script * fix: move PR size to 'Additional settings' and change so the comma doesn't get removed while typing * fix: linting --- backend/plugins/github/api/blueprint_v200.go | 15 +++--- ...0260216_add_pr_size_excluded_extensions.go | 50 +++++++++++++++++++ .../models/migrationscripts/register.go | 1 + backend/plugins/github/models/scope_config.go | 29 +++++------ .../src/plugins/register/github/config.tsx | 1 + .../register/github/transformation.tsx | 33 ++++++++++++ 6 files changed, 108 insertions(+), 21 deletions(-) create mode 100644 
backend/plugins/github/models/migrationscripts/20260216_add_pr_size_excluded_extensions.go diff --git a/backend/plugins/github/api/blueprint_v200.go b/backend/plugins/github/api/blueprint_v200.go index 708a941b689..6515f24b72c 100644 --- a/backend/plugins/github/api/blueprint_v200.go +++ b/backend/plugins/github/api/blueprint_v200.go @@ -128,13 +128,14 @@ func makeDataSourcePipelinePlanV200( stage = append(stage, &coreModels.PipelineTask{ Plugin: "gitextractor", Options: map[string]interface{}{ - "url": cloneUrl.String(), - "name": githubRepo.FullName, - "fullName": githubRepo.FullName, - "repoId": didgen.NewDomainIdGenerator(&models.GithubRepo{}).Generate(connection.ID, githubRepo.GithubId), - "proxy": connection.Proxy, - "connectionId": githubRepo.ConnectionId, - "pluginName": "github", + "url": cloneUrl.String(), + "name": githubRepo.FullName, + "fullName": githubRepo.FullName, + "repoId": didgen.NewDomainIdGenerator(&models.GithubRepo{}).Generate(connection.ID, githubRepo.GithubId), + "proxy": connection.Proxy, + "connectionId": githubRepo.ConnectionId, + "pluginName": "github", + "excludeFileExtensions": scopeConfig.PrSizeExcludedFileExtensions, }, }) diff --git a/backend/plugins/github/models/migrationscripts/20260216_add_pr_size_excluded_extensions.go b/backend/plugins/github/models/migrationscripts/20260216_add_pr_size_excluded_extensions.go new file mode 100644 index 00000000000..cfc4337c528 --- /dev/null +++ b/backend/plugins/github/models/migrationscripts/20260216_add_pr_size_excluded_extensions.go @@ -0,0 +1,50 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/migrationhelper" +) + +var _ plugin.MigrationScript = (*addPrSizeExcludedFileExtensions)(nil) + +type githubScopeConfig20260216 struct { + PrSizeExcludedFileExtensions []string `gorm:"type:json" json:"prSizeExcludedFileExtensions" mapstructure:"prSizeExcludedFileExtensions"` +} + +func (githubScopeConfig20260216) TableName() string { + return "_tool_github_scope_configs" +} + +type addPrSizeExcludedFileExtensions struct{} + +func (script *addPrSizeExcludedFileExtensions) Up(basicRes context.BasicRes) errors.Error { + return migrationhelper.AutoMigrateTables( + basicRes, + &githubScopeConfig20260216{}, + ) +} + +func (*addPrSizeExcludedFileExtensions) Version() uint64 { return 20260216100000 } + +func (*addPrSizeExcludedFileExtensions) Name() string { + return "add pr_size_excluded_file_extensions to _tool_github_scope_configs" +} diff --git a/backend/plugins/github/models/migrationscripts/register.go b/backend/plugins/github/models/migrationscripts/register.go index 0094972890e..65f538f31c1 100644 --- a/backend/plugins/github/models/migrationscripts/register.go +++ b/backend/plugins/github/models/migrationscripts/register.go @@ -57,5 +57,6 @@ func All() []plugin.MigrationScript { new(addIndexToGithubJobs), new(addRefreshTokenFields), new(modifyTokenExpiresAtToNullable), + new(addPrSizeExcludedFileExtensions), } } diff --git 
a/backend/plugins/github/models/scope_config.go b/backend/plugins/github/models/scope_config.go index 60b298d363c..a19f33db6f3 100644 --- a/backend/plugins/github/models/scope_config.go +++ b/backend/plugins/github/models/scope_config.go @@ -26,20 +26,21 @@ import ( var _ plugin.ToolLayerScopeConfig = (*GithubScopeConfig)(nil) type GithubScopeConfig struct { - common.ScopeConfig `mapstructure:",squash" json:",inline" gorm:"embedded"` - PrType string `mapstructure:"prType,omitempty" json:"prType" gorm:"type:varchar(255)"` - PrComponent string `mapstructure:"prComponent,omitempty" json:"prComponent" gorm:"type:varchar(255)"` - PrBodyClosePattern string `mapstructure:"prBodyClosePattern,omitempty" json:"prBodyClosePattern" gorm:"type:varchar(255)"` - IssueSeverity string `mapstructure:"issueSeverity,omitempty" json:"issueSeverity" gorm:"type:varchar(255)"` - IssuePriority string `mapstructure:"issuePriority,omitempty" json:"issuePriority" gorm:"type:varchar(255)"` - IssueComponent string `mapstructure:"issueComponent,omitempty" json:"issueComponent" gorm:"type:varchar(255)"` - IssueTypeBug string `mapstructure:"issueTypeBug,omitempty" json:"issueTypeBug" gorm:"type:varchar(255)"` - IssueTypeIncident string `mapstructure:"issueTypeIncident,omitempty" json:"issueTypeIncident" gorm:"type:varchar(255)"` - IssueTypeRequirement string `mapstructure:"issueTypeRequirement,omitempty" json:"issueTypeRequirement" gorm:"type:varchar(255)"` - DeploymentPattern string `mapstructure:"deploymentPattern,omitempty" json:"deploymentPattern" gorm:"type:varchar(255)"` - ProductionPattern string `mapstructure:"productionPattern,omitempty" json:"productionPattern" gorm:"type:varchar(255)"` - EnvNamePattern string `mapstructure:"envNamePattern,omitempty" json:"envNamePattern" gorm:"type:varchar(255)"` - Refdiff datatypes.JSONMap `mapstructure:"refdiff,omitempty" json:"refdiff" swaggertype:"object" format:"json"` + common.ScopeConfig `mapstructure:",squash" json:",inline" gorm:"embedded"` + 
PrType string `mapstructure:"prType,omitempty" json:"prType" gorm:"type:varchar(255)"` + PrComponent string `mapstructure:"prComponent,omitempty" json:"prComponent" gorm:"type:varchar(255)"` + PrBodyClosePattern string `mapstructure:"prBodyClosePattern,omitempty" json:"prBodyClosePattern" gorm:"type:varchar(255)"` + IssueSeverity string `mapstructure:"issueSeverity,omitempty" json:"issueSeverity" gorm:"type:varchar(255)"` + IssuePriority string `mapstructure:"issuePriority,omitempty" json:"issuePriority" gorm:"type:varchar(255)"` + IssueComponent string `mapstructure:"issueComponent,omitempty" json:"issueComponent" gorm:"type:varchar(255)"` + IssueTypeBug string `mapstructure:"issueTypeBug,omitempty" json:"issueTypeBug" gorm:"type:varchar(255)"` + IssueTypeIncident string `mapstructure:"issueTypeIncident,omitempty" json:"issueTypeIncident" gorm:"type:varchar(255)"` + IssueTypeRequirement string `mapstructure:"issueTypeRequirement,omitempty" json:"issueTypeRequirement" gorm:"type:varchar(255)"` + DeploymentPattern string `mapstructure:"deploymentPattern,omitempty" json:"deploymentPattern" gorm:"type:varchar(255)"` + ProductionPattern string `mapstructure:"productionPattern,omitempty" json:"productionPattern" gorm:"type:varchar(255)"` + EnvNamePattern string `mapstructure:"envNamePattern,omitempty" json:"envNamePattern" gorm:"type:varchar(255)"` + Refdiff datatypes.JSONMap `mapstructure:"refdiff,omitempty" json:"refdiff" swaggertype:"object" format:"json"` + PrSizeExcludedFileExtensions []string `mapstructure:"prSizeExcludedFileExtensions" json:"prSizeExcludedFileExtensions" gorm:"type:json;serializer:json"` } // GetConnectionId implements plugin.ToolLayerScopeConfig. 
diff --git a/config-ui/src/plugins/register/github/config.tsx b/config-ui/src/plugins/register/github/config.tsx index 9a372f98471..587f00cb9ea 100644 --- a/config-ui/src/plugins/register/github/config.tsx +++ b/config-ui/src/plugins/register/github/config.tsx @@ -117,6 +117,7 @@ export const GitHubConfig: IPluginConfig = { tagsLimit: 10, tagsPattern: '/v\\d+\\.\\d+(\\.\\d+(-rc)*\\d*)*$/', }, + prSizeExcludedFileExtensions: [], }, }, }; diff --git a/config-ui/src/plugins/register/github/transformation.tsx b/config-ui/src/plugins/register/github/transformation.tsx index 0d6473c4d8a..ef9d4d7473f 100644 --- a/config-ui/src/plugins/register/github/transformation.tsx +++ b/config-ui/src/plugins/register/github/transformation.tsx @@ -442,6 +442,39 @@ const renderCollapseItems = ({ /> for calculation +

+ PR Size Exclusions +

+
+ Exclude file extensions (comma-separated, e.g. .md,.txt,.json) + { + // Don't filter during onChange to allow typing commas freely + const extensions = e.target.value + .split(',') + .map((s: string) => s.trim()); + onChangeTransformation({ + ...transformation, + prSizeExcludedFileExtensions: extensions, + }); + }} + onBlur={(e) => { + // Clean up empty entries when user leaves the field + const extensions = e.target.value + .split(',') + .map((s: string) => s.trim()) + .filter((s: string) => s.length > 0); + onChangeTransformation({ + ...transformation, + prSizeExcludedFileExtensions: extensions, + }); + }} + /> + +
), }, From 756a0dbb9a54089928dbd08ee9b1a16dee982c15 Mon Sep 17 00:00:00 2001 From: Spiff Azeta <35563797+spiffaz@users.noreply.github.com> Date: Mon, 2 Mar 2026 14:00:54 +0100 Subject: [PATCH 08/39] fix(doc): update expired Slack invite links in README (#8739) The Slack invite links in README.md were expired and returning "This link is no longer active." Updated both occurrences (badge and community section) to match the current link on the official DevLake website. Closes #8738 Co-authored-by: Spiff Azeta --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c31c0ffd3db..ce0a23215e0 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ limitations under the License. [![Dockerhub pulls](https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fhub.docker.com%2Fv2%2Frepositories%2Fapache%2Fdevlake&query=%24.pull_count&label=Dockerhub%20pulls)](https://hub.docker.com/r/apache/devlake) [![unit-test](https://github.com/apache/incubator-devlake/actions/workflows/test.yml/badge.svg)](https://github.com/apache/incubator-devlake/actions/workflows/test.yml) [![Go Report Card](https://goreportcard.com/badge/github.com/apache/incubator-devlake)](https://goreportcard.com/report/github.com/apache/incubator-devlake) -[![Slack](https://img.shields.io/badge/slack-join_chat-success.svg?logo=slack)](https://join.slack.com/t/devlake-io/shared_invite/zt-18uayb6ut-cHOjiYcBwERQ8VVPZ9cQQw) +[![Slack](https://img.shields.io/badge/slack-join_chat-success.svg?logo=slack)](https://join.slack.com/t/devlake-io/shared_invite/zt-1lkgbdmys-AU2azidzO1u~mtjlg9my7A) [![Twitter](https://badgen.net/badge/icon/twitter?icon=twitter&label)](https://twitter.com/ApacheDevLake) @@ -121,7 +121,7 @@ One of the best ways to get started contributing is by improving DevLake's docum ## 💙 Community -Message us on Slack +Message us on Slack ## 📄 License From 26f3a738fc0f7746477fb0219092e6de0c26f231 Mon Sep 17 00:00:00 2001 From: Eldrick Wega Date: Mon, 
2 Mar 2026 16:10:22 +0300 Subject: [PATCH 09/39] docs: add gh-devlake CLI to Getting Started installation options (#8733) Adds gh-devlake as a third installation method alongside Docker Compose and Helm. gh-devlake is a GitHub CLI extension that automates DevLake deployment, configuration, and monitoring from the terminal. Closes #8732 --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ce0a23215e0..185ebe2a299 100644 --- a/README.md +++ b/README.md @@ -66,6 +66,7 @@ You can set up Apache DevLake by following our step-by-step instructions for eit - [Install via Docker Compose](https://devlake.apache.org/docs/GettingStarted/DockerComposeSetup) - [Install via Helm](https://devlake.apache.org/docs/GettingStarted/HelmSetup) +- [Install via gh-devlake CLI](https://github.com/DevExpGBB/gh-devlake) — A GitHub CLI extension that deploys, configures, and monitors DevLake entirely from the terminal. Supports local Docker and Azure deployments. ## 🤓 Usage From 1421904fbfaab692d97faced56dcd1bc7c272e1d Mon Sep 17 00:00:00 2001 From: Spiff Azeta <35563797+spiffaz@users.noreply.github.com> Date: Mon, 2 Mar 2026 14:11:40 +0100 Subject: [PATCH 10/39] fix(gitlab): add missing repos scope in project_mapping (#8743) GitLab's makeScopeV200 did not create a repos scope when scopeConfig.Entities was empty or only contained CROSS. This caused project_mapping to have no table='repos' row, breaking downstream DORA metrics, PR-issue linking, and all PR dashboard panels that join on project_mapping. The fix aligns GitLab with the GitHub plugin by: 1. Defaulting empty entities to plugin.DOMAIN_TYPES 2. 
Adding DOMAIN_TYPE_CROSS to the repo scope condition Closes #8742 Co-authored-by: Spiff Azeta --- .../plugins/gitlab/api/blueprint_V200_test.go | 64 +++++++++++++++++++ backend/plugins/gitlab/api/blueprint_v200.go | 8 ++- 2 files changed, 71 insertions(+), 1 deletion(-) diff --git a/backend/plugins/gitlab/api/blueprint_V200_test.go b/backend/plugins/gitlab/api/blueprint_V200_test.go index 34f1175de9b..fde792ffd86 100644 --- a/backend/plugins/gitlab/api/blueprint_V200_test.go +++ b/backend/plugins/gitlab/api/blueprint_V200_test.go @@ -72,6 +72,70 @@ func TestMakeScopes(t *testing.T) { assert.Equal(t, actualScopes[2].ScopeId(), expectDomainScopeId) } +func TestMakeScopesWithEmptyEntities(t *testing.T) { + mockGitlabPlugin(t) + + const connectionId = 1 + const gitlabProjectId = 37 + const expectDomainScopeId = "gitlab:GitlabProject:1:37" + + actualScopes, err := makeScopeV200( + connectionId, + []*srvhelper.ScopeDetail[models.GitlabProject, models.GitlabScopeConfig]{ + { + Scope: models.GitlabProject{ + Scope: common.Scope{ + ConnectionId: connectionId, + }, + GitlabId: gitlabProjectId, + }, + ScopeConfig: &models.GitlabScopeConfig{ + ScopeConfig: common.ScopeConfig{ + Entities: []string{}, + }, + }, + }, + }, + ) + assert.Nil(t, err) + // empty entities should default to all domain types, producing repo + cicd + board scopes + assert.Equal(t, 3, len(actualScopes)) + assert.Equal(t, actualScopes[0].ScopeId(), expectDomainScopeId) +} + +func TestMakeScopesWithCrossEntity(t *testing.T) { + mockGitlabPlugin(t) + + const connectionId = 1 + const gitlabProjectId = 37 + const expectDomainScopeId = "gitlab:GitlabProject:1:37" + + actualScopes, err := makeScopeV200( + connectionId, + []*srvhelper.ScopeDetail[models.GitlabProject, models.GitlabScopeConfig]{ + { + Scope: models.GitlabProject{ + Scope: common.Scope{ + ConnectionId: connectionId, + }, + GitlabId: gitlabProjectId, + }, + ScopeConfig: &models.GitlabScopeConfig{ + ScopeConfig: common.ScopeConfig{ + Entities: 
[]string{plugin.DOMAIN_TYPE_CROSS, plugin.DOMAIN_TYPE_TICKET}, + }, + }, + }, + }, + ) + assert.Nil(t, err) + // CROSS entity should trigger repo scope creation, plus ticket = board scope + assert.Equal(t, 2, len(actualScopes)) + assert.Equal(t, actualScopes[0].ScopeId(), expectDomainScopeId) + assert.Equal(t, "repos", actualScopes[0].TableName()) + assert.Equal(t, "boards", actualScopes[1].TableName()) +} + func TestMakeDataSourcePipelinePlanV200(t *testing.T) { mockGitlabPlugin(t) diff --git a/backend/plugins/gitlab/api/blueprint_v200.go b/backend/plugins/gitlab/api/blueprint_v200.go index dbe14905df1..515e8967f22 100644 --- a/backend/plugins/gitlab/api/blueprint_v200.go +++ b/backend/plugins/gitlab/api/blueprint_v200.go @@ -78,8 +78,14 @@ func makeScopeV200( gitlabProject, scopeConfig := scope.Scope, scope.ScopeConfig id := didgen.NewDomainIdGenerator(&models.GitlabProject{}).Generate(connectionId, gitlabProject.GitlabId) + // if no entities specified, use all entities enabled by default + if len(scopeConfig.Entities) == 0 { + scopeConfig.Entities = plugin.DOMAIN_TYPES + } + if utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CODE_REVIEW) || - utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CODE) { + utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CODE) || + utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CROSS) { // if we don't need to collect gitex, we need to add repo to scopes here scopeRepo := code.NewRepo(id, gitlabProject.PathWithNamespace) From 9e5c911218f0b5a5d33974dd2bdedd3241b25811 Mon Sep 17 00:00:00 2001 From: Spiff Azeta <35563797+spiffaz@users.noreply.github.com> Date: Wed, 4 Mar 2026 11:00:45 +0100 Subject: [PATCH 11/39] fix(grafana): update dashboard descriptions to list all supported data sources (#8741) Several dashboard introduction panels hardcoded "GitHub and Jira" as required data sources, even though the underlying queries use generic domain layer tables that work with any 
supported Git tool or issue tracker. Updated to list all supported sources following the pattern already used by DORA and WorkLogs dashboards. Closes #8740 Co-authored-by: Spiff Azeta --- grafana/dashboards/EngineeringOverview.json | 2 +- grafana/dashboards/EngineeringThroughputAndCycleTime.json | 2 +- .../dashboards/EngineeringThroughputAndCycleTimeTeamView.json | 2 +- grafana/dashboards/WeeklyBugRetro.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/grafana/dashboards/EngineeringOverview.json b/grafana/dashboards/EngineeringOverview.json index e2e6cdede52..00ca588768e 100644 --- a/grafana/dashboards/EngineeringOverview.json +++ b/grafana/dashboards/EngineeringOverview.json @@ -40,7 +40,7 @@ "showLineNumbers": false, "showMiniMap": false }, - "content": "- Use Cases: This dashboard is to overview the Git and project management metrics.\n- Data Source Required: Jira + GitHub, or Jira + GitLab.", + "content": "- Use Cases: This dashboard is to overview the Git and project management metrics.\n- Data Sources Required:\n - One of the Git tools, e.g. [GitHub](https://devlake.apache.org/docs/Configuration/GitHub), [GitLab](https://devlake.apache.org/docs/Configuration/GitLab), [Bitbucket](https://devlake.apache.org/docs/Configuration/BitBucket) or [Azure DevOps](https://devlake.apache.org/docs/Configuration/AzureDevOps)\n - One of the issue tracking tools, e.g. 
[Jira](https://devlake.apache.org/docs/Configuration/Jira)", "mode": "markdown" }, "pluginVersion": "11.0.0", diff --git a/grafana/dashboards/EngineeringThroughputAndCycleTime.json b/grafana/dashboards/EngineeringThroughputAndCycleTime.json index a9f1c9e48b3..6d3b5ecc72d 100644 --- a/grafana/dashboards/EngineeringThroughputAndCycleTime.json +++ b/grafana/dashboards/EngineeringThroughputAndCycleTime.json @@ -39,7 +39,7 @@ "showLineNumbers": false, "showMiniMap": false }, - "content": "- Use Cases: This dashboard shows the engineering throughput and and cycle time, which helps to identify productivity and bottlenecks of the development process.\n- Data Source Required: GitHub and Jira([transformation](https://devlake.apache.org/docs/UserManuals/ConfigUI/Jira#step-3---adding-transformation-rules-optional) required to tell DevLake what the story_points field is)", + "content": "- Use Cases: This dashboard shows the engineering throughput and and cycle time, which helps to identify productivity and bottlenecks of the development process.\n- Data Sources Required:\n - One of the Git tools, e.g. [GitHub](https://devlake.apache.org/docs/Configuration/GitHub), [GitLab](https://devlake.apache.org/docs/Configuration/GitLab), [Bitbucket](https://devlake.apache.org/docs/Configuration/BitBucket) or [Azure DevOps](https://devlake.apache.org/docs/Configuration/AzureDevOps)\n - One of the issue tracking tools, e.g. 
[Jira](https://devlake.apache.org/docs/Configuration/Jira) ([Scope Config](https://devlake.apache.org/docs/UserManuals/ConfigUI/Jira#step-3---adding-transformation-rules-optional) required to tell DevLake what the story_points field is)", "mode": "markdown" }, "pluginVersion": "11.0.0", diff --git a/grafana/dashboards/EngineeringThroughputAndCycleTimeTeamView.json b/grafana/dashboards/EngineeringThroughputAndCycleTimeTeamView.json index c9cd453387c..82251d97eab 100644 --- a/grafana/dashboards/EngineeringThroughputAndCycleTimeTeamView.json +++ b/grafana/dashboards/EngineeringThroughputAndCycleTimeTeamView.json @@ -40,7 +40,7 @@ "showLineNumbers": false, "showMiniMap": false }, - "content": "- Use Cases: This dashboard shows the engineering throughput and and cycle time, which helps to identify productivity and bottlenecks of the development process.\n- Data Source Required: GitHub and Jira([transformation](https://devlake.apache.org/docs/UserManuals/ConfigUI/Jira#step-3---adding-transformation-rules-optional) required to tell DevLake what the story_points field is). You also need to do [team configuration](https://devlake.apache.org/docs/Configuration/TeamConfiguration) to use this dashboard.", + "content": "- Use Cases: This dashboard shows the engineering throughput and and cycle time, which helps to identify productivity and bottlenecks of the development process.\n- Data Sources Required:\n - One of the Git tools, e.g. [GitHub](https://devlake.apache.org/docs/Configuration/GitHub), [GitLab](https://devlake.apache.org/docs/Configuration/GitLab), [Bitbucket](https://devlake.apache.org/docs/Configuration/BitBucket) or [Azure DevOps](https://devlake.apache.org/docs/Configuration/AzureDevOps)\n - One of the issue tracking tools, e.g. 
[Jira](https://devlake.apache.org/docs/Configuration/Jira) ([Scope Config](https://devlake.apache.org/docs/UserManuals/ConfigUI/Jira#step-3---adding-transformation-rules-optional) required to tell DevLake what the story_points field is)\n- You also need to do [team configuration](https://devlake.apache.org/docs/Configuration/TeamConfiguration) to use this dashboard.", "mode": "markdown" }, "pluginVersion": "11.0.0", diff --git a/grafana/dashboards/WeeklyBugRetro.json b/grafana/dashboards/WeeklyBugRetro.json index 63746d0740d..f6351bf0e9d 100644 --- a/grafana/dashboards/WeeklyBugRetro.json +++ b/grafana/dashboards/WeeklyBugRetro.json @@ -40,7 +40,7 @@ "showLineNumbers": false, "showMiniMap": false }, - "content": "- Use Cases: This dashboard can be used to track bugs with metrics such as [Bug Age](https://devlake.apache.org/docs/Metrics/BugAge).\n- Data Source Required: GitHub ([Scope Config](https://devlake.apache.org/docs/UserManuals/ConfigUI/GitHub#step-3---adding-transformation-rules-optional) required) or Jira ([Scope Config](https://devlake.apache.org/docs/UserManuals/ConfigUI/Jira#step-3---adding-transformation-rules-optional) required). Scope config is the settings to define which issues are bugs.", + "content": "- Use Cases: This dashboard can be used to track bugs with metrics such as [Bug Age](https://devlake.apache.org/docs/Metrics/BugAge).\n- Data Source Required: One of the issue tracking tools, e.g. 
[GitHub](https://devlake.apache.org/docs/UserManuals/ConfigUI/GitHub#step-3---adding-transformation-rules-optional), [GitLab](https://devlake.apache.org/docs/Configuration/GitLab) or [Jira](https://devlake.apache.org/docs/UserManuals/ConfigUI/Jira#step-3---adding-transformation-rules-optional) (Scope Config required to define which issues are bugs).", "mode": "markdown" }, "pluginVersion": "11.0.0", From d087b50117758731e40a640ef045605d1054ee95 Mon Sep 17 00:00:00 2001 From: Dan Crews Date: Thu, 5 Mar 2026 06:05:20 -0800 Subject: [PATCH 12/39] fix: modify cicd_deployments name from varchar to text (#8724) * fix: modify cicd_deployments name from varchar to text * fix: update the year --- .../domainlayer/devops/cicd_deployment.go | 2 +- ...0260217_modify_cicd_deployments_to_text.go | 66 +++++++++++++++++++ .../core/models/migrationscripts/register.go | 1 + 3 files changed, 68 insertions(+), 1 deletion(-) create mode 100644 backend/core/models/migrationscripts/20260217_modify_cicd_deployments_to_text.go diff --git a/backend/core/models/domainlayer/devops/cicd_deployment.go b/backend/core/models/domainlayer/devops/cicd_deployment.go index b3071077253..e1f9049ded6 100644 --- a/backend/core/models/domainlayer/devops/cicd_deployment.go +++ b/backend/core/models/domainlayer/devops/cicd_deployment.go @@ -24,7 +24,7 @@ import ( type CICDDeployment struct { domainlayer.DomainEntity CicdScopeId string `gorm:"index;type:varchar(255)"` - Name string `gorm:"type:varchar(255)"` + Name string `gorm:"type:text"` DisplayTitle string Url string Result string `gorm:"type:varchar(100)"` diff --git a/backend/core/models/migrationscripts/20260217_modify_cicd_deployments_to_text.go b/backend/core/models/migrationscripts/20260217_modify_cicd_deployments_to_text.go new file mode 100644 index 00000000000..348099cea94 --- /dev/null +++ b/backend/core/models/migrationscripts/20260217_modify_cicd_deployments_to_text.go @@ -0,0 +1,66 @@ +/* +Licensed to the Apache Software Foundation (ASF) under 
one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/migrationhelper" +) + +var _ plugin.MigrationScript = (*modifyCicdDeploymentsToText)(nil) + +type modifyCicdDeploymentsToText struct{} + +type cicdDeployment20260217 struct { + Name string +} + +func (cicdDeployment20260217) TableName() string { + return "cicd_deployments" +} + +func (script *modifyCicdDeploymentsToText) Up(basicRes context.BasicRes) errors.Error { + // cicd_deployments.name might be text, we ought to change the type + // for the column from `varchar(255)` to `text` + db := basicRes.GetDal() + return migrationhelper.ChangeColumnsType[cicdDeployment20260217]( + basicRes, + script, + cicdDeployment20260217{}.TableName(), + []string{"name"}, + func(tmpColumnParams []interface{}) errors.Error { + return db.UpdateColumn( + &cicdDeployment20260217{}, + "name", + dal.DalClause{Expr: " ? ", Params: tmpColumnParams}, + dal.Where("? 
!= '' ", tmpColumnParams...), + ) + }, + ) +} + +func (*modifyCicdDeploymentsToText) Version() uint64 { + return 20260217145125 +} + +func (*modifyCicdDeploymentsToText) Name() string { + return "modify cicd_deployments name from varchar to text" +} diff --git a/backend/core/models/migrationscripts/register.go b/backend/core/models/migrationscripts/register.go index 363de1b5e3f..5b682b66222 100644 --- a/backend/core/models/migrationscripts/register.go +++ b/backend/core/models/migrationscripts/register.go @@ -141,5 +141,6 @@ func All() []plugin.MigrationScript { new(addIssueFixVerion), new(addPipelinePriority), new(fixNullPriority), + new(modifyCicdDeploymentsToText), } } From 14260c4cf8b11c907c6e20fb731c03e28b5d2e56 Mon Sep 17 00:00:00 2001 From: Tomoya Kawaguchi <68677002+yamoyamoto@users.noreply.github.com> Date: Thu, 5 Mar 2026 23:21:03 +0900 Subject: [PATCH 13/39] fix(q_dev): replace MariaDB-specific IF NOT EXISTS syntax with DAL methods for MySQL 8.x compatibility (#8745) --- .../20251209_add_scope_id_fields.go | 26 ++++++++----------- .../20260220_add_account_id_to_s3_slice.go | 12 ++++----- 2 files changed, 16 insertions(+), 22 deletions(-) diff --git a/backend/plugins/q_dev/models/migrationscripts/20251209_add_scope_id_fields.go b/backend/plugins/q_dev/models/migrationscripts/20251209_add_scope_id_fields.go index d7819ff6160..a4448b01248 100644 --- a/backend/plugins/q_dev/models/migrationscripts/20251209_add_scope_id_fields.go +++ b/backend/plugins/q_dev/models/migrationscripts/20251209_add_scope_id_fields.go @@ -19,6 +19,7 @@ package migrationscripts import ( "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/dal" "github.com/apache/incubator-devlake/core/errors" "github.com/apache/incubator-devlake/core/plugin" ) @@ -32,29 +33,24 @@ func (*addScopeIdFields) Up(basicRes context.BasicRes) errors.Error { // Add scope_id column to _tool_q_dev_user_data table // This field links user data to QDevS3Slice scope, which 
can then be mapped to projects via project_mapping - err := db.Exec(` - ALTER TABLE _tool_q_dev_user_data - ADD COLUMN IF NOT EXISTS scope_id VARCHAR(255) DEFAULT NULL - `) - if err != nil { - // Try alternative syntax for databases that don't support IF NOT EXISTS - _ = db.Exec(`ALTER TABLE _tool_q_dev_user_data ADD COLUMN scope_id VARCHAR(255) DEFAULT NULL`) + if !db.HasColumn("_tool_q_dev_user_data", "scope_id") { + if err := db.AddColumn("_tool_q_dev_user_data", "scope_id", dal.Varchar); err != nil { + return errors.Default.Wrap(err, "failed to add scope_id to _tool_q_dev_user_data") + } } // Add index on scope_id for better query performance - _ = db.Exec(`CREATE INDEX IF NOT EXISTS idx_q_dev_user_data_scope_id ON _tool_q_dev_user_data(scope_id)`) + _ = db.Exec(`CREATE INDEX idx_q_dev_user_data_scope_id ON _tool_q_dev_user_data(scope_id)`) // Add scope_id column to _tool_q_dev_s3_file_meta table - err = db.Exec(` - ALTER TABLE _tool_q_dev_s3_file_meta - ADD COLUMN IF NOT EXISTS scope_id VARCHAR(255) DEFAULT NULL - `) - if err != nil { - _ = db.Exec(`ALTER TABLE _tool_q_dev_s3_file_meta ADD COLUMN scope_id VARCHAR(255) DEFAULT NULL`) + if !db.HasColumn("_tool_q_dev_s3_file_meta", "scope_id") { + if err := db.AddColumn("_tool_q_dev_s3_file_meta", "scope_id", dal.Varchar); err != nil { + return errors.Default.Wrap(err, "failed to add scope_id to _tool_q_dev_s3_file_meta") + } } // Add index on scope_id - _ = db.Exec(`CREATE INDEX IF NOT EXISTS idx_q_dev_s3_file_meta_scope_id ON _tool_q_dev_s3_file_meta(scope_id)`) + _ = db.Exec(`CREATE INDEX idx_q_dev_s3_file_meta_scope_id ON _tool_q_dev_s3_file_meta(scope_id)`) return nil } diff --git a/backend/plugins/q_dev/models/migrationscripts/20260220_add_account_id_to_s3_slice.go b/backend/plugins/q_dev/models/migrationscripts/20260220_add_account_id_to_s3_slice.go index 71a13c7b259..f0b0b897fa6 100644 --- a/backend/plugins/q_dev/models/migrationscripts/20260220_add_account_id_to_s3_slice.go +++ 
b/backend/plugins/q_dev/models/migrationscripts/20260220_add_account_id_to_s3_slice.go @@ -19,6 +19,7 @@ package migrationscripts import ( "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/dal" "github.com/apache/incubator-devlake/core/errors" "github.com/apache/incubator-devlake/core/plugin" ) @@ -30,13 +31,10 @@ type addAccountIdToS3Slice struct{} func (*addAccountIdToS3Slice) Up(basicRes context.BasicRes) errors.Error { db := basicRes.GetDal() - err := db.Exec(` - ALTER TABLE _tool_q_dev_s3_slices - ADD COLUMN IF NOT EXISTS account_id VARCHAR(255) DEFAULT NULL - `) - if err != nil { - // Try alternative syntax for databases that don't support IF NOT EXISTS - _ = db.Exec(`ALTER TABLE _tool_q_dev_s3_slices ADD COLUMN account_id VARCHAR(255) DEFAULT NULL`) + if !db.HasColumn("_tool_q_dev_s3_slices", "account_id") { + if err := db.AddColumn("_tool_q_dev_s3_slices", "account_id", dal.Varchar); err != nil { + return errors.Default.Wrap(err, "failed to add account_id to _tool_q_dev_s3_slices") + } } return nil From f94bbbd594c822fe97216191c0ec6a7dd4486443 Mon Sep 17 00:00:00 2001 From: Spiff Azeta <35563797+spiffaz@users.noreply.github.com> Date: Thu, 5 Mar 2026 15:21:24 +0100 Subject: [PATCH 14/39] fix(azuredevops): default empty entities and add CROSS to repo scope in makeScopeV200 (#8751) When scopeConfig.Entities is empty (common when no entities are explicitly selected in the UI), makeScopeV200 produced zero scopes, leaving project_mapping with no rows. Additionally, the repo scope condition did not check for DOMAIN_TYPE_CROSS, so selecting only CROSS would not create a repo scope, breaking DORA metrics. This adds the same fixes applied to GitLab in #8743. 
Closes #8749 --- .../azuredevops_go/api/blueprint_v200.go | 8 ++- .../azuredevops_go/api/blueprint_v200_test.go | 58 +++++++++++++++++++ 2 files changed, 65 insertions(+), 1 deletion(-) diff --git a/backend/plugins/azuredevops_go/api/blueprint_v200.go b/backend/plugins/azuredevops_go/api/blueprint_v200.go index 59a587b7803..018d9d1d77e 100644 --- a/backend/plugins/azuredevops_go/api/blueprint_v200.go +++ b/backend/plugins/azuredevops_go/api/blueprint_v200.go @@ -77,8 +77,14 @@ func makeScopeV200( } id := didgen.NewDomainIdGenerator(&models.AzuredevopsRepo{}).Generate(connectionId, azuredevopsRepo.Id) + // if no entities specified, use all entities enabled by default + if len(scopeConfig.Entities) == 0 { + scopeConfig.Entities = plugin.DOMAIN_TYPES + } + if utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CODE_REVIEW) || - utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CODE) { + utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CODE) || + utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CROSS) { // if we don't need to collect gitex, we need to add repo to scopes here scopeRepo := code.NewRepo(id, azuredevopsRepo.Name) sc = append(sc, scopeRepo) diff --git a/backend/plugins/azuredevops_go/api/blueprint_v200_test.go b/backend/plugins/azuredevops_go/api/blueprint_v200_test.go index ceddcda680d..fd40353f7a8 100644 --- a/backend/plugins/azuredevops_go/api/blueprint_v200_test.go +++ b/backend/plugins/azuredevops_go/api/blueprint_v200_test.go @@ -78,6 +78,64 @@ func TestMakeScopes(t *testing.T) { assert.Equal(t, actualScopes[2].ScopeId(), expectDomainScopeId) } +func TestMakeScopesWithEmptyEntities(t *testing.T) { + mockAzuredevopsPlugin(t) + + actualScopes, err := makeScopeV200( + connectionID, + []*srvhelper.ScopeDetail[models.AzuredevopsRepo, models.AzuredevopsScopeConfig]{ + { + Scope: models.AzuredevopsRepo{ + Scope: common.Scope{ + ConnectionId: connectionID, + }, + Id: azuredevopsRepoId, + Type: 
models.RepositoryTypeADO, + }, + ScopeConfig: &models.AzuredevopsScopeConfig{ + ScopeConfig: common.ScopeConfig{ + Entities: []string{}, + }, + }, + }, + }, + ) + assert.Nil(t, err) + // empty entities should default to all domain types, producing repo + cicd + board scopes + assert.Equal(t, 3, len(actualScopes)) + assert.Equal(t, actualScopes[0].ScopeId(), expectDomainScopeId) +} + +func TestMakeScopesWithCrossEntity(t *testing.T) { + mockAzuredevopsPlugin(t) + + actualScopes, err := makeScopeV200( + connectionID, + []*srvhelper.ScopeDetail[models.AzuredevopsRepo, models.AzuredevopsScopeConfig]{ + { + Scope: models.AzuredevopsRepo{ + Scope: common.Scope{ + ConnectionId: connectionID, + }, + Id: azuredevopsRepoId, + Type: models.RepositoryTypeADO, + }, + ScopeConfig: &models.AzuredevopsScopeConfig{ + ScopeConfig: common.ScopeConfig{ + Entities: []string{plugin.DOMAIN_TYPE_CROSS, plugin.DOMAIN_TYPE_TICKET}, + }, + }, + }, + }, + ) + assert.Nil(t, err) + // CROSS entity should trigger repo scope creation, plus ticket = board scope + assert.Equal(t, 2, len(actualScopes)) + assert.Equal(t, actualScopes[0].ScopeId(), expectDomainScopeId) + assert.Equal(t, "repos", actualScopes[0].TableName()) + assert.Equal(t, "boards", actualScopes[1].TableName()) +} + func TestMakeDataSourcePipelinePlanV200(t *testing.T) { mockAzuredevopsPlugin(t) From 0554c4f69b9a56cbfde9c324f2df77b01e592460 Mon Sep 17 00:00:00 2001 From: Spiff Azeta <35563797+spiffaz@users.noreply.github.com> Date: Fri, 6 Mar 2026 08:04:27 +0100 Subject: [PATCH 15/39] fix(bitbucket): default empty entities to all domain types in makeScopesV200 (#8750) When scopeConfig.Entities is empty (common when no entities are explicitly selected in the UI), makeScopesV200 produced zero scopes, leaving project_mapping with no repo rows. This adds the same empty-entities default applied to GitLab in #8743. 
Closes #8748 --- .../plugins/bitbucket/api/blueprint_v200.go | 5 + .../bitbucket/api/blueprint_v200_test.go | 153 ++++++++++++++++++ 2 files changed, 158 insertions(+) create mode 100644 backend/plugins/bitbucket/api/blueprint_v200_test.go diff --git a/backend/plugins/bitbucket/api/blueprint_v200.go b/backend/plugins/bitbucket/api/blueprint_v200.go index 0e2477e80fe..6fedb0df298 100644 --- a/backend/plugins/bitbucket/api/blueprint_v200.go +++ b/backend/plugins/bitbucket/api/blueprint_v200.go @@ -154,6 +154,11 @@ func makeScopesV200( scope, scopeConfig := scopeDetail.Scope, scopeDetail.ScopeConfig id := idgen.Generate(connection.ID, scope.BitbucketId) + // if no entities specified, use all entities enabled by default + if len(scopeConfig.Entities) == 0 { + scopeConfig.Entities = plugin.DOMAIN_TYPES + } + if utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CODE_REVIEW) || utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CODE) || utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_CROSS) { diff --git a/backend/plugins/bitbucket/api/blueprint_v200_test.go b/backend/plugins/bitbucket/api/blueprint_v200_test.go new file mode 100644 index 00000000000..cd46decc2c3 --- /dev/null +++ b/backend/plugins/bitbucket/api/blueprint_v200_test.go @@ -0,0 +1,153 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package api + +import ( + "testing" + + mockplugin "github.com/apache/incubator-devlake/mocks/core/plugin" + + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/helpers/srvhelper" + "github.com/apache/incubator-devlake/plugins/bitbucket/models" + "github.com/stretchr/testify/assert" +) + +func mockBitbucketPlugin(t *testing.T) { + mockMeta := mockplugin.NewPluginMeta(t) + mockMeta.On("RootPkgPath").Return("github.com/apache/incubator-devlake/plugins/bitbucket") + mockMeta.On("Name").Return("dummy").Maybe() + err := plugin.RegisterPlugin("bitbucket", mockMeta) + assert.Equal(t, err, nil) +} + +func TestMakeScopes(t *testing.T) { + mockBitbucketPlugin(t) + + const connectionId uint64 = 1 + const bitbucketId = "owner/repo" + const expectDomainScopeId = "bitbucket:BitbucketRepo:1:owner/repo" + + actualScopes, err := makeScopesV200( + []*srvhelper.ScopeDetail[models.BitbucketRepo, models.BitbucketScopeConfig]{ + { + Scope: models.BitbucketRepo{ + Scope: common.Scope{ + ConnectionId: connectionId, + }, + BitbucketId: bitbucketId, + }, + ScopeConfig: &models.BitbucketScopeConfig{ + ScopeConfig: common.ScopeConfig{ + Entities: []string{plugin.DOMAIN_TYPE_CODE, plugin.DOMAIN_TYPE_TICKET, plugin.DOMAIN_TYPE_CICD}, + }, + }, + }, + }, + &models.BitbucketConnection{ + BaseConnection: api.BaseConnection{ + Model: common.Model{ + ID: connectionId, + }, + }, + }, + ) + assert.Nil(t, err) + assert.Equal(t, 3, len(actualScopes)) + assert.Equal(t, expectDomainScopeId, actualScopes[0].ScopeId()) + assert.Equal(t, expectDomainScopeId, actualScopes[1].ScopeId()) + assert.Equal(t, expectDomainScopeId, actualScopes[2].ScopeId()) +} + +func TestMakeScopesWithEmptyEntities(t *testing.T) { + mockBitbucketPlugin(t) + + const 
connectionId uint64 = 1 + const bitbucketId = "owner/repo" + const expectDomainScopeId = "bitbucket:BitbucketRepo:1:owner/repo" + + actualScopes, err := makeScopesV200( + []*srvhelper.ScopeDetail[models.BitbucketRepo, models.BitbucketScopeConfig]{ + { + Scope: models.BitbucketRepo{ + Scope: common.Scope{ + ConnectionId: connectionId, + }, + BitbucketId: bitbucketId, + }, + ScopeConfig: &models.BitbucketScopeConfig{ + ScopeConfig: common.ScopeConfig{ + Entities: []string{}, + }, + }, + }, + }, + &models.BitbucketConnection{ + BaseConnection: api.BaseConnection{ + Model: common.Model{ + ID: connectionId, + }, + }, + }, + ) + assert.Nil(t, err) + // empty entities should default to all domain types, producing repo + cicd + board scopes + assert.Equal(t, 3, len(actualScopes)) + assert.Equal(t, expectDomainScopeId, actualScopes[0].ScopeId()) +} + +func TestMakeScopesWithCrossEntity(t *testing.T) { + mockBitbucketPlugin(t) + + const connectionId uint64 = 1 + const bitbucketId = "owner/repo" + const expectDomainScopeId = "bitbucket:BitbucketRepo:1:owner/repo" + + actualScopes, err := makeScopesV200( + []*srvhelper.ScopeDetail[models.BitbucketRepo, models.BitbucketScopeConfig]{ + { + Scope: models.BitbucketRepo{ + Scope: common.Scope{ + ConnectionId: connectionId, + }, + BitbucketId: bitbucketId, + }, + ScopeConfig: &models.BitbucketScopeConfig{ + ScopeConfig: common.ScopeConfig{ + Entities: []string{plugin.DOMAIN_TYPE_CROSS, plugin.DOMAIN_TYPE_TICKET}, + }, + }, + }, + }, + &models.BitbucketConnection{ + BaseConnection: api.BaseConnection{ + Model: common.Model{ + ID: connectionId, + }, + }, + }, + ) + assert.Nil(t, err) + // CROSS entity should trigger repo scope creation, plus ticket = board scope + assert.Equal(t, 2, len(actualScopes)) + assert.Equal(t, expectDomainScopeId, actualScopes[0].ScopeId()) + assert.Equal(t, "repos", actualScopes[0].TableName()) + assert.Equal(t, "boards", actualScopes[1].TableName()) +} From c0845ea0dcabea20998624ac06ad4c1772d15716 Mon Sep 
17 00:00:00 2001 From: Reece Ward <47779818+ReeceXW@users.noreply.github.com> Date: Tue, 10 Mar 2026 05:06:40 +0000 Subject: [PATCH 16/39] feat(gh-copilot): add support for organization daily user metrics (#8747) --- ...303_add_organization_id_to_user_metrics.go | 49 +++++++++++++++++++ .../models/migrationscripts/register.go | 1 + .../plugins/gh-copilot/models/user_metrics.go | 9 ++-- .../tasks/user_metrics_collector.go | 26 +++++----- .../tasks/user_metrics_extractor.go | 23 ++++----- 5 files changed, 80 insertions(+), 28 deletions(-) create mode 100644 backend/plugins/gh-copilot/models/migrationscripts/20260303_add_organization_id_to_user_metrics.go diff --git a/backend/plugins/gh-copilot/models/migrationscripts/20260303_add_organization_id_to_user_metrics.go b/backend/plugins/gh-copilot/models/migrationscripts/20260303_add_organization_id_to_user_metrics.go new file mode 100644 index 00000000000..d868be69084 --- /dev/null +++ b/backend/plugins/gh-copilot/models/migrationscripts/20260303_add_organization_id_to_user_metrics.go @@ -0,0 +1,49 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" +) + +var _ plugin.MigrationScript = (*addOrganizationIdToUserMetrics)(nil) + +type addOrganizationIdToUserMetrics struct{} + +func (script *addOrganizationIdToUserMetrics) Up(basicRes context.BasicRes) errors.Error { + db := basicRes.GetDal() + return db.AutoMigrate(&userDailyMetrics20260303{}) +} + +func (*addOrganizationIdToUserMetrics) Version() uint64 { + return 20260303000000 +} + +func (*addOrganizationIdToUserMetrics) Name() string { + return "add organization_id to user daily metrics" +} + +type userDailyMetrics20260303 struct { + OrganizationId string `gorm:"type:varchar(100)"` +} + +func (userDailyMetrics20260303) TableName() string { + return "_tool_copilot_user_daily_metrics" +} diff --git a/backend/plugins/gh-copilot/models/migrationscripts/register.go b/backend/plugins/gh-copilot/models/migrationscripts/register.go index 3acf34f3788..28fb0caf152 100644 --- a/backend/plugins/gh-copilot/models/migrationscripts/register.go +++ b/backend/plugins/gh-copilot/models/migrationscripts/register.go @@ -30,5 +30,6 @@ func All() []plugin.MigrationScript { new(migrateToUsageMetricsV2), new(addPRFieldsToEnterpriseMetrics), new(addTeamTables), + new(addOrganizationIdToUserMetrics), } } diff --git a/backend/plugins/gh-copilot/models/user_metrics.go b/backend/plugins/gh-copilot/models/user_metrics.go index c53f767ffbc..1f17acad80a 100644 --- a/backend/plugins/gh-copilot/models/user_metrics.go +++ b/backend/plugins/gh-copilot/models/user_metrics.go @@ -30,10 +30,11 @@ type GhCopilotUserDailyMetrics struct { Day time.Time `gorm:"primaryKey;type:date" json:"day"` UserId int64 `gorm:"primaryKey" json:"userId"` - EnterpriseId string `json:"enterpriseId" gorm:"type:varchar(100)"` - UserLogin string `json:"userLogin" gorm:"type:varchar(255);index"` - UsedAgent bool 
`json:"usedAgent"` - UsedChat bool `json:"usedChat"` + OrganizationId string `json:"organizationId" gorm:"type:varchar(100)"` + EnterpriseId string `json:"enterpriseId" gorm:"type:varchar(100)"` + UserLogin string `json:"userLogin" gorm:"type:varchar(255);index"` + UsedAgent bool `json:"usedAgent"` + UsedChat bool `json:"usedChat"` CopilotActivityMetrics `mapstructure:",squash"` common.NoPKModel diff --git a/backend/plugins/gh-copilot/tasks/user_metrics_collector.go b/backend/plugins/gh-copilot/tasks/user_metrics_collector.go index 526c13bf34b..f665a85e74f 100644 --- a/backend/plugins/gh-copilot/tasks/user_metrics_collector.go +++ b/backend/plugins/gh-copilot/tasks/user_metrics_collector.go @@ -32,9 +32,9 @@ import ( const rawUserMetricsTable = "copilot_user_metrics" -// CollectUserMetrics collects enterprise user-level daily Copilot usage reports. +// CollectUserMetrics collects user-level daily Copilot usage reports. // These reports are in JSONL format (one JSON object per line per user). -// Only available for enterprise-scoped connections. 
+// Utilizes the enterprise or organization endpoints depending on connection configuration func CollectUserMetrics(taskCtx plugin.SubTaskContext) errors.Error { data, ok := taskCtx.TaskContext().GetData().(*GhCopilotTaskData) if !ok { @@ -43,16 +43,21 @@ func CollectUserMetrics(taskCtx plugin.SubTaskContext) errors.Error { connection := data.Connection connection.Normalize() - if !connection.HasEnterprise() { - taskCtx.GetLogger().Info("No enterprise configured, skipping user metrics collection") - return nil - } - apiClient, err := CreateApiClient(taskCtx.TaskContext(), connection) if err != nil { return err } + var urlTemplate string + + if connection.HasEnterprise() { + urlTemplate = fmt.Sprintf("enterprises/%s/copilot/metrics/reports/users-1-day", connection.Enterprise) + } else if connection.Organization != "" { + urlTemplate = fmt.Sprintf("orgs/%s/copilot/metrics/reports/users-1-day", connection.Organization) + } else { + return nil + } + rawArgs := helper.RawDataSubTaskArgs{ Ctx: taskCtx, Table: rawUserMetricsTable, @@ -76,10 +81,9 @@ func CollectUserMetrics(taskCtx plugin.SubTaskContext) errors.Error { dayIter := newDayIterator(start, until) err = collector.InitCollector(helper.ApiCollectorArgs{ - ApiClient: apiClient, - Input: dayIter, - UrlTemplate: fmt.Sprintf("enterprises/%s/copilot/metrics/reports/users-1-day", - connection.Enterprise), + ApiClient: apiClient, + Input: dayIter, + UrlTemplate: urlTemplate, Query: func(reqData *helper.RequestData) (url.Values, errors.Error) { input := reqData.Input.(*dayInput) q := url.Values{} diff --git a/backend/plugins/gh-copilot/tasks/user_metrics_extractor.go b/backend/plugins/gh-copilot/tasks/user_metrics_extractor.go index 1eb73554bb3..96f5570f758 100644 --- a/backend/plugins/gh-copilot/tasks/user_metrics_extractor.go +++ b/backend/plugins/gh-copilot/tasks/user_metrics_extractor.go @@ -33,6 +33,7 @@ type userDailyReport struct { ReportStartDay string `json:"report_start_day"` ReportEndDay string 
`json:"report_end_day"` Day string `json:"day"` + OrganizationId string `json:"organization_id"` EnterpriseId string `json:"enterprise_id"` UserId int64 `json:"user_id"` UserLogin string `json:"user_login"` @@ -78,11 +79,6 @@ func ExtractUserMetrics(taskCtx plugin.SubTaskContext) errors.Error { connection := data.Connection connection.Normalize() - if !connection.HasEnterprise() { - taskCtx.GetLogger().Info("No enterprise configured, skipping user metrics extraction") - return nil - } - params := copilotRawParams{ ConnectionId: data.Options.ConnectionId, ScopeId: data.Options.ScopeId, @@ -111,14 +107,15 @@ func ExtractUserMetrics(taskCtx plugin.SubTaskContext) errors.Error { // Main user daily metrics results = append(results, &models.GhCopilotUserDailyMetrics{ - ConnectionId: data.Options.ConnectionId, - ScopeId: data.Options.ScopeId, - Day: day, - UserId: u.UserId, - EnterpriseId: u.EnterpriseId, - UserLogin: u.UserLogin, - UsedAgent: u.UsedAgent, - UsedChat: u.UsedChat, + ConnectionId: data.Options.ConnectionId, + ScopeId: data.Options.ScopeId, + Day: day, + UserId: u.UserId, + OrganizationId: u.OrganizationId, + EnterpriseId: u.EnterpriseId, + UserLogin: u.UserLogin, + UsedAgent: u.UsedAgent, + UsedChat: u.UsedChat, CopilotActivityMetrics: models.CopilotActivityMetrics{ UserInitiatedInteractionCount: u.UserInitiatedInteractionCount, CodeGenerationActivityCount: u.CodeGenerationActivityCount, From f26ef79667c098bd256267d5f3e9a7a64fa1ce3e Mon Sep 17 00:00:00 2001 From: Joshua Smith Date: Tue, 10 Mar 2026 06:25:54 -0600 Subject: [PATCH 17/39] feat(circleci): add server version requirement and endpoint help text (#8757) Update CircleCI connection form to indicate Server v4.x+ requirement and provide guidance for server endpoint configuration. 
Signed-off-by: Joshua Smith --- config-ui/src/plugins/register/circleci/config.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/config-ui/src/plugins/register/circleci/config.tsx b/config-ui/src/plugins/register/circleci/config.tsx index f6fcf06ccbe..64deb5f4ade 100644 --- a/config-ui/src/plugins/register/circleci/config.tsx +++ b/config-ui/src/plugins/register/circleci/config.tsx @@ -34,8 +34,10 @@ export const CircleCIConfig: IPluginConfig = { key: 'endpoint', multipleVersions: { cloud: 'https://circleci.com/api/', - server: '', + server: '(v4.x+)', }, + subLabel: + 'If you are using CircleCI Server, please enter the endpoint URL. E.g. https://circleci.your-company.com/api/', }, 'token', 'proxy', From a4e7351b5ac8f45395dae958f4f8b0fd2a878eef Mon Sep 17 00:00:00 2001 From: jawad khan Date: Thu, 12 Mar 2026 17:27:57 +0500 Subject: [PATCH 18/39] feat(asana): add Asana plugin for project and task collection (#8758) Add a new Asana plugin that integrates with Asana's REST API to collect projects, sections, tasks, subtasks, stories (comments), tags, and users, mapping them to DevLake's ticket/board domain model. 
Backend: - Plugin implementation with all required interfaces (PluginMeta, PluginTask, PluginModel, PluginMigration, PluginSource, PluginApi, DataSourcePluginBlueprintV200) - Collectors, extractors, and converters for projects, sections, tasks, subtasks, stories, tags, and users - Remote API scope picker (Workspaces -> Teams/Portfolios -> Projects) - Scope config with issue-type regex transformation rules - Migration scripts for schema evolution - E2E tests with CSV fixtures for project and task data flows Config UI: - Plugin registration with connection form (PAT auth, endpoint, proxy) - Scope config transformation form for issue-type mapping - Dashboard URL integration for onboarding flow Grafana: - Asana dashboard with task metrics and visualizations Made-with: Cursor --- backend/plugins/asana/api/blueprint_v200.go | 101 ++ backend/plugins/asana/api/connection_api.go | 124 ++ backend/plugins/asana/api/init.go | 54 + backend/plugins/asana/api/remote_api.go | 632 +++++++++ backend/plugins/asana/api/scope_api.go | 48 + backend/plugins/asana/api/scope_config_api.go | 47 + backend/plugins/asana/api/swagger.go | 32 + backend/plugins/asana/asana.go | 42 + backend/plugins/asana/e2e/e2e_path_test.go | 38 + backend/plugins/asana/e2e/project_test.go | 96 ++ .../e2e/raw_tables/_raw_asana_projects.csv | 2 + .../asana/e2e/raw_tables/_raw_asana_tasks.csv | 2 + .../snapshot_tables/_tool_asana_projects.csv | 2 + ..._tool_asana_projects_with_scope_config.csv | 2 + .../_tool_asana_scope_configs.csv | 2 + .../e2e/snapshot_tables/_tool_asana_tasks.csv | 2 + .../asana/e2e/snapshot_tables/boards.csv | 2 + backend/plugins/asana/e2e/task_test.go | 49 + backend/plugins/asana/impl/impl.go | 223 +++ backend/plugins/asana/models/connection.go | 71 + backend/plugins/asana/models/custom_field.go | 59 + backend/plugins/asana/models/membership.go | 48 + .../20250203_add_init_tables.go | 56 + .../20250212_add_missing_tables.go | 51 + ...0212_add_scope_config_issue_type_fields.go | 51 + 
...20250212_add_task_transformation_fields.go | 71 + .../20250212_add_user_photo_url.go | 51 + ...0219_add_connection_id_to_scope_configs.go | 51 + .../migrationscripts/archived/models.go | 264 ++++ .../asana/models/migrationscripts/register.go | 34 + backend/plugins/asana/models/project.go | 63 + backend/plugins/asana/models/scope_config.go | 41 + backend/plugins/asana/models/section.go | 35 + backend/plugins/asana/models/story.go | 47 + backend/plugins/asana/models/tag.go | 50 + backend/plugins/asana/models/task.go | 66 + backend/plugins/asana/models/team.go | 38 + backend/plugins/asana/models/user.go | 37 + backend/plugins/asana/models/workspace.go | 35 + backend/plugins/asana/tasks/api_client.go | 40 + .../plugins/asana/tasks/project_collector.go | 75 ++ .../plugins/asana/tasks/project_convertor.go | 80 ++ .../plugins/asana/tasks/project_extractor.go | 88 ++ .../plugins/asana/tasks/section_collector.go | 72 + .../plugins/asana/tasks/section_extractor.go | 83 ++ .../plugins/asana/tasks/story_collector.go | 100 ++ .../plugins/asana/tasks/story_convertor.go | 88 ++ .../plugins/asana/tasks/story_extractor.go | 119 ++ .../plugins/asana/tasks/subtask_collector.go | 97 ++ .../plugins/asana/tasks/subtask_extractor.go | 124 ++ backend/plugins/asana/tasks/tag_collector.go | 96 ++ backend/plugins/asana/tasks/tag_extractor.go | 98 ++ backend/plugins/asana/tasks/task_collector.go | 102 ++ backend/plugins/asana/tasks/task_convertor.go | 298 +++++ backend/plugins/asana/tasks/task_data.go | 49 + backend/plugins/asana/tasks/task_extractor.go | 162 +++ backend/plugins/asana/tasks/user_collector.go | 101 ++ backend/plugins/asana/tasks/user_convertor.go | 80 ++ backend/plugins/asana/tasks/user_extractor.go | 85 ++ backend/plugins/table_info_test.go | 2 + config-ui/env.example | 1 + .../components/scope-config-form/index.tsx | 10 + .../plugins/register/asana/assets/icon.svg | 20 + .../src/plugins/register/asana/config.tsx | 66 + config-ui/src/plugins/register/asana/index.ts | 20 + 
.../plugins/register/asana/transformation.tsx | 116 ++ config-ui/src/plugins/register/index.ts | 2 + config-ui/src/plugins/utils.ts | 2 + config-ui/src/release/stable.ts | 4 + config-ui/src/routes/onboard/step-4.tsx | 1 + config-ui/src/vite-env.d.ts | 1 + grafana/dashboards/Asana.json | 1192 +++++++++++++++++ 72 files changed, 6193 insertions(+) create mode 100644 backend/plugins/asana/api/blueprint_v200.go create mode 100644 backend/plugins/asana/api/connection_api.go create mode 100644 backend/plugins/asana/api/init.go create mode 100644 backend/plugins/asana/api/remote_api.go create mode 100644 backend/plugins/asana/api/scope_api.go create mode 100644 backend/plugins/asana/api/scope_config_api.go create mode 100644 backend/plugins/asana/api/swagger.go create mode 100644 backend/plugins/asana/asana.go create mode 100644 backend/plugins/asana/e2e/e2e_path_test.go create mode 100644 backend/plugins/asana/e2e/project_test.go create mode 100644 backend/plugins/asana/e2e/raw_tables/_raw_asana_projects.csv create mode 100644 backend/plugins/asana/e2e/raw_tables/_raw_asana_tasks.csv create mode 100644 backend/plugins/asana/e2e/snapshot_tables/_tool_asana_projects.csv create mode 100644 backend/plugins/asana/e2e/snapshot_tables/_tool_asana_projects_with_scope_config.csv create mode 100644 backend/plugins/asana/e2e/snapshot_tables/_tool_asana_scope_configs.csv create mode 100644 backend/plugins/asana/e2e/snapshot_tables/_tool_asana_tasks.csv create mode 100644 backend/plugins/asana/e2e/snapshot_tables/boards.csv create mode 100644 backend/plugins/asana/e2e/task_test.go create mode 100644 backend/plugins/asana/impl/impl.go create mode 100644 backend/plugins/asana/models/connection.go create mode 100644 backend/plugins/asana/models/custom_field.go create mode 100644 backend/plugins/asana/models/membership.go create mode 100644 backend/plugins/asana/models/migrationscripts/20250203_add_init_tables.go create mode 100644 
backend/plugins/asana/models/migrationscripts/20250212_add_missing_tables.go create mode 100644 backend/plugins/asana/models/migrationscripts/20250212_add_scope_config_issue_type_fields.go create mode 100644 backend/plugins/asana/models/migrationscripts/20250212_add_task_transformation_fields.go create mode 100644 backend/plugins/asana/models/migrationscripts/20250212_add_user_photo_url.go create mode 100644 backend/plugins/asana/models/migrationscripts/20250219_add_connection_id_to_scope_configs.go create mode 100644 backend/plugins/asana/models/migrationscripts/archived/models.go create mode 100644 backend/plugins/asana/models/migrationscripts/register.go create mode 100644 backend/plugins/asana/models/project.go create mode 100644 backend/plugins/asana/models/scope_config.go create mode 100644 backend/plugins/asana/models/section.go create mode 100644 backend/plugins/asana/models/story.go create mode 100644 backend/plugins/asana/models/tag.go create mode 100644 backend/plugins/asana/models/task.go create mode 100644 backend/plugins/asana/models/team.go create mode 100644 backend/plugins/asana/models/user.go create mode 100644 backend/plugins/asana/models/workspace.go create mode 100644 backend/plugins/asana/tasks/api_client.go create mode 100644 backend/plugins/asana/tasks/project_collector.go create mode 100644 backend/plugins/asana/tasks/project_convertor.go create mode 100644 backend/plugins/asana/tasks/project_extractor.go create mode 100644 backend/plugins/asana/tasks/section_collector.go create mode 100644 backend/plugins/asana/tasks/section_extractor.go create mode 100644 backend/plugins/asana/tasks/story_collector.go create mode 100644 backend/plugins/asana/tasks/story_convertor.go create mode 100644 backend/plugins/asana/tasks/story_extractor.go create mode 100644 backend/plugins/asana/tasks/subtask_collector.go create mode 100644 backend/plugins/asana/tasks/subtask_extractor.go create mode 100644 backend/plugins/asana/tasks/tag_collector.go create mode 
100644 backend/plugins/asana/tasks/tag_extractor.go create mode 100644 backend/plugins/asana/tasks/task_collector.go create mode 100644 backend/plugins/asana/tasks/task_convertor.go create mode 100644 backend/plugins/asana/tasks/task_data.go create mode 100644 backend/plugins/asana/tasks/task_extractor.go create mode 100644 backend/plugins/asana/tasks/user_collector.go create mode 100644 backend/plugins/asana/tasks/user_convertor.go create mode 100644 backend/plugins/asana/tasks/user_extractor.go create mode 100644 config-ui/src/plugins/register/asana/assets/icon.svg create mode 100644 config-ui/src/plugins/register/asana/config.tsx create mode 100644 config-ui/src/plugins/register/asana/index.ts create mode 100644 config-ui/src/plugins/register/asana/transformation.tsx create mode 100644 grafana/dashboards/Asana.json diff --git a/backend/plugins/asana/api/blueprint_v200.go b/backend/plugins/asana/api/blueprint_v200.go new file mode 100644 index 00000000000..2820593e350 --- /dev/null +++ b/backend/plugins/asana/api/blueprint_v200.go @@ -0,0 +1,101 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package api + +import ( + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + + "github.com/apache/incubator-devlake/plugins/asana/models" + "github.com/apache/incubator-devlake/plugins/asana/tasks" + + "github.com/apache/incubator-devlake/core/errors" + coreModels "github.com/apache/incubator-devlake/core/models" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/core/utils" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/helpers/srvhelper" +) + +func MakePipelinePlanV200( + subtaskMetas []plugin.SubTaskMeta, + connectionId uint64, + bpScopes []*coreModels.BlueprintScope, +) (coreModels.PipelinePlan, []plugin.Scope, errors.Error) { + connection, err := dsHelper.ConnSrv.FindByPk(connectionId) + if err != nil { + return nil, nil, err + } + scopeDetails, err := dsHelper.ScopeSrv.MapScopeDetails(connectionId, bpScopes) + if err != nil { + return nil, nil, err + } + plan, err := makePipelinePlanV200(subtaskMetas, scopeDetails, connection) + if err != nil { + return nil, nil, err + } + scopes, err := makeScopesV200(scopeDetails, connection) + return plan, scopes, err +} + +func makePipelinePlanV200( + subtaskMetas []plugin.SubTaskMeta, + scopeDetails []*srvhelper.ScopeDetail[models.AsanaProject, models.AsanaScopeConfig], + connection *models.AsanaConnection, +) (coreModels.PipelinePlan, errors.Error) { + plan := make(coreModels.PipelinePlan, len(scopeDetails)) + for i, scopeDetail := range scopeDetails { + stage := plan[i] + if stage == nil { + stage = coreModels.PipelineStage{} + } + scope, scopeConfig := scopeDetail.Scope, scopeDetail.ScopeConfig + task, err := helper.MakePipelinePlanTask( + "asana", + subtaskMetas, + scopeConfig.Entities, + tasks.AsanaOptions{ + ConnectionId: connection.ID, + ProjectId: scope.Gid, + ScopeConfigId: scopeConfig.ID, + }, + ) + if err != nil 
{ + return nil, err + } + stage = append(stage, task) + plan[i] = stage + } + return plan, nil +} + +func makeScopesV200( + scopeDetails []*srvhelper.ScopeDetail[models.AsanaProject, models.AsanaScopeConfig], + connection *models.AsanaConnection, +) ([]plugin.Scope, errors.Error) { + scopes := make([]plugin.Scope, 0, len(scopeDetails)) + idgen := didgen.NewDomainIdGenerator(&models.AsanaProject{}) + for _, scopeDetail := range scopeDetails { + scope, scopeConfig := scopeDetail.Scope, scopeDetail.ScopeConfig + id := idgen.Generate(connection.ID, scope.Gid) + if utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_TICKET) { + scopes = append(scopes, ticket.NewBoard(id, scope.Name)) + } + } + return scopes, nil +} diff --git a/backend/plugins/asana/api/connection_api.go b/backend/plugins/asana/api/connection_api.go new file mode 100644 index 00000000000..6d69c8c4ee4 --- /dev/null +++ b/backend/plugins/asana/api/connection_api.go @@ -0,0 +1,124 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package api + +import ( + "context" + "net/http" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" + "github.com/apache/incubator-devlake/server/api/shared" +) + +type AsanaTestConnResponse struct { + shared.ApiBody + Connection *models.AsanaConn +} + +func testConnection(ctx context.Context, connection models.AsanaConn) (*AsanaTestConnResponse, errors.Error) { + if vld != nil { + if err := vld.Struct(connection); err != nil { + return nil, errors.Default.Wrap(err, "error validating target") + } + } + if connection.GetEndpoint() == "" { + connection.Endpoint = defaultAsanaEndpoint + } + apiClient, err := helper.NewApiClientFromConnection(ctx, basicRes, &connection) + if err != nil { + return nil, err + } + res, err := apiClient.Get("users/me", nil, nil) + if err != nil { + return nil, errors.BadInput.Wrap(err, "verify token failed") + } + if res.StatusCode == http.StatusUnauthorized { + return nil, errors.HttpStatus(http.StatusBadRequest).New("StatusUnauthorized error while testing connection") + } + if res.StatusCode != http.StatusOK { + return nil, errors.HttpStatus(res.StatusCode).New("unexpected status code while testing connection") + } + connection = connection.Sanitize() + body := AsanaTestConnResponse{} + body.Success = true + body.Message = "success" + body.Connection = &connection + return &body, nil +} + +func TestConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + var connection models.AsanaConn + err := helper.Decode(input.Body, &connection, vld) + if err != nil { + return nil, err + } + if connection.Endpoint == "" { + connection.Endpoint = defaultAsanaEndpoint + } + result, err := testConnection(context.TODO(), connection) + if err != nil { + return nil, plugin.WrapTestConnectionErrResp(basicRes, err) + } + return 
&plugin.ApiResourceOutput{Body: result, Status: http.StatusOK}, nil +} + +func TestExistingConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + connection, err := dsHelper.ConnApi.GetMergedConnection(input) + if err != nil { + return nil, errors.BadInput.Wrap(err, "find connection from db") + } + if err := helper.DecodeMapStruct(input.Body, connection, false); err != nil { + return nil, err + } + if connection.Endpoint == "" { + connection.Endpoint = defaultAsanaEndpoint + } + testConnectionResult, testConnectionErr := testConnection(context.TODO(), connection.AsanaConn) + if testConnectionErr != nil { + return nil, plugin.WrapTestConnectionErrResp(basicRes, testConnectionErr) + } + return &plugin.ApiResourceOutput{Body: testConnectionResult, Status: http.StatusOK}, nil +} + +func PostConnections(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + if input.Body != nil { + if endpoint, ok := input.Body["endpoint"]; !ok || endpoint == nil || endpoint == "" { + input.Body["endpoint"] = defaultAsanaEndpoint + } + } + return dsHelper.ConnApi.Post(input) +} + +func PatchConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ConnApi.Patch(input) +} + +func DeleteConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ConnApi.Delete(input) +} + +func ListConnections(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ConnApi.GetAll(input) +} + +func GetConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ConnApi.GetDetail(input) +} diff --git a/backend/plugins/asana/api/init.go b/backend/plugins/asana/api/init.go new file mode 100644 index 00000000000..d13865b184c --- /dev/null +++ b/backend/plugins/asana/api/init.go @@ -0,0 +1,54 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor 
license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package api + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" + "github.com/go-playground/validator/v10" +) + +const defaultAsanaEndpoint = "https://app.asana.com/api/1.0/" + +var vld *validator.Validate +var basicRes context.BasicRes + +var dsHelper *api.DsHelper[models.AsanaConnection, models.AsanaProject, models.AsanaScopeConfig] +var raProxy *api.DsRemoteApiProxyHelper[models.AsanaConnection] +var raScopeList *api.DsRemoteApiScopeListHelper[models.AsanaConnection, models.AsanaProject, AsanaRemotePagination] + +func Init(br context.BasicRes, p plugin.PluginMeta) { + basicRes = br + vld = validator.New() + dsHelper = api.NewDataSourceHelper[ + models.AsanaConnection, models.AsanaProject, models.AsanaScopeConfig, + ]( + br, + p.Name(), + []string{"name"}, + func(c models.AsanaConnection) models.AsanaConnection { + return c.Sanitize() + }, + nil, + nil, + ) + raProxy = api.NewDsRemoteApiProxyHelper[models.AsanaConnection](dsHelper.ConnApi.ModelApiHelper) + raScopeList = api.NewDsRemoteApiScopeListHelper[models.AsanaConnection, models.AsanaProject, AsanaRemotePagination](raProxy, 
listAsanaRemoteScopes) +} diff --git a/backend/plugins/asana/api/remote_api.go b/backend/plugins/asana/api/remote_api.go new file mode 100644 index 00000000000..0245cb6008b --- /dev/null +++ b/backend/plugins/asana/api/remote_api.go @@ -0,0 +1,632 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package api + +import ( + "fmt" + "net/url" + "strings" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + dsmodels "github.com/apache/incubator-devlake/helpers/pluginhelper/api/models" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +type AsanaRemotePagination struct { + Offset string `json:"offset"` + Limit int `json:"limit"` +} + +// Response types for Asana API +type asanaWorkspaceResponse struct { + Gid string `json:"gid"` + Name string `json:"name"` + ResourceType string `json:"resource_type"` + IsOrganization bool `json:"is_organization"` +} + +type asanaWorkspacesListResponse struct { + Data []asanaWorkspaceResponse `json:"data"` + NextPage *asanaNextPage `json:"next_page"` +} + +type asanaNextPage struct { + Offset string `json:"offset"` + Path string `json:"path"` + URI string `json:"uri"` +} + +type asanaTeamResponse struct { + Gid string `json:"gid"` + Name string `json:"name"` + ResourceType string `json:"resource_type"` + Description string `json:"description"` + PermalinkUrl string `json:"permalink_url"` +} + +type asanaTeamsListResponse struct { + Data []asanaTeamResponse `json:"data"` + NextPage *asanaNextPage `json:"next_page"` +} + +type asanaPortfolioResponse struct { + Gid string `json:"gid"` + Name string `json:"name"` + ResourceType string `json:"resource_type"` + PermalinkUrl string `json:"permalink_url"` +} + +type asanaPortfoliosListResponse struct { + Data []asanaPortfolioResponse `json:"data"` + NextPage *asanaNextPage `json:"next_page"` +} + +type asanaGoalResponse struct { + Gid string `json:"gid"` + Name string `json:"name"` + ResourceType string `json:"resource_type"` + Notes string `json:"notes"` +} + +type asanaGoalsListResponse struct { + Data []asanaGoalResponse `json:"data"` + NextPage *asanaNextPage `json:"next_page"` +} + +type asanaProjectResponse struct { + Gid string `json:"gid"` 
+ Name string `json:"name"` + ResourceType string `json:"resource_type"` + Archived bool `json:"archived"` + PermalinkUrl string `json:"permalink_url"` + Workspace *struct { + Gid string `json:"gid"` + } `json:"workspace"` + Team *struct { + Gid string `json:"gid"` + Name string `json:"name"` + } `json:"team"` +} + +type asanaProjectsListResponse struct { + Data []asanaProjectResponse `json:"data"` + NextPage *asanaNextPage `json:"next_page"` +} + +// Scope type constants +const ( + ScopeTypeTeam = "team" + ScopeTypePortfolio = "portfolio" + ScopeTypeGoal = "goal" +) + +func listAsanaRemoteScopes( + connection *models.AsanaConnection, + apiClient plugin.ApiClient, + groupId string, + page AsanaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject], + nextPage *AsanaRemotePagination, + err errors.Error, +) { + if page.Limit == 0 { + page.Limit = 100 + } + + // Level 1: No groupId - list workspaces + if groupId == "" { + return listAsanaWorkspaces(apiClient, page) + } + + // Parse hierarchical groupId + // Format examples: + // - "workspace/{gid}" -> show Teams, Portfolios, Goals options + // - "workspace/{gid}/team/{teamGid}" -> list projects in team + // - "workspace/{gid}/portfolio/{portfolioGid}" -> list projects in portfolio + // - "workspace/{gid}/goal/{goalGid}" -> list projects for goal + + if strings.HasPrefix(groupId, "workspace/") { + parts := strings.Split(groupId[10:], "/") // Remove "workspace/" prefix + + if len(parts) == 1 { + // Level 2: Workspace selected - show Teams, Portfolios, Goals as categories + workspaceGid := parts[0] + return listAsanaScopeCategories(apiClient, workspaceGid, page) + } + + if len(parts) >= 3 { + workspaceGid := parts[0] + scopeType := parts[1] + scopeGid := parts[2] + + switch scopeType { + case ScopeTypeTeam: + // Level 4: List projects in team + return listAsanaTeamProjects(apiClient, workspaceGid, scopeGid, page) + case ScopeTypePortfolio: + // Level 4: List projects in portfolio + 
return listAsanaPortfolioProjects(apiClient, workspaceGid, scopeGid, page) + case ScopeTypeGoal: + // Level 4: List projects for goal + return listAsanaGoalProjects(apiClient, workspaceGid, scopeGid, page) + } + } + + if len(parts) == 2 { + workspaceGid := parts[0] + scopeType := parts[1] + + switch scopeType { + case ScopeTypeTeam: + // Level 3: List all teams + return listAsanaTeams(apiClient, workspaceGid, page) + case ScopeTypePortfolio: + // Level 3: List all portfolios + return listAsanaPortfolios(apiClient, workspaceGid, page) + case ScopeTypeGoal: + // Level 3: List all goals + return listAsanaGoals(apiClient, workspaceGid, page) + } + } + } + + return nil, nil, errors.BadInput.New("invalid groupId format") +} + +// Level 1: List workspaces +func listAsanaWorkspaces( + apiClient plugin.ApiClient, + page AsanaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject], + nextPage *AsanaRemotePagination, + err errors.Error, +) { + query := url.Values{} + query.Set("limit", fmt.Sprintf("%d", page.Limit)) + query.Set("opt_fields", "name,resource_type,is_organization") + if page.Offset != "" { + query.Set("offset", page.Offset) + } + + res, err := apiClient.Get("workspaces", query, nil) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to fetch workspaces from Asana API") + } + + var response asanaWorkspacesListResponse + err = api.UnmarshalResponse(res, &response) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to unmarshal Asana workspaces response") + } + + for _, workspace := range response.Data { + groupId := fmt.Sprintf("workspace/%s", workspace.Gid) + children = append(children, dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject]{ + Type: api.RAS_ENTRY_TYPE_GROUP, + ParentId: nil, // Root level, no parent + Id: groupId, + Name: workspace.Name, + FullName: workspace.Name, + }) + } + + if response.NextPage != nil && response.NextPage.Offset != "" { + nextPage = 
&AsanaRemotePagination{ + Offset: response.NextPage.Offset, + Limit: page.Limit, + } + } + + return children, nextPage, nil +} + +// Level 2: Show scope categories (Teams, Portfolios, Goals) +func listAsanaScopeCategories( + apiClient plugin.ApiClient, + workspaceGid string, + page AsanaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject], + nextPage *AsanaRemotePagination, + err errors.Error, +) { + // Parent is the workspace + parentId := fmt.Sprintf("workspace/%s", workspaceGid) + + // Return the three main categories: Teams, Portfolios, Goals + children = []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject]{ + { + Type: api.RAS_ENTRY_TYPE_GROUP, + ParentId: &parentId, + Id: fmt.Sprintf("workspace/%s/%s", workspaceGid, ScopeTypeTeam), + Name: "🏢 Teams", + FullName: "Teams", + }, + { + Type: api.RAS_ENTRY_TYPE_GROUP, + ParentId: &parentId, + Id: fmt.Sprintf("workspace/%s/%s", workspaceGid, ScopeTypePortfolio), + Name: "📁 Portfolios", + FullName: "Portfolios", + }, + { + Type: api.RAS_ENTRY_TYPE_GROUP, + ParentId: &parentId, + Id: fmt.Sprintf("workspace/%s/%s", workspaceGid, ScopeTypeGoal), + Name: "🎯 Goals", + FullName: "Goals", + }, + } + + return children, nil, nil +} + +// Level 3: List teams in workspace +func listAsanaTeams( + apiClient plugin.ApiClient, + workspaceGid string, + page AsanaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject], + nextPage *AsanaRemotePagination, + err errors.Error, +) { + // Parent is the "Teams" category + parentId := fmt.Sprintf("workspace/%s/%s", workspaceGid, ScopeTypeTeam) + + query := url.Values{} + query.Set("limit", fmt.Sprintf("%d", page.Limit)) + query.Set("opt_fields", "name,resource_type,description,permalink_url") + if page.Offset != "" { + query.Set("offset", page.Offset) + } + + apiPath := fmt.Sprintf("workspaces/%s/teams", workspaceGid) + res, err := apiClient.Get(apiPath, query, nil) + if err != nil { + return nil, nil, 
errors.Default.Wrap(err, "failed to fetch teams from Asana API") + } + + var response asanaTeamsListResponse + err = api.UnmarshalResponse(res, &response) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to unmarshal Asana teams response") + } + + for _, team := range response.Data { + groupId := fmt.Sprintf("workspace/%s/%s/%s", workspaceGid, ScopeTypeTeam, team.Gid) + children = append(children, dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject]{ + Type: api.RAS_ENTRY_TYPE_GROUP, + ParentId: &parentId, + Id: groupId, + Name: team.Name, + FullName: team.Name, + }) + } + + if response.NextPage != nil && response.NextPage.Offset != "" { + nextPage = &AsanaRemotePagination{ + Offset: response.NextPage.Offset, + Limit: page.Limit, + } + } + + return children, nextPage, nil +} + +// Level 3: List portfolios in workspace +func listAsanaPortfolios( + apiClient plugin.ApiClient, + workspaceGid string, + page AsanaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject], + nextPage *AsanaRemotePagination, + err errors.Error, +) { + // Parent is the "Portfolios" category + parentId := fmt.Sprintf("workspace/%s/%s", workspaceGid, ScopeTypePortfolio) + + query := url.Values{} + query.Set("limit", fmt.Sprintf("%d", page.Limit)) + query.Set("workspace", workspaceGid) + query.Set("owner", "me") + query.Set("opt_fields", "name,resource_type,permalink_url") + if page.Offset != "" { + query.Set("offset", page.Offset) + } + + res, err := apiClient.Get("portfolios", query, nil) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to fetch portfolios from Asana API") + } + + var response asanaPortfoliosListResponse + err = api.UnmarshalResponse(res, &response) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to unmarshal Asana portfolios response") + } + + for _, portfolio := range response.Data { + groupId := fmt.Sprintf("workspace/%s/%s/%s", workspaceGid, ScopeTypePortfolio, 
portfolio.Gid) + children = append(children, dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject]{ + Type: api.RAS_ENTRY_TYPE_GROUP, + ParentId: &parentId, + Id: groupId, + Name: portfolio.Name, + FullName: portfolio.Name, + }) + } + + if response.NextPage != nil && response.NextPage.Offset != "" { + nextPage = &AsanaRemotePagination{ + Offset: response.NextPage.Offset, + Limit: page.Limit, + } + } + + return children, nextPage, nil +} + +// Level 3: List goals in workspace +func listAsanaGoals( + apiClient plugin.ApiClient, + workspaceGid string, + page AsanaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject], + nextPage *AsanaRemotePagination, + err errors.Error, +) { + // Parent is the "Goals" category + parentId := fmt.Sprintf("workspace/%s/%s", workspaceGid, ScopeTypeGoal) + + query := url.Values{} + query.Set("limit", fmt.Sprintf("%d", page.Limit)) + query.Set("workspace", workspaceGid) + query.Set("opt_fields", "name,resource_type,notes") + if page.Offset != "" { + query.Set("offset", page.Offset) + } + + res, err := apiClient.Get("goals", query, nil) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to fetch goals from Asana API") + } + + var response asanaGoalsListResponse + err = api.UnmarshalResponse(res, &response) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to unmarshal Asana goals response") + } + + for _, goal := range response.Data { + groupId := fmt.Sprintf("workspace/%s/%s/%s", workspaceGid, ScopeTypeGoal, goal.Gid) + children = append(children, dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject]{ + Type: api.RAS_ENTRY_TYPE_GROUP, + ParentId: &parentId, + Id: groupId, + Name: goal.Name, + FullName: goal.Name, + }) + } + + if response.NextPage != nil && response.NextPage.Offset != "" { + nextPage = &AsanaRemotePagination{ + Offset: response.NextPage.Offset, + Limit: page.Limit, + } + } + + return children, nextPage, nil +} + +// Level 4: List projects in a 
team +func listAsanaTeamProjects( + apiClient plugin.ApiClient, + workspaceGid string, + teamGid string, + page AsanaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject], + nextPage *AsanaRemotePagination, + err errors.Error, +) { + // Parent is the specific team + parentId := fmt.Sprintf("workspace/%s/%s/%s", workspaceGid, ScopeTypeTeam, teamGid) + + query := url.Values{} + query.Set("limit", fmt.Sprintf("%d", page.Limit)) + query.Set("opt_fields", "name,resource_type,archived,permalink_url,workspace,team") + if page.Offset != "" { + query.Set("offset", page.Offset) + } + + apiPath := fmt.Sprintf("teams/%s/projects", teamGid) + res, err := apiClient.Get(apiPath, query, nil) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to fetch team projects from Asana API") + } + + var response asanaProjectsListResponse + err = api.UnmarshalResponse(res, &response) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to unmarshal Asana team projects response") + } + + children = convertProjectsToScopes(response.Data, workspaceGid, &parentId) + + if response.NextPage != nil && response.NextPage.Offset != "" { + nextPage = &AsanaRemotePagination{ + Offset: response.NextPage.Offset, + Limit: page.Limit, + } + } + + return children, nextPage, nil +} + +// Level 4: List projects in a portfolio +func listAsanaPortfolioProjects( + apiClient plugin.ApiClient, + workspaceGid string, + portfolioGid string, + page AsanaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject], + nextPage *AsanaRemotePagination, + err errors.Error, +) { + // Parent is the specific portfolio + parentId := fmt.Sprintf("workspace/%s/%s/%s", workspaceGid, ScopeTypePortfolio, portfolioGid) + + query := url.Values{} + query.Set("limit", fmt.Sprintf("%d", page.Limit)) + query.Set("opt_fields", "name,resource_type,archived,permalink_url,workspace,team") + if page.Offset != "" { + query.Set("offset", 
page.Offset) + } + + apiPath := fmt.Sprintf("portfolios/%s/items", portfolioGid) + res, err := apiClient.Get(apiPath, query, nil) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to fetch portfolio items from Asana API") + } + + var response asanaProjectsListResponse + err = api.UnmarshalResponse(res, &response) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to unmarshal Asana portfolio items response") + } + + children = convertProjectsToScopes(response.Data, workspaceGid, &parentId) + + if response.NextPage != nil && response.NextPage.Offset != "" { + nextPage = &AsanaRemotePagination{ + Offset: response.NextPage.Offset, + Limit: page.Limit, + } + } + + return children, nextPage, nil +} + +// Level 4: List projects associated with a goal +func listAsanaGoalProjects( + apiClient plugin.ApiClient, + workspaceGid string, + goalGid string, + page AsanaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject], + nextPage *AsanaRemotePagination, + err errors.Error, +) { + // Parent is the specific goal + parentId := fmt.Sprintf("workspace/%s/%s/%s", workspaceGid, ScopeTypeGoal, goalGid) + + query := url.Values{} + query.Set("limit", fmt.Sprintf("%d", page.Limit)) + query.Set("opt_fields", "name,resource_type,archived,permalink_url,workspace,team") + if page.Offset != "" { + query.Set("offset", page.Offset) + } + + // Goals API: GET /goals/{goal_gid}/parentGoals for related projects + // Note: Asana's goal-to-project relationship is through supporting work + apiPath := fmt.Sprintf("goals/%s/supportingWork", goalGid) + res, err := apiClient.Get(apiPath, query, nil) + if err != nil { + // If supporting work API fails, return empty list + return children, nil, nil + } + + var response asanaProjectsListResponse + err = api.UnmarshalResponse(res, &response) + if err != nil { + return nil, nil, errors.Default.Wrap(err, "failed to unmarshal Asana goal supporting work response") + } + + children = 
convertProjectsToScopes(response.Data, workspaceGid, &parentId) + + if response.NextPage != nil && response.NextPage.Offset != "" { + nextPage = &AsanaRemotePagination{ + Offset: response.NextPage.Offset, + Limit: page.Limit, + } + } + + return children, nextPage, nil +} + +// Helper function to convert Asana projects to scope entries +func convertProjectsToScopes( + projects []asanaProjectResponse, + workspaceGid string, + parentId *string, +) []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject] { + var children []dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject] + + for _, project := range projects { + workspaceGidVal := workspaceGid + if project.Workspace != nil { + workspaceGidVal = project.Workspace.Gid + } + children = append(children, dsmodels.DsRemoteApiScopeListEntry[models.AsanaProject]{ + Type: api.RAS_ENTRY_TYPE_SCOPE, + ParentId: parentId, + Id: project.Gid, + Name: project.Name, + FullName: project.Name, + Data: &models.AsanaProject{ + Gid: project.Gid, + Name: project.Name, + ResourceType: project.ResourceType, + Archived: project.Archived, + PermalinkUrl: project.PermalinkUrl, + WorkspaceGid: workspaceGidVal, + }, + }) + } + + return children +} + +// RemoteScopes list all available scopes (projects) for this connection +// @Summary list all available scopes (projects) for this connection +// @Description list all available scopes (projects) for this connection +// @Tags plugins/asana +// @Accept application/json +// @Param connectionId path int false "connection ID" +// @Param groupId query string false "group ID" +// @Param pageToken query string false "page Token" +// @Success 200 {object} RemoteScopesOutput +// @Failure 400 {object} shared.ApiBody "Bad Request" +// @Failure 500 {object} shared.ApiBody "Internal Error" +// @Router /plugins/asana/connections/{connectionId}/remote-scopes [GET] +func RemoteScopes(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return raScopeList.Get(input) +} + +func 
Proxy(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return raProxy.Proxy(input) +} diff --git a/backend/plugins/asana/api/scope_api.go b/backend/plugins/asana/api/scope_api.go new file mode 100644 index 00000000000..afbc8486583 --- /dev/null +++ b/backend/plugins/asana/api/scope_api.go @@ -0,0 +1,48 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package api + +import ( + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +type PutScopesReqBody api.PutScopesReqBody[models.AsanaProject] +type ScopeDetail api.ScopeDetail[models.AsanaProject, models.AsanaScopeConfig] + +func PutScopes(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.PutMultiple(input) +} + +func PatchScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.Patch(input) +} + +func GetScopeList(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.GetPage(input) +} + +func GetScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.GetScopeDetail(input) +} + +func DeleteScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.Delete(input) +} diff --git a/backend/plugins/asana/api/scope_config_api.go b/backend/plugins/asana/api/scope_config_api.go new file mode 100644 index 00000000000..d7e5b541b11 --- /dev/null +++ b/backend/plugins/asana/api/scope_config_api.go @@ -0,0 +1,47 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package api + +import ( + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" +) + +func PostScopeConfig(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.Post(input) +} + +func PatchScopeConfig(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.Patch(input) +} + +func GetScopeConfig(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.GetDetail(input) +} + +func GetScopeConfigList(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.GetAll(input) +} + +func GetProjectsByScopeConfig(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.GetProjectsByScopeConfig(input) +} + +func DeleteScopeConfig(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.Delete(input) +} diff --git a/backend/plugins/asana/api/swagger.go b/backend/plugins/asana/api/swagger.go new file mode 100644 index 00000000000..43897adf9ba --- /dev/null +++ b/backend/plugins/asana/api/swagger.go @@ -0,0 +1,32 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package api + +import ( + "github.com/apache/incubator-devlake/plugins/asana/tasks" +) + +type AsanaTaskOptions tasks.AsanaOptions + +// @Summary asana task options for pipelines +// @Description Task options for asana pipelines +// @Tags plugins/asana +// @Accept application/json +// @Param pipeline body AsanaTaskOptions true "json" +// @Router /pipelines/asana/pipeline-task [post] +func _() {} diff --git a/backend/plugins/asana/asana.go b/backend/plugins/asana/asana.go new file mode 100644 index 00000000000..4bfbbf5b098 --- /dev/null +++ b/backend/plugins/asana/asana.go @@ -0,0 +1,42 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package main + +import ( + "github.com/apache/incubator-devlake/core/runner" + "github.com/apache/incubator-devlake/plugins/asana/impl" + "github.com/spf13/cobra" +) + +var PluginEntry impl.Asana //nolint + +func main() { + cmd := &cobra.Command{Use: "asana"} + connectionId := cmd.Flags().Uint64P("connection", "c", 0, "asana connection id") + projectId := cmd.Flags().StringP("project", "p", "", "asana project gid") + timeAfter := cmd.Flags().StringP("timeAfter", "a", "", "collect data that are created after specified time, ie 2006-01-02T15:04:05Z") + _ = cmd.MarkFlagRequired("connection") + _ = cmd.MarkFlagRequired("project") + cmd.Run = func(c *cobra.Command, args []string) { + runner.DirectRun(c, args, PluginEntry, map[string]interface{}{ + "connectionId": *connectionId, + "projectId": *projectId, + }, *timeAfter) + } + runner.RunCmd(cmd) +} diff --git a/backend/plugins/asana/e2e/e2e_path_test.go b/backend/plugins/asana/e2e/e2e_path_test.go new file mode 100644 index 00000000000..36858800890 --- /dev/null +++ b/backend/plugins/asana/e2e/e2e_path_test.go @@ -0,0 +1,38 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package e2e + +import ( + "path/filepath" + "runtime" +) + +// e2eDir returns the directory of the asana e2e package so CSV paths work when +// tests run from backend/ (CI) or from the e2e directory. +func e2eDir() string { + _, file, _, _ := runtime.Caller(0) + return filepath.Dir(file) +} + +func rawTablePath(name string) string { + return filepath.Join(e2eDir(), "raw_tables", name) +} + +func snapshotPath(name string) string { + return filepath.Join(e2eDir(), "snapshot_tables", name) +} diff --git a/backend/plugins/asana/e2e/project_test.go b/backend/plugins/asana/e2e/project_test.go new file mode 100644 index 00000000000..3fbc6308ea5 --- /dev/null +++ b/backend/plugins/asana/e2e/project_test.go @@ -0,0 +1,96 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package e2e + +import ( + "testing" + + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/helpers/e2ehelper" + "github.com/apache/incubator-devlake/plugins/asana/impl" + "github.com/apache/incubator-devlake/plugins/asana/models" + "github.com/apache/incubator-devlake/plugins/asana/tasks" +) + +func TestAsanaProjectDataFlow(t *testing.T) { + var asana impl.Asana + dataflowTester := e2ehelper.NewDataFlowTester(t, "asana", asana) + + taskData := &tasks.AsanaTaskData{ + Options: &tasks.AsanaOptions{ + ConnectionId: 1, + ProjectId: "1234567890", + }, + } + + // Import raw data for projects + dataflowTester.ImportCsvIntoRawTable(rawTablePath("_raw_asana_projects.csv"), "_raw_asana_projects") + + // Verify project extraction + dataflowTester.FlushTabler(&models.AsanaProject{}) + dataflowTester.Subtask(tasks.ExtractProjectMeta, taskData) + + dataflowTester.VerifyTableWithOptions(models.AsanaProject{}, e2ehelper.TableOptions{ + CSVRelPath: snapshotPath("_tool_asana_projects.csv"), + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) + + // Verify project conversion to domain layer board + dataflowTester.FlushTabler(&ticket.Board{}) + dataflowTester.Subtask(tasks.ConvertProjectMeta, taskData) + + dataflowTester.VerifyTable( + ticket.Board{}, + snapshotPath("boards.csv"), + []string{ + "id", + "name", + "description", + "url", + "created_date", + }, + ) +} + +func TestAsanaProjectWithScopeConfig(t *testing.T) { + var asana impl.Asana + dataflowTester := e2ehelper.NewDataFlowTester(t, "asana", asana) + + taskData := &tasks.AsanaTaskData{ + Options: &tasks.AsanaOptions{ + ConnectionId: 1, + ProjectId: "1234567890", + ScopeConfigId: 1, + }, + } + + // Import project with scope config association + dataflowTester.ImportCsvIntoRawTable(rawTablePath("_raw_asana_projects.csv"), "_raw_asana_projects") + 
dataflowTester.ImportCsvIntoTabler(snapshotPath("_tool_asana_scope_configs.csv"), &models.AsanaScopeConfig{}) + + // Extract project + dataflowTester.FlushTabler(&models.AsanaProject{}) + dataflowTester.Subtask(tasks.ExtractProjectMeta, taskData) + + // Verify project has scope_config_id + dataflowTester.VerifyTableWithOptions(models.AsanaProject{}, e2ehelper.TableOptions{ + CSVRelPath: snapshotPath("_tool_asana_projects_with_scope_config.csv"), + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) +} diff --git a/backend/plugins/asana/e2e/raw_tables/_raw_asana_projects.csv b/backend/plugins/asana/e2e/raw_tables/_raw_asana_projects.csv new file mode 100644 index 00000000000..fcc941f85de --- /dev/null +++ b/backend/plugins/asana/e2e/raw_tables/_raw_asana_projects.csv @@ -0,0 +1,2 @@ +id,params,data,url,input,created_at +1,"{""ConnectionId"":1,""ProjectId"":""1234567890""}","{""gid"":""1234567890"",""name"":""Test Project"",""resource_type"":""project"",""archived"":false,""permalink_url"":""https://app.asana.com/0/1234567890"",""workspace"":{""gid"":""ws123""}}",https://app.asana.com/api/1.0/projects/1234567890,null,2025-02-03 12:00:00 diff --git a/backend/plugins/asana/e2e/raw_tables/_raw_asana_tasks.csv b/backend/plugins/asana/e2e/raw_tables/_raw_asana_tasks.csv new file mode 100644 index 00000000000..59815e7308c --- /dev/null +++ b/backend/plugins/asana/e2e/raw_tables/_raw_asana_tasks.csv @@ -0,0 +1,2 @@ +id,params,data,url,input,created_at +1,"{""ConnectionId"":1,""ProjectId"":""123456789""}","{""gid"":""987654321"",""name"":""Test Task"",""notes"":""Notes here"",""resource_type"":""task"",""resource_subtype"":""default_task"",""completed"":false,""due_on"":""2025-02-15"",""created_at"":""2025-02-01T10:00:00.000Z"",""permalink_url"":""https://app.asana.com/0/123/987654321"",""memberships"":[{""project"":{""gid"":""123456789""},""section"":{""gid"":""111""}}]}",https://app.asana.com/api/1.0/projects/123456789/tasks,null,2025-02-03 12:00:00 diff --git 
a/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_projects.csv b/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_projects.csv new file mode 100644 index 00000000000..e05e3d2bad2 --- /dev/null +++ b/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_projects.csv @@ -0,0 +1,2 @@ +connection_id,scope_config_id,gid,name,resource_type,archived,workspace_gid,permalink_url +1,0,1234567890,Test Project,project,0,ws123,https://app.asana.com/0/1234567890 diff --git a/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_projects_with_scope_config.csv b/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_projects_with_scope_config.csv new file mode 100644 index 00000000000..12e7ee7aee3 --- /dev/null +++ b/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_projects_with_scope_config.csv @@ -0,0 +1,2 @@ +connection_id,scope_config_id,gid,name,resource_type,archived,workspace_gid,permalink_url +1,1,1234567890,Test Project,project,0,ws123,https://app.asana.com/0/1234567890 diff --git a/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_scope_configs.csv b/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_scope_configs.csv new file mode 100644 index 00000000000..7fd96f9c494 --- /dev/null +++ b/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_scope_configs.csv @@ -0,0 +1,2 @@ +id,connection_id,name,entities,issue_type_requirement,issue_type_bug,issue_type_incident +1,1,default,"[""TICKET""]",(feat|feature|story|requirement),(bug|defect|broken),(incident|outage|failure) diff --git a/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_tasks.csv b/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_tasks.csv new file mode 100644 index 00000000000..e4bde6d2b35 --- /dev/null +++ b/backend/plugins/asana/e2e/snapshot_tables/_tool_asana_tasks.csv @@ -0,0 +1,2 @@ 
+connection_id,gid,name,notes,resource_type,resource_subtype,completed,completed_at,due_on,created_at,modified_at,permalink_url,project_gid,section_gid,section_name,assignee_gid,assignee_name,creator_gid,creator_name,parent_gid,num_subtasks,std_type,std_status,priority,story_point,severity,lead_time_minutes +1,987654321,Test Task,Notes here,task,default_task,0,,2025-02-15T00:00:00.000+00:00,2025-02-01T10:00:00.000+00:00,,https://app.asana.com/0/123/987654321,123456789,111,,,,,,,0,,,,,, diff --git a/backend/plugins/asana/e2e/snapshot_tables/boards.csv b/backend/plugins/asana/e2e/snapshot_tables/boards.csv new file mode 100644 index 00000000000..8dd0e8c6780 --- /dev/null +++ b/backend/plugins/asana/e2e/snapshot_tables/boards.csv @@ -0,0 +1,2 @@ +id,name,description,url,created_date +asana:AsanaProject:1:1234567890,Test Project,,https://app.asana.com/0/1234567890, diff --git a/backend/plugins/asana/e2e/task_test.go b/backend/plugins/asana/e2e/task_test.go new file mode 100644 index 00000000000..5687ceb7c5b --- /dev/null +++ b/backend/plugins/asana/e2e/task_test.go @@ -0,0 +1,49 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package e2e + +import ( + "testing" + + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/helpers/e2ehelper" + "github.com/apache/incubator-devlake/plugins/asana/impl" + "github.com/apache/incubator-devlake/plugins/asana/models" + "github.com/apache/incubator-devlake/plugins/asana/tasks" +) + +func TestAsanaTaskDataFlow(t *testing.T) { + var asana impl.Asana + dataflowTester := e2ehelper.NewDataFlowTester(t, "asana", asana) + + taskData := &tasks.AsanaTaskData{ + Options: &tasks.AsanaOptions{ + ConnectionId: 1, + ProjectId: "123456789", + }, + } + + dataflowTester.ImportCsvIntoRawTable(rawTablePath("_raw_asana_tasks.csv"), "_raw_asana_tasks") + + dataflowTester.FlushTabler(&models.AsanaTask{}) + dataflowTester.Subtask(tasks.ExtractTaskMeta, taskData) + dataflowTester.VerifyTableWithOptions(models.AsanaTask{}, e2ehelper.TableOptions{ + CSVRelPath: snapshotPath("_tool_asana_tasks.csv"), + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) +} diff --git a/backend/plugins/asana/impl/impl.go b/backend/plugins/asana/impl/impl.go new file mode 100644 index 00000000000..42ac786ae5f --- /dev/null +++ b/backend/plugins/asana/impl/impl.go @@ -0,0 +1,223 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package impl + +import ( + "fmt" + + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + coreModels "github.com/apache/incubator-devlake/core/models" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/api" + "github.com/apache/incubator-devlake/plugins/asana/models" + "github.com/apache/incubator-devlake/plugins/asana/models/migrationscripts" + "github.com/apache/incubator-devlake/plugins/asana/tasks" +) + +var _ interface { + plugin.PluginTask + plugin.PluginMeta + plugin.PluginInit + plugin.PluginApi + plugin.PluginModel + plugin.PluginMigration + plugin.PluginSource + plugin.CloseablePluginTask + plugin.DataSourcePluginBlueprintV200 +} = (*Asana)(nil) + +type Asana struct{} + +func (p Asana) Init(basicRes context.BasicRes) errors.Error { + api.Init(basicRes, p) + return nil +} + +func (p Asana) GetTablesInfo() []dal.Tabler { + return []dal.Tabler{ + &models.AsanaConnection{}, + &models.AsanaProject{}, + &models.AsanaScopeConfig{}, + &models.AsanaTask{}, + &models.AsanaSection{}, + &models.AsanaUser{}, + &models.AsanaWorkspace{}, + &models.AsanaTeam{}, + &models.AsanaStory{}, + &models.AsanaTag{}, + &models.AsanaTaskTag{}, + &models.AsanaCustomField{}, + &models.AsanaTaskCustomFieldValue{}, + &models.AsanaProjectMembership{}, + &models.AsanaTeamMembership{}, + } +} + +func (p Asana) Description() string { + return "To collect and enrich data from Asana" +} + +func (p Asana) Name() string { + return "asana" +} + +func (p Asana) SubTaskMetas() []plugin.SubTaskMeta { + return []plugin.SubTaskMeta{ + // Collect and extract in hierarchical order + // 1. Project (scope) + tasks.CollectProjectMeta, + tasks.ExtractProjectMeta, + // 2. Users (project members) + tasks.CollectUserMeta, + tasks.ExtractUserMeta, + // 3. 
Sections + tasks.CollectSectionMeta, + tasks.ExtractSectionMeta, + // 4. Tasks + tasks.CollectTaskMeta, + tasks.ExtractTaskMeta, + // 5. Subtasks (children of tasks) + tasks.CollectSubtaskMeta, + tasks.ExtractSubtaskMeta, + // 6. Stories (comments on tasks) + tasks.CollectStoryMeta, + tasks.ExtractStoryMeta, + // 7. Tags (on tasks) + tasks.CollectTagMeta, + tasks.ExtractTagMeta, + // Convert to domain layer + tasks.ConvertProjectMeta, + tasks.ConvertUserMeta, + tasks.ConvertTaskMeta, + tasks.ConvertStoryMeta, + } +} + +func (p Asana) PrepareTaskData(taskCtx plugin.TaskContext, options map[string]interface{}) (interface{}, errors.Error) { + var op tasks.AsanaOptions + err := helper.Decode(options, &op, nil) + if err != nil { + return nil, errors.Default.Wrap(err, "Asana plugin could not decode options") + } + if op.ProjectId == "" { + return nil, errors.BadInput.New("asana projectId is required") + } + if op.ConnectionId == 0 { + return nil, errors.BadInput.New("asana connectionId is invalid") + } + connection := &models.AsanaConnection{} + connectionHelper := helper.NewConnectionHelper(taskCtx, nil, p.Name()) + err = connectionHelper.FirstById(connection, op.ConnectionId) + if err != nil { + return nil, errors.Default.Wrap(err, "error getting connection for Asana plugin") + } + apiClient, err := tasks.CreateApiClient(taskCtx, connection) + if err != nil { + return nil, err + } + return &tasks.AsanaTaskData{ + Options: &op, + ApiClient: apiClient, + }, nil +} + +func (p Asana) RootPkgPath() string { + return "github.com/apache/incubator-devlake/plugins/asana" +} + +func (p Asana) MigrationScripts() []plugin.MigrationScript { + return migrationscripts.All() +} + +func (p Asana) Connection() dal.Tabler { + return &models.AsanaConnection{} +} + +func (p Asana) Scope() plugin.ToolLayerScope { + return &models.AsanaProject{} +} + +func (p Asana) ScopeConfig() dal.Tabler { + return &models.AsanaScopeConfig{} +} + +func (p Asana) ApiResources() 
map[string]map[string]plugin.ApiResourceHandler { + return map[string]map[string]plugin.ApiResourceHandler{ + "test": { + "POST": api.TestConnection, + }, + "connections": { + "POST": api.PostConnections, + "GET": api.ListConnections, + }, + "connections/:connectionId": { + "PATCH": api.PatchConnection, + "DELETE": api.DeleteConnection, + "GET": api.GetConnection, + }, + "connections/:connectionId/test": { + "POST": api.TestExistingConnection, + }, + "connections/:connectionId/proxy/rest/*path": { + "GET": api.Proxy, + }, + "connections/:connectionId/remote-scopes": { + "GET": api.RemoteScopes, + }, + "connections/:connectionId/scope-configs": { + "POST": api.PostScopeConfig, + "GET": api.GetScopeConfigList, + }, + "connections/:connectionId/scope-configs/:scopeConfigId": { + "PATCH": api.PatchScopeConfig, + "GET": api.GetScopeConfig, + "DELETE": api.DeleteScopeConfig, + }, + "connections/:connectionId/scopes/:scopeId": { + "GET": api.GetScope, + "PATCH": api.PatchScope, + "DELETE": api.DeleteScope, + }, + "connections/:connectionId/scopes": { + "GET": api.GetScopeList, + "PUT": api.PutScopes, + }, + "scope-config/:scopeConfigId/projects": { + "GET": api.GetProjectsByScopeConfig, + }, + } +} + +func (p Asana) MakeDataSourcePipelinePlanV200( + connectionId uint64, + scopes []*coreModels.BlueprintScope, +) (coreModels.PipelinePlan, []plugin.Scope, errors.Error) { + return api.MakePipelinePlanV200(p.SubTaskMetas(), connectionId, scopes) +} + +func (p Asana) Close(taskCtx plugin.TaskContext) errors.Error { + data, ok := taskCtx.GetData().(*tasks.AsanaTaskData) + if !ok { + return errors.Default.New(fmt.Sprintf("GetData failed when try to close %+v", taskCtx)) + } + data.ApiClient.Release() + return nil +} diff --git a/backend/plugins/asana/models/connection.go b/backend/plugins/asana/models/connection.go new file mode 100644 index 00000000000..ecd49f1aa7a --- /dev/null +++ b/backend/plugins/asana/models/connection.go @@ -0,0 +1,71 @@ +/* +Licensed to the Apache 
Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "fmt" + "net/http" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/utils" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" +) + +// AsanaConn holds the essential information to connect to the Asana API +type AsanaConn struct { + helper.RestConnection `mapstructure:",squash"` + Token string `mapstructure:"token" json:"token" encrypt:"yes"` +} + +func (ac *AsanaConn) Sanitize() AsanaConn { + ac.Token = utils.SanitizeString(ac.Token) + return *ac +} + +// AsanaConnection holds AsanaConn plus ID/Name for database storage +type AsanaConnection struct { + helper.BaseConnection `mapstructure:",squash"` + AsanaConn `mapstructure:",squash"` +} + +func (connection *AsanaConnection) MergeFromRequest(target *AsanaConnection, body map[string]interface{}) error { + token := target.Token + if err := helper.DecodeMapStruct(body, target, true); err != nil { + return err + } + modifiedToken := target.Token + if modifiedToken == "" || modifiedToken == utils.SanitizeString(token) { + target.Token = token + } + return nil +} + +func (connection AsanaConnection) Sanitize() AsanaConnection { + connection.AsanaConn = connection.AsanaConn.Sanitize() + 
return connection +} + +// SetupAuthentication sets up the HTTP Request Authentication (Bearer token) +func (ac *AsanaConn) SetupAuthentication(req *http.Request) errors.Error { + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", ac.Token)) + return nil +} + +func (AsanaConnection) TableName() string { + return "_tool_asana_connections" +} diff --git a/backend/plugins/asana/models/custom_field.go b/backend/plugins/asana/models/custom_field.go new file mode 100644 index 00000000000..aabade05f84 --- /dev/null +++ b/backend/plugins/asana/models/custom_field.go @@ -0,0 +1,59 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package models + +import ( + "github.com/apache/incubator-devlake/core/models/common" +) + +// AsanaCustomField represents a custom field definition +type AsanaCustomField struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + ResourceSubtype string `gorm:"type:varchar(32)"` + Type string `gorm:"type:varchar(32)"` + Description string `gorm:"type:text"` + Precision int `json:"precision"` + IsGlobalToWorkspace bool `json:"isGlobalToWorkspace"` + HasNotificationsEnabled bool `json:"hasNotificationsEnabled"` + common.NoPKModel +} + +func (AsanaCustomField) TableName() string { + return "_tool_asana_custom_fields" +} + +// AsanaTaskCustomFieldValue represents a custom field value on a task +type AsanaTaskCustomFieldValue struct { + ConnectionId uint64 `gorm:"primaryKey"` + TaskGid string `gorm:"primaryKey;type:varchar(255)"` + CustomFieldGid string `gorm:"primaryKey;type:varchar(255)"` + CustomFieldName string `gorm:"type:varchar(255)"` + DisplayValue string `gorm:"type:text"` + TextValue string `gorm:"type:text"` + NumberValue *float64 `json:"numberValue"` + EnumValueGid string `gorm:"type:varchar(255)"` + EnumValueName string `gorm:"type:varchar(255)"` + common.NoPKModel +} + +func (AsanaTaskCustomFieldValue) TableName() string { + return "_tool_asana_task_custom_field_values" +} diff --git a/backend/plugins/asana/models/membership.go b/backend/plugins/asana/models/membership.go new file mode 100644 index 00000000000..572e2464ecd --- /dev/null +++ b/backend/plugins/asana/models/membership.go @@ -0,0 +1,48 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "github.com/apache/incubator-devlake/core/models/common" +) + +// AsanaProjectMembership links users to projects with their role +type AsanaProjectMembership struct { + ConnectionId uint64 `gorm:"primaryKey"` + ProjectGid string `gorm:"primaryKey;type:varchar(255)"` + UserGid string `gorm:"primaryKey;type:varchar(255)"` + Role string `gorm:"type:varchar(32)"` + common.NoPKModel +} + +func (AsanaProjectMembership) TableName() string { + return "_tool_asana_project_memberships" +} + +// AsanaTeamMembership links users to teams +type AsanaTeamMembership struct { + ConnectionId uint64 `gorm:"primaryKey"` + TeamGid string `gorm:"primaryKey;type:varchar(255)"` + UserGid string `gorm:"primaryKey;type:varchar(255)"` + IsGuest bool `json:"isGuest"` + common.NoPKModel +} + +func (AsanaTeamMembership) TableName() string { + return "_tool_asana_team_memberships" +} diff --git a/backend/plugins/asana/models/migrationscripts/20250203_add_init_tables.go b/backend/plugins/asana/models/migrationscripts/20250203_add_init_tables.go new file mode 100644 index 00000000000..a2dae698bd0 --- /dev/null +++ b/backend/plugins/asana/models/migrationscripts/20250203_add_init_tables.go @@ -0,0 +1,56 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/helpers/migrationhelper" + "github.com/apache/incubator-devlake/plugins/asana/models/migrationscripts/archived" +) + +type addInitTables struct{} + +func (*addInitTables) Up(basicRes context.BasicRes) errors.Error { + return migrationhelper.AutoMigrateTables( + basicRes, + &archived.AsanaConnection{}, + &archived.AsanaProject{}, + &archived.AsanaScopeConfig{}, + &archived.AsanaTask{}, + &archived.AsanaSection{}, + &archived.AsanaUser{}, + &archived.AsanaWorkspace{}, + &archived.AsanaTeam{}, + &archived.AsanaStory{}, + &archived.AsanaTag{}, + &archived.AsanaTaskTag{}, + &archived.AsanaCustomField{}, + &archived.AsanaTaskCustomFieldValue{}, + &archived.AsanaProjectMembership{}, + &archived.AsanaTeamMembership{}, + ) +} + +func (*addInitTables) Version() uint64 { + return 20250203000001 +} + +func (*addInitTables) Name() string { + return "asana init schemas" +} diff --git a/backend/plugins/asana/models/migrationscripts/20250212_add_missing_tables.go b/backend/plugins/asana/models/migrationscripts/20250212_add_missing_tables.go new file mode 100644 index 00000000000..35c482d271c --- /dev/null +++ b/backend/plugins/asana/models/migrationscripts/20250212_add_missing_tables.go @@ -0,0 +1,51 @@ +/* +Licensed to the Apache Software Foundation 
(ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/helpers/migrationhelper" + "github.com/apache/incubator-devlake/plugins/asana/models/migrationscripts/archived" +) + +type addMissingTables struct{} + +func (*addMissingTables) Up(basicRes context.BasicRes) errors.Error { + // Add all the new tables that were added after the initial migration + return migrationhelper.AutoMigrateTables( + basicRes, + &archived.AsanaWorkspace{}, + &archived.AsanaTeam{}, + &archived.AsanaStory{}, + &archived.AsanaTag{}, + &archived.AsanaTaskTag{}, + &archived.AsanaCustomField{}, + &archived.AsanaTaskCustomFieldValue{}, + &archived.AsanaProjectMembership{}, + &archived.AsanaTeamMembership{}, + ) +} + +func (*addMissingTables) Version() uint64 { + return 20250212000002 +} + +func (*addMissingTables) Name() string { + return "asana add missing tables for hierarchical data" +} diff --git a/backend/plugins/asana/models/migrationscripts/20250212_add_scope_config_issue_type_fields.go b/backend/plugins/asana/models/migrationscripts/20250212_add_scope_config_issue_type_fields.go new file mode 100644 index 00000000000..e092290c6ad --- /dev/null +++ 
b/backend/plugins/asana/models/migrationscripts/20250212_add_scope_config_issue_type_fields.go @@ -0,0 +1,51 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" +) + +var _ plugin.MigrationScript = (*addScopeConfigIssueTypeFields)(nil) + +type addScopeConfigIssueTypeFields struct{} + +type asanaScopeConfig20250212v2 struct { + IssueTypeRequirement string `gorm:"type:varchar(255)"` + IssueTypeBug string `gorm:"type:varchar(255)"` + IssueTypeIncident string `gorm:"type:varchar(255)"` +} + +func (asanaScopeConfig20250212v2) TableName() string { + return "_tool_asana_scope_configs" +} + +func (*addScopeConfigIssueTypeFields) Up(basicRes context.BasicRes) errors.Error { + db := basicRes.GetDal() + return db.AutoMigrate(&asanaScopeConfig20250212v2{}) +} + +func (*addScopeConfigIssueTypeFields) Version() uint64 { + return 20250212000004 +} + +func (*addScopeConfigIssueTypeFields) Name() string { + return "asana add issue type fields to scope config" +} diff --git a/backend/plugins/asana/models/migrationscripts/20250212_add_task_transformation_fields.go 
b/backend/plugins/asana/models/migrationscripts/20250212_add_task_transformation_fields.go new file mode 100644 index 00000000000..7af1404489b --- /dev/null +++ b/backend/plugins/asana/models/migrationscripts/20250212_add_task_transformation_fields.go @@ -0,0 +1,71 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" +) + +var _ plugin.MigrationScript = (*addTaskTransformationFields)(nil) + +type addTaskTransformationFields struct{} + +type asanaTask20250212 struct { + SectionName string `gorm:"type:varchar(255)"` + StdType string `gorm:"type:varchar(255)"` + StdStatus string `gorm:"type:varchar(255)"` + Priority string `gorm:"type:varchar(255)"` + StoryPoint *float64 `gorm:"type:double"` + Severity string `gorm:"type:varchar(255)"` + LeadTimeMinutes *uint `gorm:"type:int unsigned"` +} + +func (asanaTask20250212) TableName() string { + return "_tool_asana_tasks" +} + +type asanaScopeConfig20250212 struct { + // Regex patterns for tag-based type classification (like GitHub) + IssueTypeRequirement string `gorm:"type:varchar(255)"` + IssueTypeBug string `gorm:"type:varchar(255)"` + IssueTypeIncident string `gorm:"type:varchar(255)"` +} + +func (asanaScopeConfig20250212) TableName() string { + return "_tool_asana_scope_configs" +} + +func (*addTaskTransformationFields) Up(basicRes context.BasicRes) errors.Error { + db := basicRes.GetDal() + // Add transformation fields to tasks table + if err := db.AutoMigrate(&asanaTask20250212{}); err != nil { + return err + } + // Add transformation config fields to scope_configs table + return db.AutoMigrate(&asanaScopeConfig20250212{}) +} + +func (*addTaskTransformationFields) Version() uint64 { + return 20250212000003 +} + +func (*addTaskTransformationFields) Name() string { + return "asana add task transformation fields for issue tracking" +} diff --git a/backend/plugins/asana/models/migrationscripts/20250212_add_user_photo_url.go b/backend/plugins/asana/models/migrationscripts/20250212_add_user_photo_url.go new file mode 100644 index 00000000000..fc87c9fa1aa --- /dev/null +++ 
b/backend/plugins/asana/models/migrationscripts/20250212_add_user_photo_url.go @@ -0,0 +1,51 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" +) + +var _ plugin.MigrationScript = (*addUserPhotoUrl)(nil) + +type addUserPhotoUrl struct{} + +type asanaUser20250212 struct { + PhotoUrl string `gorm:"type:varchar(512)"` + WorkspaceGids string `gorm:"type:text"` +} + +func (asanaUser20250212) TableName() string { + return "_tool_asana_users" +} + +func (*addUserPhotoUrl) Up(basicRes context.BasicRes) errors.Error { + db := basicRes.GetDal() + // Add photo_url and workspace_gids columns to _tool_asana_users table + return db.AutoMigrate(&asanaUser20250212{}) +} + +func (*addUserPhotoUrl) Version() uint64 { + return 20250212000001 +} + +func (*addUserPhotoUrl) Name() string { + return "asana add photo_url and workspace_gids to users table" +} diff --git a/backend/plugins/asana/models/migrationscripts/20250219_add_connection_id_to_scope_configs.go b/backend/plugins/asana/models/migrationscripts/20250219_add_connection_id_to_scope_configs.go new file mode 100644 index 
00000000000..dff72ee5c15 --- /dev/null +++ b/backend/plugins/asana/models/migrationscripts/20250219_add_connection_id_to_scope_configs.go @@ -0,0 +1,51 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" +) + +var _ plugin.MigrationScript = (*addConnectionIdToAsanaScopeConfigs)(nil) + +// asanaScopeConfig20250219 adds connection_id and name to match common.ScopeConfig. +// The init migration used archived.ScopeConfig which did not have these columns. 
+type asanaScopeConfig20250219 struct { + ConnectionId uint64 `json:"connectionId" gorm:"index" mapstructure:"connectionId,omitempty"` + Name string `mapstructure:"name" json:"name" gorm:"type:varchar(255);uniqueIndex"` +} + +func (asanaScopeConfig20250219) TableName() string { + return "_tool_asana_scope_configs" +} + +type addConnectionIdToAsanaScopeConfigs struct{} + +func (*addConnectionIdToAsanaScopeConfigs) Up(basicRes context.BasicRes) errors.Error { + return basicRes.GetDal().AutoMigrate(&asanaScopeConfig20250219{}) +} + +func (*addConnectionIdToAsanaScopeConfigs) Version() uint64 { + return 20250219000001 +} + +func (*addConnectionIdToAsanaScopeConfigs) Name() string { + return "add connection_id and name to _tool_asana_scope_configs" +} diff --git a/backend/plugins/asana/models/migrationscripts/archived/models.go b/backend/plugins/asana/models/migrationscripts/archived/models.go new file mode 100644 index 00000000000..1551cad6115 --- /dev/null +++ b/backend/plugins/asana/models/migrationscripts/archived/models.go @@ -0,0 +1,264 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package archived + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/migrationscripts/archived" +) + +type AsanaConnection struct { + archived.Model + Name string `gorm:"type:varchar(150);uniqueIndex" json:"name" validate:"required"` + Endpoint string `mapstructure:"endpoint" json:"endpoint" validate:"required"` + Proxy string `mapstructure:"proxy" json:"proxy"` + RateLimitPerHour int `comment:"api request rate limit per hour" json:"rateLimitPerHour"` + Token string `mapstructure:"token" json:"token" gorm:"serializer:encdec" encrypt:"yes"` +} + +func (AsanaConnection) TableName() string { + return "_tool_asana_connections" +} + +type AsanaProject struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `json:"gid" gorm:"type:varchar(255);primaryKey"` + Name string `json:"name" gorm:"type:varchar(255)"` + ResourceType string `json:"resourceType" gorm:"type:varchar(32)"` + Archived bool `json:"archived"` + WorkspaceGid string `json:"workspaceGid" gorm:"type:varchar(255)"` + PermalinkUrl string `json:"permalinkUrl" gorm:"type:varchar(512)"` + ScopeConfigId uint64 `json:"scopeConfigId,omitempty" mapstructure:"scopeConfigId"` + archived.NoPKModel +} + +func (AsanaProject) TableName() string { + return "_tool_asana_projects" +} + +type AsanaScopeConfig struct { + archived.ScopeConfig + IssueTypeRequirement string `mapstructure:"issueTypeRequirement,omitempty" json:"issueTypeRequirement" gorm:"type:varchar(255)"` + IssueTypeBug string `mapstructure:"issueTypeBug,omitempty" json:"issueTypeBug" gorm:"type:varchar(255)"` + IssueTypeIncident string `mapstructure:"issueTypeIncident,omitempty" json:"issueTypeIncident" gorm:"type:varchar(255)"` +} + +func (AsanaScopeConfig) TableName() string { + return "_tool_asana_scope_configs" +} + +type AsanaTask struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(512)"` + Notes string `gorm:"type:text"` + ResourceType 
string `gorm:"type:varchar(32)"` + ResourceSubtype string `gorm:"type:varchar(32)"` + Completed bool `json:"completed"` + CompletedAt *time.Time `json:"completedAt"` + DueOn *time.Time `gorm:"type:date" json:"dueOn"` + CreatedAt time.Time `json:"createdAt"` + ModifiedAt *time.Time `json:"modifiedAt"` + PermalinkUrl string `gorm:"type:varchar(512)"` + ProjectGid string `gorm:"type:varchar(255);index"` + SectionGid string `gorm:"type:varchar(255);index"` + SectionName string `gorm:"type:varchar(255)"` + AssigneeGid string `gorm:"type:varchar(255)"` + AssigneeName string `gorm:"type:varchar(255)"` + CreatorGid string `gorm:"type:varchar(255)"` + CreatorName string `gorm:"type:varchar(255)"` + ParentGid string `gorm:"type:varchar(255);index"` + NumSubtasks int `json:"numSubtasks"` + StdType string `gorm:"type:varchar(255)"` + StdStatus string `gorm:"type:varchar(255)"` + Priority string `gorm:"type:varchar(255)"` + StoryPoint *float64 `json:"storyPoint"` + Severity string `gorm:"type:varchar(255)"` + LeadTimeMinutes *uint `json:"leadTimeMinutes"` + archived.NoPKModel +} + +func (AsanaTask) TableName() string { + return "_tool_asana_tasks" +} + +type AsanaSection struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + ProjectGid string `gorm:"type:varchar(255);index"` + archived.NoPKModel +} + +func (AsanaSection) TableName() string { + return "_tool_asana_sections" +} + +type AsanaUser struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + Email string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + PhotoUrl string `gorm:"type:varchar(512)"` + WorkspaceGids string `gorm:"type:text"` + archived.NoPKModel +} + +func (AsanaUser) TableName() string { + return "_tool_asana_users" +} + +type AsanaWorkspace struct { + 
ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + IsOrganization bool `json:"isOrganization"` + archived.NoPKModel +} + +func (AsanaWorkspace) TableName() string { + return "_tool_asana_workspaces" +} + +type AsanaTeam struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + Description string `gorm:"type:text"` + HtmlDescription string `gorm:"type:text"` + OrganizationGid string `gorm:"type:varchar(255);index"` + PermalinkUrl string `gorm:"type:varchar(512)"` + archived.NoPKModel +} + +func (AsanaTeam) TableName() string { + return "_tool_asana_teams" +} + +type AsanaStory struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + ResourceSubtype string `gorm:"type:varchar(64)"` + Text string `gorm:"type:text"` + HtmlText string `gorm:"type:text"` + IsPinned bool `json:"isPinned"` + IsEdited bool `json:"isEdited"` + StickerName string `gorm:"type:varchar(64)"` + CreatedAt time.Time `json:"createdAt"` + CreatedByGid string `gorm:"type:varchar(255)"` + CreatedByName string `gorm:"type:varchar(255)"` + TaskGid string `gorm:"type:varchar(255);index"` + TargetGid string `gorm:"type:varchar(255);index"` + archived.NoPKModel +} + +func (AsanaStory) TableName() string { + return "_tool_asana_stories" +} + +type AsanaTag struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + Color string `gorm:"type:varchar(32)"` + Notes string `gorm:"type:text"` + WorkspaceGid string `gorm:"type:varchar(255);index"` + PermalinkUrl string `gorm:"type:varchar(512)"` + archived.NoPKModel +} + +func 
(AsanaTag) TableName() string { + return "_tool_asana_tags" +} + +type AsanaTaskTag struct { + ConnectionId uint64 `gorm:"primaryKey"` + TaskGid string `gorm:"primaryKey;type:varchar(255)"` + TagGid string `gorm:"primaryKey;type:varchar(255)"` + archived.NoPKModel +} + +func (AsanaTaskTag) TableName() string { + return "_tool_asana_task_tags" +} + +type AsanaCustomField struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + ResourceSubtype string `gorm:"type:varchar(32)"` + Type string `gorm:"type:varchar(32)"` + Description string `gorm:"type:text"` + Precision int `json:"precision"` + IsGlobalToWorkspace bool `json:"isGlobalToWorkspace"` + HasNotificationsEnabled bool `json:"hasNotificationsEnabled"` + archived.NoPKModel +} + +func (AsanaCustomField) TableName() string { + return "_tool_asana_custom_fields" +} + +type AsanaTaskCustomFieldValue struct { + ConnectionId uint64 `gorm:"primaryKey"` + TaskGid string `gorm:"primaryKey;type:varchar(255)"` + CustomFieldGid string `gorm:"primaryKey;type:varchar(255)"` + CustomFieldName string `gorm:"type:varchar(255)"` + DisplayValue string `gorm:"type:text"` + TextValue string `gorm:"type:text"` + NumberValue *float64 `json:"numberValue"` + EnumValueGid string `gorm:"type:varchar(255)"` + EnumValueName string `gorm:"type:varchar(255)"` + archived.NoPKModel +} + +func (AsanaTaskCustomFieldValue) TableName() string { + return "_tool_asana_task_custom_field_values" +} + +type AsanaProjectMembership struct { + ConnectionId uint64 `gorm:"primaryKey"` + ProjectGid string `gorm:"primaryKey;type:varchar(255)"` + UserGid string `gorm:"primaryKey;type:varchar(255)"` + Role string `gorm:"type:varchar(32)"` + archived.NoPKModel +} + +func (AsanaProjectMembership) TableName() string { + return "_tool_asana_project_memberships" +} + +type AsanaTeamMembership struct { + ConnectionId uint64 
`gorm:"primaryKey"` + TeamGid string `gorm:"primaryKey;type:varchar(255)"` + UserGid string `gorm:"primaryKey;type:varchar(255)"` + IsGuest bool `json:"isGuest"` + archived.NoPKModel +} + +func (AsanaTeamMembership) TableName() string { + return "_tool_asana_team_memberships" +} diff --git a/backend/plugins/asana/models/migrationscripts/register.go b/backend/plugins/asana/models/migrationscripts/register.go new file mode 100644 index 00000000000..9d25d2ed5b3 --- /dev/null +++ b/backend/plugins/asana/models/migrationscripts/register.go @@ -0,0 +1,34 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/plugin" +) + +// All return all the migration scripts +func All() []plugin.MigrationScript { + return []plugin.MigrationScript{ + new(addInitTables), + new(addUserPhotoUrl), + new(addMissingTables), + new(addTaskTransformationFields), + new(addScopeConfigIssueTypeFields), + new(addConnectionIdToAsanaScopeConfigs), + } +} diff --git a/backend/plugins/asana/models/project.go b/backend/plugins/asana/models/project.go new file mode 100644 index 00000000000..498bcd97ef1 --- /dev/null +++ b/backend/plugins/asana/models/project.go @@ -0,0 +1,63 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package models + +import ( + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/core/plugin" +) + +var _ plugin.ToolLayerScope = (*AsanaProject)(nil) + +type AsanaProject struct { + common.Scope `mapstructure:",squash"` + Gid string `json:"gid" mapstructure:"gid" gorm:"type:varchar(255);primaryKey"` + Name string `json:"name" mapstructure:"name" gorm:"type:varchar(255)"` + ResourceType string `json:"resourceType" mapstructure:"resourceType" gorm:"type:varchar(32)"` + Archived bool `json:"archived" mapstructure:"archived"` + WorkspaceGid string `json:"workspaceGid" mapstructure:"workspaceGid" gorm:"type:varchar(255)"` + PermalinkUrl string `json:"permalinkUrl" mapstructure:"permalinkUrl" gorm:"type:varchar(512)"` +} + +func (p AsanaProject) ScopeId() string { + return p.Gid +} + +func (p AsanaProject) ScopeName() string { + return p.Name +} + +func (p AsanaProject) ScopeFullName() string { + return p.Name +} + +func (p AsanaProject) ScopeParams() interface{} { + return &AsanaApiParams{ + ConnectionId: p.ConnectionId, + ProjectId: p.Gid, + } +} + +func (AsanaProject) TableName() string { + return "_tool_asana_projects" +} + +type AsanaApiParams struct { + ConnectionId uint64 + ProjectId string +} diff --git a/backend/plugins/asana/models/scope_config.go b/backend/plugins/asana/models/scope_config.go new file mode 100644 index 00000000000..560e645a5e5 --- /dev/null +++ b/backend/plugins/asana/models/scope_config.go @@ -0,0 +1,41 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "github.com/apache/incubator-devlake/core/models/common" +) + +type AsanaScopeConfig struct { + common.ScopeConfig `mapstructure:",squash" json:",inline" gorm:"embedded"` + + // Issue type mapping using regex patterns (like GitHub) + // Tags matching these patterns will classify the task type + IssueTypeRequirement string `mapstructure:"issueTypeRequirement,omitempty" json:"issueTypeRequirement" gorm:"type:varchar(255)"` + IssueTypeBug string `mapstructure:"issueTypeBug,omitempty" json:"issueTypeBug" gorm:"type:varchar(255)"` + IssueTypeIncident string `mapstructure:"issueTypeIncident,omitempty" json:"issueTypeIncident" gorm:"type:varchar(255)"` +} + +func (AsanaScopeConfig) TableName() string { + return "_tool_asana_scope_configs" +} + +func (a *AsanaScopeConfig) SetConnectionId(c *AsanaScopeConfig, connectionId uint64) { + c.ConnectionId = connectionId + c.ScopeConfig.ConnectionId = connectionId +} diff --git a/backend/plugins/asana/models/section.go b/backend/plugins/asana/models/section.go new file mode 100644 index 00000000000..3ebc17b8a1b --- /dev/null +++ b/backend/plugins/asana/models/section.go @@ -0,0 +1,35 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "github.com/apache/incubator-devlake/core/models/common" +) + +type AsanaSection struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + ProjectGid string `gorm:"type:varchar(255);index"` + common.NoPKModel +} + +func (AsanaSection) TableName() string { + return "_tool_asana_sections" +} diff --git a/backend/plugins/asana/models/story.go b/backend/plugins/asana/models/story.go new file mode 100644 index 00000000000..41386a41e24 --- /dev/null +++ b/backend/plugins/asana/models/story.go @@ -0,0 +1,47 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package models + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/common" +) + +// AsanaStory represents comments and system-generated stories on tasks +type AsanaStory struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + ResourceSubtype string `gorm:"type:varchar(64)"` + Text string `gorm:"type:text"` + HtmlText string `gorm:"type:text"` + IsPinned bool `json:"isPinned"` + IsEdited bool `json:"isEdited"` + StickerName string `gorm:"type:varchar(64)"` + CreatedAt time.Time `json:"createdAt"` + CreatedByGid string `gorm:"type:varchar(255)"` + CreatedByName string `gorm:"type:varchar(255)"` + TaskGid string `gorm:"type:varchar(255);index"` + TargetGid string `gorm:"type:varchar(255);index"` + common.NoPKModel +} + +func (AsanaStory) TableName() string { + return "_tool_asana_stories" +} diff --git a/backend/plugins/asana/models/tag.go b/backend/plugins/asana/models/tag.go new file mode 100644 index 00000000000..2ab29555ad2 --- /dev/null +++ b/backend/plugins/asana/models/tag.go @@ -0,0 +1,50 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package models + +import ( + "github.com/apache/incubator-devlake/core/models/common" +) + +type AsanaTag struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + Color string `gorm:"type:varchar(32)"` + Notes string `gorm:"type:text"` + WorkspaceGid string `gorm:"type:varchar(255);index"` + PermalinkUrl string `gorm:"type:varchar(512)"` + common.NoPKModel +} + +func (AsanaTag) TableName() string { + return "_tool_asana_tags" +} + +// AsanaTaskTag is a many-to-many relationship between tasks and tags +type AsanaTaskTag struct { + ConnectionId uint64 `gorm:"primaryKey"` + TaskGid string `gorm:"primaryKey;type:varchar(255)"` + TagGid string `gorm:"primaryKey;type:varchar(255)"` + common.NoPKModel +} + +func (AsanaTaskTag) TableName() string { + return "_tool_asana_task_tags" +} diff --git a/backend/plugins/asana/models/task.go b/backend/plugins/asana/models/task.go new file mode 100644 index 00000000000..fce6bd9388b --- /dev/null +++ b/backend/plugins/asana/models/task.go @@ -0,0 +1,66 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package models + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/common" +) + +type AsanaTask struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(512)"` + Notes string `gorm:"type:text"` + ResourceType string `gorm:"type:varchar(32)"` + ResourceSubtype string `gorm:"type:varchar(32)"` // default_task, milestone, section, approval + Completed bool `json:"completed"` + CompletedAt *time.Time `json:"completedAt"` + DueOn *time.Time `gorm:"type:date" json:"dueOn"` + CreatedAt time.Time `json:"createdAt"` + ModifiedAt *time.Time `json:"modifiedAt"` + PermalinkUrl string `gorm:"type:varchar(512)"` + ProjectGid string `gorm:"type:varchar(255);index"` + SectionGid string `gorm:"type:varchar(255);index"` + SectionName string `gorm:"type:varchar(255)"` // For status mapping + AssigneeGid string `gorm:"type:varchar(255)"` + AssigneeName string `gorm:"type:varchar(255)"` + CreatorGid string `gorm:"type:varchar(255)"` + CreatorName string `gorm:"type:varchar(255)"` + ParentGid string `gorm:"type:varchar(255);index"` + NumSubtasks int `json:"numSubtasks"` + + // Transformed fields for domain layer + StdType string `gorm:"type:varchar(255)"` // Standard type: REQUIREMENT, BUG, INCIDENT, EPIC, TASK, SUBTASK + StdStatus string `gorm:"type:varchar(255)"` // Standard status: TODO, IN_PROGRESS, DONE + + // Custom field values (extracted during transformation) + Priority string `gorm:"type:varchar(255)"` // Priority from custom field + StoryPoint *float64 `json:"storyPoint"` // Story points from custom field + Severity string `gorm:"type:varchar(255)"` // Severity from custom field + + // Lead time tracking + LeadTimeMinutes *uint `json:"leadTimeMinutes"` + + common.NoPKModel +} + +func (AsanaTask) TableName() string { + return "_tool_asana_tasks" +} diff --git a/backend/plugins/asana/models/team.go b/backend/plugins/asana/models/team.go new file mode 100644 index 
00000000000..1488a0fd00c --- /dev/null +++ b/backend/plugins/asana/models/team.go @@ -0,0 +1,38 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "github.com/apache/incubator-devlake/core/models/common" +) + +type AsanaTeam struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + Description string `gorm:"type:text"` + HtmlDescription string `gorm:"type:text"` + OrganizationGid string `gorm:"type:varchar(255);index"` + PermalinkUrl string `gorm:"type:varchar(512)"` + common.NoPKModel +} + +func (AsanaTeam) TableName() string { + return "_tool_asana_teams" +} diff --git a/backend/plugins/asana/models/user.go b/backend/plugins/asana/models/user.go new file mode 100644 index 00000000000..92681195945 --- /dev/null +++ b/backend/plugins/asana/models/user.go @@ -0,0 +1,37 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "github.com/apache/incubator-devlake/core/models/common" +) + +type AsanaUser struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + Email string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + PhotoUrl string `gorm:"type:varchar(512)"` + WorkspaceGids string `gorm:"type:text"` // JSON array of workspace GIDs + common.NoPKModel +} + +func (AsanaUser) TableName() string { + return "_tool_asana_users" +} diff --git a/backend/plugins/asana/models/workspace.go b/backend/plugins/asana/models/workspace.go new file mode 100644 index 00000000000..101c249b358 --- /dev/null +++ b/backend/plugins/asana/models/workspace.go @@ -0,0 +1,35 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "github.com/apache/incubator-devlake/core/models/common" +) + +type AsanaWorkspace struct { + ConnectionId uint64 `gorm:"primaryKey"` + Gid string `gorm:"primaryKey;type:varchar(255)"` + Name string `gorm:"type:varchar(255)"` + ResourceType string `gorm:"type:varchar(32)"` + IsOrganization bool `json:"isOrganization"` + common.NoPKModel +} + +func (AsanaWorkspace) TableName() string { + return "_tool_asana_workspaces" +} diff --git a/backend/plugins/asana/tasks/api_client.go b/backend/plugins/asana/tasks/api_client.go new file mode 100644 index 00000000000..25abd421bdc --- /dev/null +++ b/backend/plugins/asana/tasks/api_client.go @@ -0,0 +1,40 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +func CreateApiClient(taskCtx plugin.TaskContext, connection *models.AsanaConnection) (*api.ApiAsyncClient, errors.Error) { + if connection.GetEndpoint() == "" { + connection.Endpoint = "https://app.asana.com/api/1.0/" + } + apiClient, err := api.NewApiClientFromConnection(taskCtx.GetContext(), taskCtx, connection) + if err != nil { + return nil, err + } + asyncApiClient, err := api.CreateAsyncApiClient(taskCtx, apiClient, nil) + if err != nil { + return nil, err + } + return asyncApiClient, nil +} diff --git a/backend/plugins/asana/tasks/project_collector.go b/backend/plugins/asana/tasks/project_collector.go new file mode 100644 index 00000000000..5ae69a68d63 --- /dev/null +++ b/backend/plugins/asana/tasks/project_collector.go @@ -0,0 +1,75 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + "net/http" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +const rawProjectTable = "asana_projects" + +var _ plugin.SubTaskEntryPoint = CollectProject + +var CollectProjectMeta = plugin.SubTaskMeta{ + Name: "CollectProject", + EntryPoint: CollectProject, + EnabledByDefault: true, + Description: "Collect project data from Asana API", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +type asanaDataWrapper struct { + Data json.RawMessage `json:"data"` +} + +func CollectProject(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*AsanaTaskData) + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: rawProjectTable, + }, + ApiClient: data.ApiClient, + UrlTemplate: "projects/{{ .Params.ProjectId }}", + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var w asanaDataWrapper + err := api.UnmarshalResponse(res, &w) + if err != nil { + return nil, err + } + if len(w.Data) == 0 { + return nil, nil + } + return []json.RawMessage{w.Data}, nil + }, + }) + if err != nil { + return err + } + return collector.Execute() +} diff --git a/backend/plugins/asana/tasks/project_convertor.go b/backend/plugins/asana/tasks/project_convertor.go new file mode 100644 index 00000000000..cd6f89576ec --- /dev/null +++ b/backend/plugins/asana/tasks/project_convertor.go @@ -0,0 +1,80 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "reflect" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/domainlayer" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ConvertProject + +var ConvertProjectMeta = plugin.SubTaskMeta{ + Name: "ConvertProject", + EntryPoint: ConvertProject, + EnabledByDefault: true, + Description: "Convert tool layer Asana projects into domain layer boards", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ConvertProject(taskCtx plugin.SubTaskContext) errors.Error { + rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, rawProjectTable) + db := taskCtx.GetDal() + connectionId := data.Options.ConnectionId + projectId := data.Options.ProjectId + + clauses := []dal.Clause{ + dal.From(&models.AsanaProject{}), + dal.Where("connection_id = ? AND gid = ?", connectionId, projectId), + } + cursor, err := db.Cursor(clauses...) 
+ if err != nil { + return err + } + defer cursor.Close() + + boardIdGen := didgen.NewDomainIdGenerator(&models.AsanaProject{}) + + converter, err := helper.NewDataConverter(helper.DataConverterArgs{ + RawDataSubTaskArgs: *rawDataSubTaskArgs, + InputRowType: reflect.TypeOf(models.AsanaProject{}), + Input: cursor, + Convert: func(inputRow interface{}) ([]interface{}, errors.Error) { + toolProject := inputRow.(*models.AsanaProject) + domainBoard := &ticket.Board{ + DomainEntity: domainlayer.DomainEntity{Id: boardIdGen.Generate(toolProject.ConnectionId, toolProject.Gid)}, + Name: toolProject.Name, + Url: toolProject.PermalinkUrl, + Type: "asana", + } + return []interface{}{domainBoard}, nil + }, + }) + if err != nil { + return err + } + return converter.Execute() +} diff --git a/backend/plugins/asana/tasks/project_extractor.go b/backend/plugins/asana/tasks/project_extractor.go new file mode 100644 index 00000000000..dba73b8a654 --- /dev/null +++ b/backend/plugins/asana/tasks/project_extractor.go @@ -0,0 +1,88 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractProject + +var ExtractProjectMeta = plugin.SubTaskMeta{ + Name: "ExtractProject", + EntryPoint: ExtractProject, + EnabledByDefault: true, + Description: "Extract raw data into tool layer table _tool_asana_projects", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +type asanaApiProject struct { + Gid string `json:"gid"` + Name string `json:"name"` + ResourceType string `json:"resource_type"` + Archived bool `json:"archived"` + PermalinkUrl string `json:"permalink_url"` + Workspace *struct { + Gid string `json:"gid"` + } `json:"workspace"` +} + +func ExtractProject(taskCtx plugin.SubTaskContext) errors.Error { + taskData := taskCtx.GetData().(*AsanaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: taskData.Options.ConnectionId, + ProjectId: taskData.Options.ProjectId, + }, + Table: rawProjectTable, + }, + Extract: func(resData *api.RawData) ([]interface{}, errors.Error) { + apiProject := &asanaApiProject{} + err := errors.Convert(json.Unmarshal(resData.Data, apiProject)) + if err != nil { + return nil, err + } + workspaceGid := "" + if apiProject.Workspace != nil { + workspaceGid = apiProject.Workspace.Gid + } + toolProject := &models.AsanaProject{ + Gid: apiProject.Gid, + Name: apiProject.Name, + ResourceType: apiProject.ResourceType, + Archived: apiProject.Archived, + PermalinkUrl: apiProject.PermalinkUrl, + WorkspaceGid: workspaceGid, + } + toolProject.ConnectionId = taskData.Options.ConnectionId + toolProject.ScopeConfigId = taskData.Options.ScopeConfigId + return []interface{}{toolProject}, nil + }, 
+ }) + if err != nil { + return err + } + return extractor.Execute() +} diff --git a/backend/plugins/asana/tasks/section_collector.go b/backend/plugins/asana/tasks/section_collector.go new file mode 100644 index 00000000000..782f47a90b7 --- /dev/null +++ b/backend/plugins/asana/tasks/section_collector.go @@ -0,0 +1,72 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + "net/http" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +const rawSectionTable = "asana_sections" + +var _ plugin.SubTaskEntryPoint = CollectSection + +var CollectSectionMeta = plugin.SubTaskMeta{ + Name: "CollectSection", + EntryPoint: CollectSection, + EnabledByDefault: true, + Description: "Collect section data from Asana API", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +type asanaListWrapper struct { + Data []json.RawMessage `json:"data"` +} + +func CollectSection(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*AsanaTaskData) + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: rawSectionTable, + }, + ApiClient: data.ApiClient, + UrlTemplate: "projects/{{ .Params.ProjectId }}/sections", + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var w asanaListWrapper + err := api.UnmarshalResponse(res, &w) + if err != nil { + return nil, err + } + return w.Data, nil + }, + }) + if err != nil { + return err + } + return collector.Execute() +} diff --git a/backend/plugins/asana/tasks/section_extractor.go b/backend/plugins/asana/tasks/section_extractor.go new file mode 100644 index 00000000000..3c6e90a9b55 --- /dev/null +++ b/backend/plugins/asana/tasks/section_extractor.go @@ -0,0 +1,83 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractSection + +var ExtractSectionMeta = plugin.SubTaskMeta{ + Name: "ExtractSection", + EntryPoint: ExtractSection, + EnabledByDefault: true, + Description: "Extract raw data into tool layer table _tool_asana_sections", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +type asanaApiSection struct { + Gid string `json:"gid"` + Name string `json:"name"` + ResourceType string `json:"resource_type"` + Project *struct { + Gid string `json:"gid"` + } `json:"project"` +} + +func ExtractSection(taskCtx plugin.SubTaskContext) errors.Error { + taskData := taskCtx.GetData().(*AsanaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: taskData.Options.ConnectionId, + ProjectId: taskData.Options.ProjectId, + }, + Table: rawSectionTable, + }, + Extract: func(resData *api.RawData) ([]interface{}, errors.Error) { + apiSection := &asanaApiSection{} + err := errors.Convert(json.Unmarshal(resData.Data, apiSection)) + if err != nil { + return nil, err + } + projectGid := 
taskData.Options.ProjectId + if apiSection.Project != nil { + projectGid = apiSection.Project.Gid + } + toolSection := &models.AsanaSection{ + ConnectionId: taskData.Options.ConnectionId, + Gid: apiSection.Gid, + Name: apiSection.Name, + ResourceType: apiSection.ResourceType, + ProjectGid: projectGid, + } + return []interface{}{toolSection}, nil + }, + }) + if err != nil { + return err + } + return extractor.Execute() +} diff --git a/backend/plugins/asana/tasks/story_collector.go b/backend/plugins/asana/tasks/story_collector.go new file mode 100644 index 00000000000..7fa8896900e --- /dev/null +++ b/backend/plugins/asana/tasks/story_collector.go @@ -0,0 +1,100 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + "net/http" + "net/url" + "reflect" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +const rawStoryTable = "asana_stories" + +var _ plugin.SubTaskEntryPoint = CollectStory + +var CollectStoryMeta = plugin.SubTaskMeta{ + Name: "CollectStory", + EntryPoint: CollectStory, + EnabledByDefault: true, + Description: "Collect story/comment data from Asana API for each task", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func CollectStory(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*AsanaTaskData) + db := taskCtx.GetDal() + + // Get all tasks for this project + clauses := []dal.Clause{ + dal.Select("gid"), + dal.From(&models.AsanaTask{}), + dal.Where("connection_id = ? AND project_gid = ?", data.Options.ConnectionId, data.Options.ProjectId), + } + cursor, err := db.Cursor(clauses...) 
+ if err != nil { + return err + } + + iterator, err := api.NewDalCursorIterator(db, cursor, reflect.TypeOf(simpleTask{})) + if err != nil { + return err + } + + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: rawStoryTable, + }, + ApiClient: data.ApiClient, + Input: iterator, + UrlTemplate: "tasks/{{ .Input.Gid }}/stories", + Query: func(reqData *api.RequestData) (url.Values, errors.Error) { + query := url.Values{} + query.Set("opt_fields", "gid,resource_type,resource_subtype,text,html_text,is_pinned,is_edited,sticker_name,created_at,created_by,target") + query.Set("limit", "100") + return query, nil + }, + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var resp asanaListResponse + err := api.UnmarshalResponse(res, &resp) + if err != nil { + return nil, err + } + return resp.Data, nil + }, + }) + if err != nil { + return err + } + return collector.Execute() +} + +type simpleTask struct { + Gid string +} diff --git a/backend/plugins/asana/tasks/story_convertor.go b/backend/plugins/asana/tasks/story_convertor.go new file mode 100644 index 00000000000..f5df43b2ab7 --- /dev/null +++ b/backend/plugins/asana/tasks/story_convertor.go @@ -0,0 +1,88 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "reflect" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/domainlayer" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ConvertStory + +var ConvertStoryMeta = plugin.SubTaskMeta{ + Name: "ConvertStory", + EntryPoint: ConvertStory, + EnabledByDefault: true, + Description: "Convert tool layer Asana stories into domain layer issue comments", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ConvertStory(taskCtx plugin.SubTaskContext) errors.Error { + rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, rawStoryTable) + db := taskCtx.GetDal() + connectionId := data.Options.ConnectionId + projectId := data.Options.ProjectId + + // Only convert comment-type stories (not system-generated ones) + clauses := []dal.Clause{ + dal.From(&models.AsanaStory{}), + dal.Join("LEFT JOIN _tool_asana_tasks ON _tool_asana_stories.task_gid = _tool_asana_tasks.gid AND _tool_asana_stories.connection_id = _tool_asana_tasks.connection_id"), + dal.Where("_tool_asana_stories.connection_id = ? AND _tool_asana_tasks.project_gid = ? 
AND _tool_asana_stories.resource_subtype = ?", + connectionId, projectId, "comment_added"), + } + cursor, err := db.Cursor(clauses...) + if err != nil { + return err + } + defer cursor.Close() + + commentIdGen := didgen.NewDomainIdGenerator(&models.AsanaStory{}) + taskIdGen := didgen.NewDomainIdGenerator(&models.AsanaTask{}) + userIdGen := didgen.NewDomainIdGenerator(&models.AsanaUser{}) + + converter, err := helper.NewDataConverter(helper.DataConverterArgs{ + RawDataSubTaskArgs: *rawDataSubTaskArgs, + InputRowType: reflect.TypeOf(models.AsanaStory{}), + Input: cursor, + Convert: func(inputRow interface{}) ([]interface{}, errors.Error) { + toolStory := inputRow.(*models.AsanaStory) + domainComment := &ticket.IssueComment{ + DomainEntity: domainlayer.DomainEntity{Id: commentIdGen.Generate(toolStory.ConnectionId, toolStory.Gid)}, + IssueId: taskIdGen.Generate(toolStory.ConnectionId, toolStory.TaskGid), + Body: toolStory.Text, + CreatedDate: toolStory.CreatedAt, + } + if toolStory.CreatedByGid != "" { + domainComment.AccountId = userIdGen.Generate(toolStory.ConnectionId, toolStory.CreatedByGid) + } + return []interface{}{domainComment}, nil + }, + }) + if err != nil { + return err + } + return converter.Execute() +} diff --git a/backend/plugins/asana/tasks/story_extractor.go b/backend/plugins/asana/tasks/story_extractor.go new file mode 100644 index 00000000000..e0240f2da88 --- /dev/null +++ b/backend/plugins/asana/tasks/story_extractor.go @@ -0,0 +1,119 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "time" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractStory + +var ExtractStoryMeta = plugin.SubTaskMeta{ + Name: "ExtractStory", + EntryPoint: ExtractStory, + EnabledByDefault: true, + Description: "Extract raw data into tool layer table _tool_asana_stories", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +type asanaApiStory struct { + Gid string `json:"gid"` + ResourceType string `json:"resource_type"` + ResourceSubtype string `json:"resource_subtype"` + Text string `json:"text"` + HtmlText string `json:"html_text"` + IsPinned bool `json:"is_pinned"` + IsEdited bool `json:"is_edited"` + StickerName string `json:"sticker_name"` + CreatedAt time.Time `json:"created_at"` + CreatedBy *struct { + Gid string `json:"gid"` + Name string `json:"name"` + } `json:"created_by"` + Target *struct { + Gid string `json:"gid"` + } `json:"target"` +} + +func ExtractStory(taskCtx plugin.SubTaskContext) errors.Error { + taskData := taskCtx.GetData().(*AsanaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: taskData.Options.ConnectionId, + ProjectId: taskData.Options.ProjectId, + }, + Table: rawStoryTable, + }, + Extract: func(resData *api.RawData) ([]interface{}, 
errors.Error) { + apiStory := &asanaApiStory{} + err := errors.Convert(json.Unmarshal(resData.Data, apiStory)) + if err != nil { + return nil, err + } + + // Extract task GID from input + var input struct { + Gid string `json:"gid"` + } + if err := errors.Convert(json.Unmarshal(resData.Input, &input)); err != nil { + return nil, err + } + + createdByGid := "" + createdByName := "" + if apiStory.CreatedBy != nil { + createdByGid = apiStory.CreatedBy.Gid + createdByName = apiStory.CreatedBy.Name + } + targetGid := "" + if apiStory.Target != nil { + targetGid = apiStory.Target.Gid + } + + toolStory := &models.AsanaStory{ + ConnectionId: taskData.Options.ConnectionId, + Gid: apiStory.Gid, + ResourceType: apiStory.ResourceType, + ResourceSubtype: apiStory.ResourceSubtype, + Text: apiStory.Text, + HtmlText: apiStory.HtmlText, + IsPinned: apiStory.IsPinned, + IsEdited: apiStory.IsEdited, + StickerName: apiStory.StickerName, + CreatedAt: apiStory.CreatedAt, + CreatedByGid: createdByGid, + CreatedByName: createdByName, + TaskGid: input.Gid, + TargetGid: targetGid, + } + return []interface{}{toolStory}, nil + }, + }) + if err != nil { + return err + } + return extractor.Execute() +} diff --git a/backend/plugins/asana/tasks/subtask_collector.go b/backend/plugins/asana/tasks/subtask_collector.go new file mode 100644 index 00000000000..a2f22cd3a1a --- /dev/null +++ b/backend/plugins/asana/tasks/subtask_collector.go @@ -0,0 +1,97 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "net/http" + "net/url" + "reflect" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +const rawSubtaskTable = "asana_subtasks" + +var _ plugin.SubTaskEntryPoint = CollectSubtask + +var CollectSubtaskMeta = plugin.SubTaskMeta{ + Name: "CollectSubtask", + EntryPoint: CollectSubtask, + EnabledByDefault: true, + Description: "Collect subtask data from Asana API for each task", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func CollectSubtask(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*AsanaTaskData) + db := taskCtx.GetDal() + + // Get all tasks that have subtasks + clauses := []dal.Clause{ + dal.Select("gid"), + dal.From(&models.AsanaTask{}), + dal.Where("connection_id = ? AND project_gid = ? AND num_subtasks > 0", + data.Options.ConnectionId, data.Options.ProjectId), + } + cursor, err := db.Cursor(clauses...) 
+ if err != nil { + return err + } + + iterator, err := api.NewDalCursorIterator(db, cursor, reflect.TypeOf(simpleTask{})) + if err != nil { + return err + } + + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: rawSubtaskTable, + }, + ApiClient: data.ApiClient, + Input: iterator, + UrlTemplate: "tasks/{{ .Input.Gid }}/subtasks", + Query: func(reqData *api.RequestData) (url.Values, errors.Error) { + query := url.Values{} + query.Set("opt_fields", "gid,name,notes,resource_type,resource_subtype,completed,completed_at,due_on,created_at,modified_at,permalink_url,assignee,created_by,parent,num_subtasks,memberships.section,memberships.project") + query.Set("limit", "100") + return query, nil + }, + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var resp asanaListResponse + err := api.UnmarshalResponse(res, &resp) + if err != nil { + return nil, err + } + return resp.Data, nil + }, + }) + if err != nil { + return err + } + return collector.Execute() +} diff --git a/backend/plugins/asana/tasks/subtask_extractor.go b/backend/plugins/asana/tasks/subtask_extractor.go new file mode 100644 index 00000000000..0e0b310facb --- /dev/null +++ b/backend/plugins/asana/tasks/subtask_extractor.go @@ -0,0 +1,124 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "time" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractSubtask + +var ExtractSubtaskMeta = plugin.SubTaskMeta{ + Name: "ExtractSubtask", + EntryPoint: ExtractSubtask, + EnabledByDefault: true, + Description: "Extract raw subtask data into tool layer table _tool_asana_tasks", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ExtractSubtask(taskCtx plugin.SubTaskContext) errors.Error { + taskData := taskCtx.GetData().(*AsanaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: taskData.Options.ConnectionId, + ProjectId: taskData.Options.ProjectId, + }, + Table: rawSubtaskTable, + }, + Extract: func(resData *api.RawData) ([]interface{}, errors.Error) { + apiTask := &asanaApiTask{} + err := errors.Convert(json.Unmarshal(resData.Data, apiTask)) + if err != nil { + return nil, err + } + + // Get parent GID from input + var input struct { + Gid string `json:"gid"` + } + if err := errors.Convert(json.Unmarshal(resData.Input, &input)); err != nil { + return nil, err + } + + assigneeGid := "" + assigneeName := "" + if apiTask.Assignee != nil { + assigneeGid = apiTask.Assignee.Gid + assigneeName = apiTask.Assignee.Name + } + creatorGid := "" + creatorName := "" + if 
apiTask.CreatedBy != nil { + creatorGid = apiTask.CreatedBy.Gid + creatorName = apiTask.CreatedBy.Name + } + sectionGid := "" + projectGid := taskData.Options.ProjectId + for _, m := range apiTask.Memberships { + if m.Project != nil { + projectGid = m.Project.Gid + } + if m.Section != nil && m.Section.Gid != "" { + sectionGid = m.Section.Gid + break + } + } + + var dueOn *time.Time + if apiTask.DueOn != "" { + dueOn = parseAsanaDate(apiTask.DueOn) + } + + toolTask := &models.AsanaTask{ + ConnectionId: taskData.Options.ConnectionId, + Gid: apiTask.Gid, + Name: apiTask.Name, + Notes: apiTask.Notes, + ResourceType: apiTask.ResourceType, + ResourceSubtype: apiTask.ResourceSubtype, + Completed: apiTask.Completed, + CompletedAt: apiTask.CompletedAt, + DueOn: dueOn, + CreatedAt: apiTask.CreatedAt, + ModifiedAt: apiTask.ModifiedAt, + PermalinkUrl: apiTask.PermalinkUrl, + ProjectGid: projectGid, + SectionGid: sectionGid, + AssigneeGid: assigneeGid, + AssigneeName: assigneeName, + CreatorGid: creatorGid, + CreatorName: creatorName, + ParentGid: input.Gid, // Parent is the task that has subtasks + NumSubtasks: apiTask.NumSubtasks, + } + return []interface{}{toolTask}, nil + }, + }) + if err != nil { + return err + } + return extractor.Execute() +} diff --git a/backend/plugins/asana/tasks/tag_collector.go b/backend/plugins/asana/tasks/tag_collector.go new file mode 100644 index 00000000000..0f3d218664e --- /dev/null +++ b/backend/plugins/asana/tasks/tag_collector.go @@ -0,0 +1,96 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "net/http" + "net/url" + "reflect" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +const rawTagTable = "asana_tags" + +var _ plugin.SubTaskEntryPoint = CollectTag + +var CollectTagMeta = plugin.SubTaskMeta{ + Name: "CollectTag", + EntryPoint: CollectTag, + EnabledByDefault: true, + Description: "Collect tag data from Asana API for each task", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func CollectTag(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*AsanaTaskData) + db := taskCtx.GetDal() + + // Get all tasks for this project + clauses := []dal.Clause{ + dal.Select("gid"), + dal.From(&models.AsanaTask{}), + dal.Where("connection_id = ? AND project_gid = ?", data.Options.ConnectionId, data.Options.ProjectId), + } + cursor, err := db.Cursor(clauses...) 
+ if err != nil { + return err + } + + iterator, err := api.NewDalCursorIterator(db, cursor, reflect.TypeOf(simpleTask{})) + if err != nil { + return err + } + + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: rawTagTable, + }, + ApiClient: data.ApiClient, + Input: iterator, + UrlTemplate: "tasks/{{ .Input.Gid }}/tags", + Query: func(reqData *api.RequestData) (url.Values, errors.Error) { + query := url.Values{} + query.Set("opt_fields", "gid,name,resource_type,color,notes,permalink_url") + query.Set("limit", "100") + return query, nil + }, + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var resp asanaListResponse + err := api.UnmarshalResponse(res, &resp) + if err != nil { + return nil, err + } + return resp.Data, nil + }, + }) + if err != nil { + return err + } + return collector.Execute() +} diff --git a/backend/plugins/asana/tasks/tag_extractor.go b/backend/plugins/asana/tasks/tag_extractor.go new file mode 100644 index 00000000000..3e7db18ac4c --- /dev/null +++ b/backend/plugins/asana/tasks/tag_extractor.go @@ -0,0 +1,98 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractTag + +var ExtractTagMeta = plugin.SubTaskMeta{ + Name: "ExtractTag", + EntryPoint: ExtractTag, + EnabledByDefault: true, + Description: "Extract raw data into tool layer tables _tool_asana_tags and _tool_asana_task_tags", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +type asanaApiTag struct { + Gid string `json:"gid"` + Name string `json:"name"` + ResourceType string `json:"resource_type"` + Color string `json:"color"` + Notes string `json:"notes"` + PermalinkUrl string `json:"permalink_url"` +} + +func ExtractTag(taskCtx plugin.SubTaskContext) errors.Error { + taskData := taskCtx.GetData().(*AsanaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: taskData.Options.ConnectionId, + ProjectId: taskData.Options.ProjectId, + }, + Table: rawTagTable, + }, + Extract: func(resData *api.RawData) ([]interface{}, errors.Error) { + apiTag := &asanaApiTag{} + err := errors.Convert(json.Unmarshal(resData.Data, apiTag)) + if err != nil { + return nil, err + } + + // Get task GID from input + var input struct { + Gid string `json:"gid"` + } + if err := errors.Convert(json.Unmarshal(resData.Input, &input)); err != nil { + return nil, err + } + + toolTag := &models.AsanaTag{ + ConnectionId: taskData.Options.ConnectionId, + Gid: apiTag.Gid, + Name: apiTag.Name, + ResourceType: apiTag.ResourceType, + Color: apiTag.Color, + Notes: apiTag.Notes, + PermalinkUrl: apiTag.PermalinkUrl, + } + + // Create the task-tag 
relationship + taskTag := &models.AsanaTaskTag{ + ConnectionId: taskData.Options.ConnectionId, + TaskGid: input.Gid, + TagGid: apiTag.Gid, + } + + return []interface{}{toolTag, taskTag}, nil + }, + }) + if err != nil { + return err + } + return extractor.Execute() +} diff --git a/backend/plugins/asana/tasks/task_collector.go b/backend/plugins/asana/tasks/task_collector.go new file mode 100644 index 00000000000..8e1b72c138b --- /dev/null +++ b/backend/plugins/asana/tasks/task_collector.go @@ -0,0 +1,102 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/
+
+package tasks
+
+import (
+	"encoding/json"
+	"net/http"
+	"net/url"
+
+	"github.com/apache/incubator-devlake/core/errors"
+	"github.com/apache/incubator-devlake/core/plugin"
+	"github.com/apache/incubator-devlake/helpers/pluginhelper/api"
+	"github.com/apache/incubator-devlake/plugins/asana/models"
+)
+
+const rawTaskTable = "asana_tasks"
+
+var _ plugin.SubTaskEntryPoint = CollectTask
+
+var CollectTaskMeta = plugin.SubTaskMeta{
+	Name:             "CollectTask",
+	EntryPoint:       CollectTask,
+	EnabledByDefault: true,
+	Description:      "Collect task data from Asana API",
+	DomainTypes:      []string{plugin.DOMAIN_TYPE_TICKET},
+}
+
+// asanaTaskListResponse models one page of Asana's "list tasks" response.
+// next_page is absent (nil) on the final page.
+type asanaTaskListResponse struct {
+	Data     []json.RawMessage `json:"data"`
+	NextPage *struct {
+		Offset string `json:"offset"`
+		Path   string `json:"path"`
+		URI    string `json:"uri"`
+	} `json:"next_page"`
+}
+
+// CollectTask collects raw task data for the configured project into the
+// raw table, following Asana's offset-based pagination (next_page.offset).
+func CollectTask(taskCtx plugin.SubTaskContext) errors.Error {
+	data := taskCtx.GetData().(*AsanaTaskData)
+	collector, err := api.NewApiCollector(api.ApiCollectorArgs{
+		RawDataSubTaskArgs: api.RawDataSubTaskArgs{
+			Ctx: taskCtx,
+			Params: models.AsanaApiParams{
+				ConnectionId: data.Options.ConnectionId,
+				ProjectId:    data.Options.ProjectId,
+			},
+			Table: rawTaskTable,
+		},
+		ApiClient:   data.ApiClient,
+		PageSize:    100,
+		UrlTemplate: "projects/{{ .Params.ProjectId }}/tasks",
+		Query: func(reqData *api.RequestData) (url.Values, errors.Error) {
+			query := url.Values{}
+			query.Set("limit", "100")
+			// Request all fields needed for transformation including section name
+			query.Set("opt_fields", "gid,name,notes,resource_type,resource_subtype,completed,completed_at,due_on,created_at,modified_at,permalink_url,assignee,assignee.name,created_by,created_by.name,parent,num_subtasks,memberships.section,memberships.section.name,memberships.project")
+			// CustomData carries the pagination offset from the previous page.
+			if reqData.CustomData != nil {
+				if offset, ok := reqData.CustomData.(string); ok && offset != "" {
+					query.Set("offset", offset)
+				}
+			}
+			return query, nil
+		},
+		GetNextPageCustomData: func(prevReqData *api.RequestData, prevPageResponse *http.Response) (interface{}, errors.Error) {
+			var resp asanaTaskListResponse
+			err := api.UnmarshalResponse(prevPageResponse, &resp)
+			if err != nil {
+				return nil, err
+			}
+			if resp.NextPage != nil && resp.NextPage.Offset != "" {
+				return resp.NextPage.Offset, nil
+			}
+			// BUGFIX: the collector helper only terminates custom-data pagination
+			// when ErrFinishCollect is returned; returning (nil, nil) here would
+			// keep looping with an empty offset, re-fetching the first page forever.
+			return nil, api.ErrFinishCollect
+		},
+		ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) {
+			var w asanaTaskListResponse
+			err := api.UnmarshalResponse(res, &w)
+			if err != nil {
+				return nil, err
+			}
+			return w.Data, nil
+		},
+	})
+	if err != nil {
+		return err
+	}
+	return collector.Execute()
+}
diff --git a/backend/plugins/asana/tasks/task_convertor.go b/backend/plugins/asana/tasks/task_convertor.go
new file mode 100644
index 00000000000..7c981b50d20
--- /dev/null
+++ b/backend/plugins/asana/tasks/task_convertor.go
@@ -0,0 +1,298 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/ + +package tasks + +import ( + "reflect" + "regexp" + "strings" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/domainlayer" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ConvertTask + +var ConvertTaskMeta = plugin.SubTaskMeta{ + Name: "ConvertTask", + EntryPoint: ConvertTask, + EnabledByDefault: true, + Description: "Convert tool layer Asana tasks into domain layer issues and board_issues", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ConvertTask(taskCtx plugin.SubTaskContext) errors.Error { + rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, rawTaskTable) + db := taskCtx.GetDal() + connectionId := data.Options.ConnectionId + projectId := data.Options.ProjectId + + // Get scope config for transformation rules + scopeConfig := getScopeConfig(taskCtx) + + // Get tags for tasks + taskTags := getTaskTags(db, connectionId) + + clauses := []dal.Clause{ + dal.From(&models.AsanaTask{}), + dal.Where("connection_id = ? AND project_gid = ?", connectionId, projectId), + } + cursor, err := db.Cursor(clauses...) 
+ if err != nil { + return err + } + defer cursor.Close() + + taskIdGen := didgen.NewDomainIdGenerator(&models.AsanaTask{}) + boardIdGen := didgen.NewDomainIdGenerator(&models.AsanaProject{}) + accountIdGen := didgen.NewDomainIdGenerator(&models.AsanaUser{}) + + converter, err := helper.NewDataConverter(helper.DataConverterArgs{ + RawDataSubTaskArgs: *rawDataSubTaskArgs, + InputRowType: reflect.TypeOf(models.AsanaTask{}), + Input: cursor, + Convert: func(inputRow interface{}) ([]interface{}, errors.Error) { + toolTask := inputRow.(*models.AsanaTask) + + // Get tags for this task + tags := taskTags[toolTask.Gid] + + // Map type and status using scope config and tags + stdType, stdStatus := getStdTypeAndStatus(toolTask, scopeConfig, tags) + + domainIssue := &ticket.Issue{ + DomainEntity: domainlayer.DomainEntity{Id: taskIdGen.Generate(toolTask.ConnectionId, toolTask.Gid)}, + IssueKey: toolTask.Gid, + Title: toolTask.Name, + Description: toolTask.Notes, + Url: toolTask.PermalinkUrl, + Type: stdType, + OriginalType: toolTask.ResourceSubtype, + Status: stdStatus, + OriginalStatus: getOriginalStatus(toolTask), + StoryPoint: toolTask.StoryPoint, + CreatedDate: &toolTask.CreatedAt, + UpdatedDate: toolTask.ModifiedAt, + ResolutionDate: toolTask.CompletedAt, + DueDate: toolTask.DueOn, + CreatorName: toolTask.CreatorName, + AssigneeName: toolTask.AssigneeName, + LeadTimeMinutes: toolTask.LeadTimeMinutes, + } + + // Set creator and assignee IDs + if toolTask.CreatorGid != "" { + domainIssue.CreatorId = accountIdGen.Generate(connectionId, toolTask.CreatorGid) + } + if toolTask.AssigneeGid != "" { + domainIssue.AssigneeId = accountIdGen.Generate(connectionId, toolTask.AssigneeGid) + } + + // Set parent issue ID if this is a subtask + if toolTask.ParentGid != "" { + domainIssue.ParentIssueId = taskIdGen.Generate(connectionId, toolTask.ParentGid) + // If no type determined and has parent, it's a subtask + if stdType == "" || stdType == ticket.TASK { + domainIssue.Type = 
ticket.SUBTASK + } + } + + // Set subtask flag + domainIssue.IsSubtask = toolTask.ParentGid != "" + + var result []interface{} + result = append(result, domainIssue) + + // Create board issue relationship + boardId := boardIdGen.Generate(connectionId, toolTask.ProjectGid) + boardIssue := &ticket.BoardIssue{ + BoardId: boardId, + IssueId: domainIssue.Id, + } + result = append(result, boardIssue) + + // Create issue assignee if assignee exists + if toolTask.AssigneeGid != "" { + issueAssignee := &ticket.IssueAssignee{ + IssueId: domainIssue.Id, + AssigneeId: domainIssue.AssigneeId, + AssigneeName: toolTask.AssigneeName, + } + result = append(result, issueAssignee) + } + + return result, nil + }, + }) + if err != nil { + return err + } + return converter.Execute() +} + +// getScopeConfig retrieves the scope config for transformation rules +func getScopeConfig(taskCtx plugin.SubTaskContext) *models.AsanaScopeConfig { + logger := taskCtx.GetLogger() + if taskCtx.GetData() == nil { + logger.Info("getScopeConfig: taskCtx.GetData() is nil") + return nil + } + data := taskCtx.GetData().(*AsanaTaskData) + db := taskCtx.GetDal() + + // First try to get by ScopeConfigId from options + if data.Options.ScopeConfigId != 0 { + var scopeConfig models.AsanaScopeConfig + err := db.First(&scopeConfig, dal.Where("id = ?", data.Options.ScopeConfigId)) + if err == nil { + logger.Info("getScopeConfig: Found scope config by ID %d, IssueTypeRequirement=%s, IssueTypeBug=%s, IssueTypeIncident=%s", + data.Options.ScopeConfigId, scopeConfig.IssueTypeRequirement, scopeConfig.IssueTypeBug, scopeConfig.IssueTypeIncident) + return &scopeConfig + } + logger.Info("getScopeConfig: Failed to get scope config by ID %d: %v", data.Options.ScopeConfigId, err) + } else { + logger.Info("getScopeConfig: ScopeConfigId is 0, trying to get from project") + } + + // Try to get scope config from project's scope_config_id + var project models.AsanaProject + err := db.First(&project, dal.Where("connection_id = ? 
AND gid = ?", data.Options.ConnectionId, data.Options.ProjectId))
+	if err != nil {
+		logger.Info("getScopeConfig: Failed to get project: %v", err)
+		return nil
+	}
+
+	if project.ScopeConfigId != 0 {
+		var scopeConfig models.AsanaScopeConfig
+		err := db.First(&scopeConfig, dal.Where("id = ?", project.ScopeConfigId))
+		if err == nil {
+			logger.Info("getScopeConfig: Found scope config from project, IssueTypeRequirement=%s, IssueTypeBug=%s, IssueTypeIncident=%s",
+				scopeConfig.IssueTypeRequirement, scopeConfig.IssueTypeBug, scopeConfig.IssueTypeIncident)
+			return &scopeConfig
+		}
+		logger.Info("getScopeConfig: Failed to get scope config from project: %v", err)
+	} else {
+		logger.Info("getScopeConfig: Project has no scope_config_id")
+	}
+
+	// No config found by options or project; callers fall back to defaults.
+	return nil
+}
+
+// getTaskTags retrieves all tags for tasks and returns a map of taskGid -> []tagName
+// Lookups are best-effort: DAL errors are deliberately swallowed and yield an
+// empty (or partial) map, since tags only enrich type classification.
+// NOTE(review): this loads every task-tag pair for the whole connection, not
+// just the current project — correct, but potentially heavy on large
+// connections; confirm acceptable before scaling up.
+func getTaskTags(db dal.Dal, connectionId uint64) map[string][]string {
+	result := make(map[string][]string)
+
+	var taskTags []models.AsanaTaskTag
+	err := db.All(&taskTags, dal.Where("connection_id = ?", connectionId))
+	if err != nil {
+		return result // best-effort: no tags rather than failing conversion
+	}
+
+	// Get all tag names
+	tagNames := make(map[string]string)
+	var tags []models.AsanaTag
+	err = db.All(&tags, dal.Where("connection_id = ?", connectionId))
+	if err == nil {
+		for _, tag := range tags {
+			tagNames[tag.Gid] = tag.Name
+		}
+	}
+
+	// Build taskGid -> []tagName map
+	for _, tt := range taskTags {
+		if tagName, ok := tagNames[tt.TagGid]; ok { // skip links to unknown tags
+			result[tt.TaskGid] = append(result[tt.TaskGid], tagName)
+		}
+	}
+
+	return result
+}
+
+// getStdTypeAndStatus maps Asana task to standard type and status using regex patterns (like GitHub)
+func getStdTypeAndStatus(task *models.AsanaTask, scopeConfig *models.AsanaScopeConfig, tags []string) (string, string) {
+	// Defaults: plain TASK, and TODO unless the task is completed.
+	stdType := ticket.TASK
+	stdStatus := ticket.TODO
+
+	// Default status based on completion
+	if task.Completed {
+		stdStatus = ticket.DONE
+	}
+
+	// If no scope config, return defaults
+	if scopeConfig ==
nil {
+		return getDefaultType(task), stdStatus
+	}
+
+	// Combine all tags into a single string for matching
+	tagString := strings.ToLower(strings.Join(tags, " "))
+
+	// Match issue type using regex patterns (like GitHub).
+	// Later matches win: INCIDENT overrides BUG overrides REQUIREMENT.
+	if scopeConfig.IssueTypeRequirement != "" && matchPattern(tagString, scopeConfig.IssueTypeRequirement) {
+		stdType = ticket.REQUIREMENT
+	}
+	if scopeConfig.IssueTypeBug != "" && matchPattern(tagString, scopeConfig.IssueTypeBug) {
+		stdType = ticket.BUG
+	}
+	if scopeConfig.IssueTypeIncident != "" && matchPattern(tagString, scopeConfig.IssueTypeIncident) {
+		stdType = ticket.INCIDENT
+	}
+
+	// If no type matched and task is a subtask, mark it as subtask
+	if stdType == ticket.TASK && task.ParentGid != "" {
+		stdType = ticket.SUBTASK
+	}
+
+	return stdType, stdStatus
+}
+
+// getDefaultType returns the default type based on task properties:
+// SUBTASK when the task has a parent, otherwise TASK.
+func getDefaultType(task *models.AsanaTask) string {
+	if task.ParentGid != "" {
+		return ticket.SUBTASK
+	}
+	return ticket.TASK
+}
+
+// matchPattern checks if the input string matches the regex pattern,
+// case-insensitively ((?i) prefix). An empty or invalid pattern is
+// treated as a non-match — compile errors are deliberately swallowed
+// so a bad user-supplied pattern cannot break conversion.
+// NOTE(review): the pattern is recompiled on every call; fine at current
+// call volume (once per configured type per task).
+func matchPattern(input, pattern string) bool {
+	if pattern == "" {
+		return false
+	}
+	re, err := regexp.Compile("(?i)" + pattern)
+	if err != nil {
+		return false // invalid user regex => no match, not an error
+	}
+	return re.MatchString(input)
+}
+
+// getOriginalStatus returns the original status string.
+// Precedence: "completed" for done tasks, then the task's section name
+// (board column) when present, otherwise "incomplete".
+func getOriginalStatus(task *models.AsanaTask) string {
+	if task.Completed {
+		return "completed"
+	}
+	if task.SectionName != "" {
+		return task.SectionName
+	}
+	return "incomplete"
+}
diff --git a/backend/plugins/asana/tasks/task_data.go b/backend/plugins/asana/tasks/task_data.go
new file mode 100644
index 00000000000..b6c82836b7b
--- /dev/null
+++ b/backend/plugins/asana/tasks/task_data.go
@@ -0,0 +1,49 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +func CreateRawDataSubTaskArgs(taskCtx plugin.SubTaskContext, rawTable string) (*api.RawDataSubTaskArgs, *AsanaTaskData) { + data := taskCtx.GetData().(*AsanaTaskData) + params := models.AsanaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + } + return &api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: params, + Table: rawTable, + }, data +} + +type AsanaOptions struct { + ConnectionId uint64 `json:"connectionId" mapstructure:"connectionId"` + ProjectId string `json:"projectId" mapstructure:"projectId"` + ScopeConfigId uint64 `json:"scopeConfigId" mapstructure:"scopeConfigId,omitempty"` +} + +type AsanaTaskData struct { + Options *AsanaOptions + ApiClient *api.ApiAsyncClient + Project *models.AsanaProject +} diff --git a/backend/plugins/asana/tasks/task_extractor.go b/backend/plugins/asana/tasks/task_extractor.go new file mode 100644 index 00000000000..11ce45af1e0 --- /dev/null +++ b/backend/plugins/asana/tasks/task_extractor.go @@ -0,0 +1,162 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "time" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractTask + +var ExtractTaskMeta = plugin.SubTaskMeta{ + Name: "ExtractTask", + EntryPoint: ExtractTask, + EnabledByDefault: true, + Description: "Extract raw data into tool layer table _tool_asana_tasks", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +type asanaApiTask struct { + Gid string `json:"gid"` + Name string `json:"name"` + Notes string `json:"notes"` + ResourceType string `json:"resource_type"` + ResourceSubtype string `json:"resource_subtype"` + Completed bool `json:"completed"` + CompletedAt *time.Time `json:"completed_at"` + DueOn string `json:"due_on"` + CreatedAt time.Time `json:"created_at"` + ModifiedAt *time.Time `json:"modified_at"` + PermalinkUrl string `json:"permalink_url"` + Assignee *struct { + Gid string `json:"gid"` + Name string `json:"name"` + } `json:"assignee"` + CreatedBy *struct { + Gid string `json:"gid"` + Name string `json:"name"` + } `json:"created_by"` + Parent *struct { + Gid string `json:"gid"` + } `json:"parent"` + NumSubtasks int `json:"num_subtasks"` + Memberships []struct { + Section *struct { + Gid string `json:"gid"` + Name string 
`json:"name"` + } `json:"section"` + Project *struct { + Gid string `json:"gid"` + } `json:"project"` + } `json:"memberships"` +} + +func parseAsanaDate(s string) *time.Time { + if s == "" { + return nil + } + t, err := time.Parse("2006-01-02", s) + if err != nil { + return nil + } + return &t +} + +func ExtractTask(taskCtx plugin.SubTaskContext) errors.Error { + taskData := taskCtx.GetData().(*AsanaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: taskData.Options.ConnectionId, + ProjectId: taskData.Options.ProjectId, + }, + Table: rawTaskTable, + }, + Extract: func(resData *api.RawData) ([]interface{}, errors.Error) { + apiTask := &asanaApiTask{} + err := errors.Convert(json.Unmarshal(resData.Data, apiTask)) + if err != nil { + return nil, err + } + assigneeGid := "" + assigneeName := "" + if apiTask.Assignee != nil { + assigneeGid = apiTask.Assignee.Gid + assigneeName = apiTask.Assignee.Name + } + creatorGid := "" + creatorName := "" + if apiTask.CreatedBy != nil { + creatorGid = apiTask.CreatedBy.Gid + creatorName = apiTask.CreatedBy.Name + } + parentGid := "" + if apiTask.Parent != nil { + parentGid = apiTask.Parent.Gid + } + sectionGid := "" + sectionName := "" + projectGid := taskData.Options.ProjectId + for _, m := range apiTask.Memberships { + if m.Project != nil { + projectGid = m.Project.Gid + } + if m.Section != nil && m.Section.Gid != "" { + sectionGid = m.Section.Gid + sectionName = m.Section.Name + break + } + } + toolTask := &models.AsanaTask{ + ConnectionId: taskData.Options.ConnectionId, + Gid: apiTask.Gid, + Name: apiTask.Name, + Notes: apiTask.Notes, + ResourceType: apiTask.ResourceType, + ResourceSubtype: apiTask.ResourceSubtype, + Completed: apiTask.Completed, + CompletedAt: apiTask.CompletedAt, + DueOn: parseAsanaDate(apiTask.DueOn), + CreatedAt: apiTask.CreatedAt, + ModifiedAt: apiTask.ModifiedAt, + 
PermalinkUrl: apiTask.PermalinkUrl, + ProjectGid: projectGid, + SectionGid: sectionGid, + SectionName: sectionName, + AssigneeGid: assigneeGid, + AssigneeName: assigneeName, + CreatorGid: creatorGid, + CreatorName: creatorName, + ParentGid: parentGid, + NumSubtasks: apiTask.NumSubtasks, + } + return []interface{}{toolTask}, nil + }, + }) + if err != nil { + return err + } + return extractor.Execute() +} diff --git a/backend/plugins/asana/tasks/user_collector.go b/backend/plugins/asana/tasks/user_collector.go new file mode 100644 index 00000000000..9b1649aa1fb --- /dev/null +++ b/backend/plugins/asana/tasks/user_collector.go @@ -0,0 +1,101 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + "net/http" + "net/url" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +const rawUserTable = "asana_users" + +var _ plugin.SubTaskEntryPoint = CollectUser + +var CollectUserMeta = plugin.SubTaskMeta{ + Name: "CollectUser", + EntryPoint: CollectUser, + EnabledByDefault: true, + Description: "Collect user data from Asana API (project members)", + DomainTypes: []string{plugin.DOMAIN_TYPE_CROSS}, +} + +func CollectUser(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*AsanaTaskData) + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: rawUserTable, + }, + ApiClient: data.ApiClient, + PageSize: 100, + UrlTemplate: "projects/{{ .Params.ProjectId }}/members", + Query: func(reqData *api.RequestData) (url.Values, errors.Error) { + query := url.Values{} + query.Set("opt_fields", "gid,name,email,resource_type,photo.image_128x128") + query.Set("limit", "100") + if reqData.CustomData != nil { + if offset, ok := reqData.CustomData.(string); ok && offset != "" { + query.Set("offset", offset) + } + } + return query, nil + }, + GetNextPageCustomData: func(prevReqData *api.RequestData, prevPageResponse *http.Response) (interface{}, errors.Error) { + var resp asanaListResponse + err := api.UnmarshalResponse(prevPageResponse, &resp) + if err != nil { + return nil, err + } + if resp.NextPage != nil && resp.NextPage.Offset != "" { + return resp.NextPage.Offset, nil + } + return nil, nil + }, + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var resp asanaListResponse + err := 
api.UnmarshalResponse(res, &resp) + if err != nil { + return nil, err + } + return resp.Data, nil + }, + }) + if err != nil { + return err + } + return collector.Execute() +} + +type asanaListResponse struct { + Data []json.RawMessage `json:"data"` + NextPage *struct { + Offset string `json:"offset"` + Path string `json:"path"` + URI string `json:"uri"` + } `json:"next_page"` +} diff --git a/backend/plugins/asana/tasks/user_convertor.go b/backend/plugins/asana/tasks/user_convertor.go new file mode 100644 index 00000000000..4d9f3da0802 --- /dev/null +++ b/backend/plugins/asana/tasks/user_convertor.go @@ -0,0 +1,80 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "reflect" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/domainlayer" + "github.com/apache/incubator-devlake/core/models/domainlayer/crossdomain" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ConvertUser + +var ConvertUserMeta = plugin.SubTaskMeta{ + Name: "ConvertUser", + EntryPoint: ConvertUser, + EnabledByDefault: true, + Description: "Convert tool layer Asana users into domain layer accounts", + DomainTypes: []string{plugin.DOMAIN_TYPE_CROSS}, +} + +func ConvertUser(taskCtx plugin.SubTaskContext) errors.Error { + rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, rawUserTable) + db := taskCtx.GetDal() + connectionId := data.Options.ConnectionId + + clauses := []dal.Clause{ + dal.From(&models.AsanaUser{}), + dal.Where("connection_id = ?", connectionId), + } + cursor, err := db.Cursor(clauses...) 
+ if err != nil { + return err + } + defer cursor.Close() + + accountIdGen := didgen.NewDomainIdGenerator(&models.AsanaUser{}) + + converter, err := helper.NewDataConverter(helper.DataConverterArgs{ + RawDataSubTaskArgs: *rawDataSubTaskArgs, + InputRowType: reflect.TypeOf(models.AsanaUser{}), + Input: cursor, + Convert: func(inputRow interface{}) ([]interface{}, errors.Error) { + toolUser := inputRow.(*models.AsanaUser) + domainAccount := &crossdomain.Account{ + DomainEntity: domainlayer.DomainEntity{Id: accountIdGen.Generate(toolUser.ConnectionId, toolUser.Gid)}, + Email: toolUser.Email, + FullName: toolUser.Name, + UserName: toolUser.Name, + AvatarUrl: toolUser.PhotoUrl, + } + return []interface{}{domainAccount}, nil + }, + }) + if err != nil { + return err + } + return converter.Execute() +} diff --git a/backend/plugins/asana/tasks/user_extractor.go b/backend/plugins/asana/tasks/user_extractor.go new file mode 100644 index 00000000000..763c5a2a766 --- /dev/null +++ b/backend/plugins/asana/tasks/user_extractor.go @@ -0,0 +1,85 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/asana/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractUser + +var ExtractUserMeta = plugin.SubTaskMeta{ + Name: "ExtractUser", + EntryPoint: ExtractUser, + EnabledByDefault: true, + Description: "Extract raw data into tool layer table _tool_asana_users", + DomainTypes: []string{plugin.DOMAIN_TYPE_CROSS}, +} + +type asanaApiUser struct { + Gid string `json:"gid"` + Name string `json:"name"` + Email string `json:"email"` + ResourceType string `json:"resource_type"` + Photo *struct { + Image128x128 string `json:"image_128x128"` + } `json:"photo"` +} + +func ExtractUser(taskCtx plugin.SubTaskContext) errors.Error { + taskData := taskCtx.GetData().(*AsanaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: models.AsanaApiParams{ + ConnectionId: taskData.Options.ConnectionId, + ProjectId: taskData.Options.ProjectId, + }, + Table: rawUserTable, + }, + Extract: func(resData *api.RawData) ([]interface{}, errors.Error) { + apiUser := &asanaApiUser{} + err := errors.Convert(json.Unmarshal(resData.Data, apiUser)) + if err != nil { + return nil, err + } + photoUrl := "" + if apiUser.Photo != nil { + photoUrl = apiUser.Photo.Image128x128 + } + toolUser := &models.AsanaUser{ + ConnectionId: taskData.Options.ConnectionId, + Gid: apiUser.Gid, + Name: apiUser.Name, + Email: apiUser.Email, + ResourceType: apiUser.ResourceType, + PhotoUrl: photoUrl, + } + return []interface{}{toolUser}, nil + }, + }) + if err != nil { + return err + } + return extractor.Execute() +} diff --git a/backend/plugins/table_info_test.go b/backend/plugins/table_info_test.go index 834bc6d2e7c..929c069b86b 100644 --- 
a/backend/plugins/table_info_test.go +++ b/backend/plugins/table_info_test.go @@ -23,6 +23,7 @@ import ( "github.com/apache/incubator-devlake/helpers/unithelper" ae "github.com/apache/incubator-devlake/plugins/ae/impl" argocd "github.com/apache/incubator-devlake/plugins/argocd/impl" + asana "github.com/apache/incubator-devlake/plugins/asana/impl" azuredevops "github.com/apache/incubator-devlake/plugins/azuredevops_go/impl" bamboo "github.com/apache/incubator-devlake/plugins/bamboo/impl" bitbucket "github.com/apache/incubator-devlake/plugins/bitbucket/impl" @@ -70,6 +71,7 @@ func Test_GetPluginTablesInfo(t *testing.T) { checker.FeedIn("bitbucket/models", bitbucket.Bitbucket{}.GetTablesInfo) checker.FeedIn("bitbucket_server/models", bitbucket_server.BitbucketServer{}.GetTablesInfo) checker.FeedIn("argocd/models", argocd.ArgoCD{}.GetTablesInfo) + checker.FeedIn("asana/models", asana.Asana{}.GetTablesInfo) checker.FeedIn("customize/models", customize.Customize{}.GetTablesInfo) checker.FeedIn("dbt", dbt.Dbt{}.GetTablesInfo) checker.FeedIn("dora/models", dora.Dora{}.GetTablesInfo) diff --git a/config-ui/env.example b/config-ui/env.example index 078e902a541..1059e19df97 100644 --- a/config-ui/env.example +++ b/config-ui/env.example @@ -30,3 +30,4 @@ DEVLAKE_DASHBOARD_URL_GITHUB=/grafana/d/KXWvOFQnz/github?orgId=1&var-repo_id=All DEVLAKE_DASHBOARD_URL_GITLAB=/grafana/d/msSjEq97z/gitlab?orgId=1&var-repo_id=All&var-interval=WEEKDAY DEVLAKE_DASHBOARD_URL_BITBUCKET=/grafana/d/4LzQHZa4k/bitbucket?orgId=1&var-repo_id=All&var-interval=WEEKDAY DEVLAKE_DASHBOARD_URL_AZUREDEVOPS=/grafana/d/ba7e3a95-80ed-4067-a54b-2a82758eb3dd/azure-devops?orgId=1&var-repo_id=All&var-interval=WEEKDAY +DEVLAKE_DASHBOARD_URL_ASANA=/grafana/d/asana-dashboard?orgId=1 diff --git a/config-ui/src/plugins/components/scope-config-form/index.tsx b/config-ui/src/plugins/components/scope-config-form/index.tsx index e5095ff0983..46fbf129a59 100644 --- a/config-ui/src/plugins/components/scope-config-form/index.tsx 
+++ b/config-ui/src/plugins/components/scope-config-form/index.tsx @@ -36,6 +36,7 @@ import { BambooTransformation } from '@/plugins/register/bamboo'; import { CircleCITransformation } from '@/plugins/register/circleci'; import { ArgoCDTransformation } from '@/plugins/register/argocd'; import { GhCopilotTransformation } from '@/plugins/register/gh-copilot'; +import { AsanaTransformation } from '@/plugins/register/asana'; import { DOC_URL } from '@/release'; import { operator } from '@/utils'; @@ -297,6 +298,15 @@ export const ScopeConfigForm = ({ /> )} + {plugin === 'asana' && ( + + )} + {plugin === 'tapd' && scopeId && ( + + + + diff --git a/config-ui/src/plugins/register/asana/config.tsx b/config-ui/src/plugins/register/asana/config.tsx new file mode 100644 index 00000000000..af570c0e80e --- /dev/null +++ b/config-ui/src/plugins/register/asana/config.tsx @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +import { DOC_URL } from '@/release'; +import { IPluginConfig } from '@/types'; + +import Icon from './assets/icon.svg?react'; + +export const AsanaConfig: IPluginConfig = { + plugin: 'asana', + name: 'Asana', + icon: ({ color }) => , + sort: 12, + connection: { + docLink: DOC_URL.PLUGIN.ASANA.BASIS, + initialValues: { + endpoint: 'https://app.asana.com/api/1.0/', + }, + fields: [ + 'name', + { + key: 'endpoint', + label: 'Endpoint', + subLabel: 'Asana API base URL.', + }, + 'token', + 'proxy', + { + key: 'rateLimitPerHour', + subLabel: 'Maximum number of API requests per hour. Leave blank for default.', + defaultValue: 150, + }, + ], + }, + dataScope: { + title: 'Projects', + millerColumn: { + columnCount: 4, + firstColumnTitle: 'Workspaces', + }, + searchPlaceholder: 'Search projects...', + }, + scopeConfig: { + entities: ['TICKET'], + transformation: { + issueTypeRequirement: '(feat|feature|story|requirement)', + issueTypeBug: '(bug|defect|broken)', + issueTypeIncident: '(incident|outage|failure)', + }, + }, +}; diff --git a/config-ui/src/plugins/register/asana/index.ts b/config-ui/src/plugins/register/asana/index.ts new file mode 100644 index 00000000000..5f16858cbe4 --- /dev/null +++ b/config-ui/src/plugins/register/asana/index.ts @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +export * from './config'; +export * from './transformation'; diff --git a/config-ui/src/plugins/register/asana/transformation.tsx b/config-ui/src/plugins/register/asana/transformation.tsx new file mode 100644 index 00000000000..3277ee8e787 --- /dev/null +++ b/config-ui/src/plugins/register/asana/transformation.tsx @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +import { CaretRightOutlined } from '@ant-design/icons'; +import { theme, Collapse, Tag, Form, Input } from 'antd'; + +import { ExternalLink } from '@/components'; +import { DOC_URL } from '@/release'; + +interface Props { + entities: string[]; + connectionId: ID; + transformation: any; + setTransformation: React.Dispatch>; +} + +export const AsanaTransformation = ({ entities, transformation, setTransformation }: Props) => { + const { token } = theme.useToken(); + + const panelStyle: React.CSSProperties = { + marginBottom: 24, + background: token.colorFillAlter, + borderRadius: token.borderRadiusLG, + border: 'none', + }; + + return ( + } + style={{ background: token.colorBgContainer }} + size="large" + items={[ + { + key: 'TICKET', + label: 'Issue Tracking', + style: panelStyle, + children: ( + <> +

+ Tell DevLake what your Asana tags mean to view metrics such as{' '} + Bug Age,{' '} + DORA - Median Time to Restore Service, etc. +

+

+ DevLake defines three standard types of issues: REQUIREMENT, BUG and INCIDENT. Classify your Asana tasks + using tags that match the RegEx patterns below. +

+ + + setTransformation({ + ...transformation, + issueTypeRequirement: e.target.value, + }) + } + /> + + + + setTransformation({ + ...transformation, + issueTypeBug: e.target.value, + }) + } + /> + + + Incident + + DORA + + + } + > + + setTransformation({ + ...transformation, + issueTypeIncident: e.target.value, + }) + } + /> + + + ), + }, + ].filter((it) => entities.includes(it.key))} + /> + ); +}; diff --git a/config-ui/src/plugins/register/index.ts b/config-ui/src/plugins/register/index.ts index a58a60d7fd2..18baf06628c 100644 --- a/config-ui/src/plugins/register/index.ts +++ b/config-ui/src/plugins/register/index.ts @@ -19,6 +19,7 @@ import { IPluginConfig } from '@/types'; import { ArgoCDConfig } from './argocd'; +import { AsanaConfig } from './asana'; import { AzureConfig, AzureGoConfig } from './azure'; import { BambooConfig } from './bamboo'; import { BitbucketConfig } from './bitbucket'; @@ -42,6 +43,7 @@ import { SlackConfig } from './slack/config'; export const pluginConfigs: IPluginConfig[] = [ ArgoCDConfig, + AsanaConfig, AzureConfig, AzureGoConfig, BambooConfig, diff --git a/config-ui/src/plugins/utils.ts b/config-ui/src/plugins/utils.ts index ea1b8d51487..88b11b55576 100644 --- a/config-ui/src/plugins/utils.ts +++ b/config-ui/src/plugins/utils.ts @@ -43,6 +43,8 @@ export const getPluginScopeId = (plugin: string, scope: any) => { return `${scope.planKey}`; case 'argocd': return `${scope.name}`; + case 'asana': + return `${scope.gid}`; default: return `${scope.id}`; } diff --git a/config-ui/src/release/stable.ts b/config-ui/src/release/stable.ts index 7911e06e0dd..45a7c3e3018 100644 --- a/config-ui/src/release/stable.ts +++ b/config-ui/src/release/stable.ts @@ -23,6 +23,10 @@ const URLS = { }, DORA: 'https://devlake.apache.org/docs/DORA/', PLUGIN: { + ASANA: { + BASIS: 'https://devlake.apache.org/docs/Configuration/Asana', + TRANSFORMATION: 'https://devlake.apache.org/docs/Configuration/Asana', + }, ARGOCD: { BASIS: 
'https://devlake.apache.org/docs/Configuration/ArgoCD', TRANSFORMATION: diff --git a/config-ui/src/routes/onboard/step-4.tsx b/config-ui/src/routes/onboard/step-4.tsx index eb1be96880c..569ed0f3da6 100644 --- a/config-ui/src/routes/onboard/step-4.tsx +++ b/config-ui/src/routes/onboard/step-4.tsx @@ -80,6 +80,7 @@ export const DashboardURLMap: Record = { gitlab: import.meta.env.DEVLAKE_DASHBOARD_URL_GITLAB, bitbucket: import.meta.env.DEVLAKE_DASHBOARD_URL_BITBUCKET, azuredevops: import.meta.env.DEVLAKE_DASHBOARD_URL_AZUREDEVOPS, + asana: import.meta.env.DEVLAKE_DASHBOARD_URL_ASANA, }; const getStatus = (data: any) => { diff --git a/config-ui/src/vite-env.d.ts b/config-ui/src/vite-env.d.ts index ceea9065145..946fc8444fd 100644 --- a/config-ui/src/vite-env.d.ts +++ b/config-ui/src/vite-env.d.ts @@ -30,6 +30,7 @@ interface ImportMetaEnv { readonly DEVLAKE_DASHBOARD_URL_GITLAB: string; readonly DEVLAKE_DASHBOARD_URL_BITBUCKET: string; readonly DEVLAKE_DASHBOARD_URL_AZUREDEVOPS: string; + readonly DEVLAKE_DASHBOARD_URL_ASANA: string; } interface ImportMeta { diff --git a/grafana/dashboards/Asana.json b/grafana/dashboards/Asana.json new file mode 100644 index 00000000000..665bb0672e2 --- /dev/null +++ b/grafana/dashboards/Asana.json @@ -0,0 +1,1192 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [ + { + "asDropdown": false, + "icon": "bolt", + "includeVars": false, + "keepTime": true, + "tags": [], + "targetBlank": false, + "title": "Homepage", + "tooltip": "", + "type": "link", + "url": "/grafana/d/Lv1XbLHnk/data-specific-dashboards-homepage" + }, + { + "asDropdown": false, + "icon": "external link", + "includeVars": false, + "keepTime": true, + "tags": [ + "Data 
Source Specific Dashboard" + ], + "targetBlank": false, + "title": "Metric dashboards", + "tooltip": "", + "type": "dashboards", + "url": "" + } + ], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { + "h": 3, + "w": 13, + "x": 0, + "y": 0 + }, + "id": 128, + "links": [ + { + "targetBlank": true, + "title": "Asana", + "url": "https://devlake.apache.org/docs/Configuration/Asana" + } + ], + "options": { + "code": { + "language": "plaintext", + "showLineNumbers": false, + "showMiniMap": false + }, + "content": "- Use Cases: This dashboard shows the basic project management metrics from Asana.\n- Data Source Required: Asana", + "mode": "markdown" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "queryType": "randomWalk", + "refId": "A" + } + ], + "title": "Dashboard Introduction", + "type": "text" + }, + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 3 + }, + "id": 126, + "targets": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "refId": "A" + } + ], + "title": "1. 
Issue Throughput", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Total number of issues created in the selected time range and board.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 0, + "y": 4 + }, + "id": 114, + "links": [ + { + "targetBlank": true, + "title": "Requirement Count", + "url": "https://devlake.apache.org/docs/Metrics/RequirementCount" + } + ], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "group": [], + "metricColumn": "none", + "queryType": "randomWalk", + "rawQuery": true, + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Number of Issues [Issues Created in Selected Time Range]", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": 
"green", + "value": null + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 4, + "y": 4 + }, + "id": 116, + "links": [ + { + "targetBlank": true, + "title": "Requirement Count", + "url": "https://devlake.apache.org/docs/Metrics/RequirementCount" + } + ], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "group": [], + "metricColumn": "none", + "queryType": "randomWalk", + "rawQuery": true, + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Number of Delivered Issues [Issues Created in Selected Time Range]", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 1, + "drawStyle": "bars", + "fillOpacity": 12, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + 
"pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 16, + "x": 8, + "y": 4 + }, + "id": 120, + "links": [ + { + "targetBlank": true, + "title": "Requirement Count", + "url": "https://devlake.apache.org/docs/Metrics/RequirementCount" + } + ], + "options": { + "legend": { + "calcs": [ + "sum" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "group": [], + "metricColumn": "none", + "queryType": "randomWalk", + "rawQuery": true, + "rawSql": "SELECT\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n count(distinct case when status != 'DONE' then i.id else null end) as \"Number of Open Issues\",\r\n count(distinct case when status = 'DONE' then i.id else null end) as \"Number of Delivered Issues\"\r\nFROM issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\nwhere \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\ngroup by 1", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Issue Status Distribution over Month [Issues Created in Selected Time Range]", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Issue Delivery Rate = count(Delivered 
Issues)/count(Issues)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "percentage", + "steps": [ + { + "color": "red", + "value": null + }, + { + "color": "green", + "value": 50 + } + ] + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 8, + "x": 0, + "y": 10 + }, + "id": 117, + "links": [ + { + "targetBlank": true, + "title": "Requirement Delivery Rate", + "url": "https://devlake.apache.org/docs/Metrics/RequirementDeliveryRate" + } + ], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "group": [], + "metricColumn": "none", + "queryType": "randomWalk", + "rawQuery": true, + "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.status = 'DONE' then i.id else null end) as delivered_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * delivered_count/total_count as requirement_delivery_rate\r\nfrom _requirements", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Issue Delivery Rate [Issues Created in Selected Time Range]", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Issue Delivery Rate = count(Delivered Issues)/count(Issues)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + 
"axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Delivery Rate(%)", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 12, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 16, + "x": 8, + "y": 10 + }, + "id": 121, + "links": [ + { + "targetBlank": true, + "title": "Requirement Delivery Rate", + "url": "https://devlake.apache.org/docs/Metrics/RequirementDeliveryRate" + } + ], + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "group": [], + "metricColumn": "none", + "queryType": "randomWalk", + "rawQuery": true, + "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.status = 'DONE' then i.id else null end)/count(distinct i.id) as delivered_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n delivered_rate\r\nfrom _requirements\r\norder by time", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } 
+ ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Issue Delivery Rate over Time [Issues Created in Selected Time Range]", + "type": "timeseries" + }, + { + "collapsed": false, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 16 + }, + "id": 110, + "panels": [], + "targets": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "refId": "A" + } + ], + "title": "2. Issue Lead Time", + "type": "row" + }, + { + "datasource": "mysql", + "description": "", + "fieldConfig": { + "defaults": { + "decimals": 1, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 14 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 0, + "y": 17 + }, + "id": 12, + "links": [ + { + "targetBlank": true, + "title": "Requirement Lead Time", + "url": "https://devlake.apache.org/docs/Metrics/RequirementLeadTime" + } + ], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "mean" + ], + "fields": "/^value$/", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})", + "refId": "A", + "select": [ + [ + { + "params": [ + "progress" + ], + "type": "column" + } + ] + ], + "table": "ca_analysis", + "timeColumn": "create_time", + "timeColumnType": "timestamp", + "where": [ + { + "name": 
"$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Mean Issue Lead Time in Days [Issues Resolved in Selected Time Range]", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 21 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 4, + "y": 17 + }, + "id": 13, + "links": [ + { + "targetBlank": true, + "title": "Requirement Lead Time", + "url": "https://devlake.apache.org/docs/Metrics/RequirementLeadTime" + } + ], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", + "refId": "A", + "select": [ + [ + { + "params": [ + "progress" + ], + "type": "column" + } + ] + ], + "table": "ca_analysis", + "timeColumn": "create_time", + "timeColumnType": "timestamp", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "80% Issues' Lead Time are less than # days [Issues Resolved in Selected Time Range]", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "", + "fieldConfig": { + 
"defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Lead Time(days)", + "axisPlacement": "auto", + "axisSoftMin": 0, + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineWidth": 1, + "scaleDistribution": { + "type": "linear" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 16, + "x": 8, + "y": 17 + }, + "id": 17, + "interval": "", + "links": [ + { + "targetBlank": true, + "title": "Requirement Lead Time", + "url": "https://devlake.apache.org/docs/Metrics/RequirementLeadTime" + } + ], + "options": { + "barRadius": 0, + "barWidth": 0.5, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "orientation": "auto", + "showValue": "auto", + "stacking": "none", + "text": { + "valueSize": 12 + }, + "tooltip": { + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": 0, + "xTickLabelSpacing": 0 + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_lead_time\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_lead_time\r\nfrom 
_requirements\r\norder by time asc", + "refId": "A", + "select": [ + [ + { + "params": [ + "progress" + ], + "type": "column" + } + ] + ], + "table": "ca_analysis", + "timeColumn": "create_time", + "timeColumnType": "timestamp", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Mean Issue Lead Time [Issues Resolved in Selected Time Range]", + "type": "barchart" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "mysql", + "description": "The cumulative distribution of issue lead time. Each point refers to the percent rank of a lead time.", + "fill": 0, + "fillGradient": 4, + "gridPos": { + "h": 6, + "w": 24, + "x": 0, + "y": 23 + }, + "hiddenSeries": false, + "id": 15, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "rightSide": false, + "show": false, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 8, + "links": [ + { + "targetBlank": true, + "title": "Requirement Lead Time", + "url": "https://devlake.apache.org/docs/Metrics/RequirementLeadTime" + } + ], + "nullPointMode": "null", + "options": { + "alertThreshold": false + }, + "percentage": false, + "pluginVersion": "9.5.15", + "pointradius": 0.5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n 
percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", + "refId": "A", + "select": [ + [ + { + "params": [ + "progress" + ], + "type": "column" + } + ] + ], + "table": "ca_analysis", + "timeColumn": "create_time", + "timeColumnType": "timestamp", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "thresholds": [ + { + "colorMode": "ok", + "fill": true, + "line": true, + "op": "lt", + "value": 0.8, + "yaxis": "right" + } + ], + "timeRegions": [], + "title": "Cumulative Distribution of Issue Lead Time [Issues Resolved in Selected Time Range]", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transformations": [], + "type": "graph", + "xaxis": { + "mode": "series", + "show": true, + "values": [ + "current" + ] + }, + "yaxes": [ + { + "format": "percentunit", + "label": "Percent Rank (%)", + "logBase": 1, + "max": "1.2", + "show": true + }, + { + "format": "short", + "logBase": 1, + "show": false + } + ], + "yaxis": { + "align": false + } + }, + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { + "h": 2, + "w": 24, + "x": 0, + "y": 29 + }, + "id": 130, + "options": { + "code": { + "language": "plaintext", + "showLineNumbers": false, + "showMiniMap": false + }, + "content": "
\n\nThis dashboard is created based on this [data schema](https://devlake.apache.org/docs/DataModels/DevLakeDomainLayerSchema). Want to add more metrics? Please follow the [guide](https://devlake.apache.org/docs/Configuration/Dashboards/GrafanaUserGuide).", + "mode": "markdown" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "queryType": "randomWalk", + "refId": "A" + } + ], + "type": "text" + } + ], + "refresh": "", + "schemaVersion": 38, + "style": "dark", + "tags": [ + "Data Source Dashboard" + ], + "templating": { + "list": [ + { + "current": { + "selected": true, + "text": [ + "All" + ], + "value": [ + "$__all" + ] + }, + "datasource": "mysql", + "definition": "select concat(name, '--', id) from boards where id like 'asana%'", + "hide": 0, + "includeAll": true, + "label": "Choose Board", + "multi": true, + "name": "board_id", + "options": [], + "query": "select concat(name, '--', id) from boards where id like 'asana%'", + "refresh": 1, + "regex": "/^(?.*)--(?.*)$/", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": { + "selected": false, + "text": "All", + "value": "$__all" + }, + "datasource": "mysql", + "definition": "select distinct type from issues", + "hide": 0, + "includeAll": true, + "label": "Issue Type", + "multi": false, + "name": "type", + "options": [], + "query": "select distinct type from issues", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + } + ] + }, + "time": { + "from": "now-6M", + "to": "now" + }, + "timepicker": {}, + "timezone": "utc", + "title": "Asana", + "uid": "asana-dashboard", + "version": 1, + "weekStart": "" +} From 5f8c575e173dc4518b0134eb320841d9ac131983 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Leif=20Roger=20Fr=C3=B8ysaa?= Date: Thu, 12 Mar 2026 13:48:10 +0100 Subject: [PATCH 19/39] feat: GitHub App token refresh (#8746) * feat(github): auto-refresh GitHub App installation tokens Add 
transport-level token refresh for GitHub App (AppKey) connections. GitHub App installation tokens expire after ~1 hour; this adds proactive refresh (before expiry) and reactive refresh (on 401) using the existing TokenProvider/RefreshRoundTripper infrastructure. New files: - app_installation_refresh.go: refresh logic + DB persistence - refresh_api_client.go: minimal ApiClient for token refresh POST - cmd/test_refresh/main.go: manual test script for real GitHub Apps Modified: - connection.go: export GetInstallationAccessToken, parse ExpiresAt - token_provider.go: add refreshFn for pluggable refresh strategies - round_tripper.go: document dual Authorization header interaction - api_client.go: wire AppKey connections into refresh infrastructure - Tests updated for new constructors and AppKey refresh flow * feat(github): add diagnostic logging to GitHub App token refresh Add structured logging at key decision points for token refresh: - Token provider creation (connection ID, installation ID, expiry) - Round tripper installation (connection ID, auth method) - Proactive refresh trigger (near-expiry detection) - Refresh start/success/failure (old/new token prefixes, expiry times) - DB persistence success/failure - Reactive 401 refresh and skip-due-to-concurrent-refresh All logs route through the DevLake logger to pipeline log files. * fix(github): prevent deadlock and fix token persistence in App token refresh Deadlock fix: NewAppInstallationTokenProvider now captures client.Transport (the base transport) before wrapping with RefreshRoundTripper. The refresh function uses newRefreshApiClientWithTransport(baseTransport) to POST for new installation tokens, bypassing the RefreshRoundTripper entirely. Token persistence fix: PersistEncryptedTokenColumns() manually encrypts tokens via plugin.Encrypt() then writes ciphertext via dal.UpdateColumns with conn.TableName() (a string) as the first argument. 
Passing the table name string makes GORM use Table() instead of Model(), preventing the encdec serializer from corrupting the in-memory token value. The encryption secret is threaded from taskCtx.GetConfig(ENCRYPTION_SECRET) through CreateApiClient to TokenProvider to persist functions. Also persists the initial App token at startup for DB consistency, and adds TestProactiveRefreshNoDeadlock with a real RSA key to verify the deadlock scenario is resolved. * fix(grafana): update dashboard descriptions to list all supported data sources (#8741) Several dashboard introduction panels hardcoded "GitHub and Jira" as required data sources, even though the underlying queries use generic domain layer tables that work with any supported Git tool or issue tracker. Updated to list all supported sources following the pattern already used by DORA and WorkLogs dashboards. Closes #8740 Co-authored-by: Spiff Azeta * fix: modify cicd_deployments name from varchar to text (#8724) * fix: modify cicd_deployments name from varchar to text * fix: update the year * fix(q_dev): replace MariaDB-specific IF NOT EXISTS syntax with DAL methods for MySQL 8.x compatibility (#8745) * fix(azuredevops): default empty entities and add CROSS to repo scope in makeScopeV200 (#8751) When scopeConfig.Entities is empty (common when no entities are explicitly selected in the UI), makeScopeV200 produced zero scopes, leaving project_mapping with no rows. Additionally, the repo scope condition did not check for DOMAIN_TYPE_CROSS, so selecting only CROSS would not create a repo scope, breaking DORA metrics. This adds the same fixes applied to GitLab in #8743. Closes #8749 * fix(bitbucket): default empty entities to all domain types in makeScopesV200 (#8750) When scopeConfig.Entities is empty (common when no entities are explicitly selected in the UI), makeScopesV200 produced zero scopes, leaving project_mapping with no repo rows. This adds the same empty-entities default applied to GitLab in #8743. 
Closes #8748 * fix(github): remove unused refresh client constructor and update tests --------- Co-authored-by: Spiff Azeta <35563797+spiffaz@users.noreply.github.com> Co-authored-by: Spiff Azeta Co-authored-by: Dan Crews Co-authored-by: Tomoya Kawaguchi <68677002+yamoyamoto@users.noreply.github.com> --- backend/plugins/github/models/connection.go | 21 +- backend/plugins/github/tasks/api_client.go | 32 ++- .../github/token/app_installation_refresh.go | 152 +++++++++++ .../github/token/cmd/test_refresh/main.go | 240 ++++++++++++++++++ .../github/token/refresh_api_client.go | 115 +++++++++ .../github/token/refresh_api_client_test.go | 106 ++++++++ backend/plugins/github/token/round_tripper.go | 5 + .../github/token/round_tripper_test.go | 193 +++++++++++++- .../plugins/github/token/token_provider.go | 102 ++++++-- .../github/token/token_provider_test.go | 117 +++++++-- 10 files changed, 1019 insertions(+), 64 deletions(-) create mode 100644 backend/plugins/github/token/app_installation_refresh.go create mode 100644 backend/plugins/github/token/cmd/test_refresh/main.go create mode 100644 backend/plugins/github/token/refresh_api_client.go create mode 100644 backend/plugins/github/token/refresh_api_client_test.go diff --git a/backend/plugins/github/models/connection.go b/backend/plugins/github/models/connection.go index d03353c1881..51034f67cbb 100644 --- a/backend/plugins/github/models/connection.go +++ b/backend/plugins/github/models/connection.go @@ -81,13 +81,15 @@ func (conn *GithubConn) PrepareApiClient(apiClient plugin.ApiClient) errors.Erro } if conn.AuthMethod == AppKey && conn.InstallationID != 0 { - token, err := conn.getInstallationAccessToken(apiClient) + token, err := conn.GetInstallationAccessToken(apiClient) if err != nil { return err } - - conn.Token = token.Token - conn.tokens = []string{token.Token} + var expiresAt *time.Time + if !token.ExpiresAt.IsZero() { + expiresAt = &token.ExpiresAt + } + conn.UpdateToken(token.Token, "", expiresAt, nil) } 
return nil @@ -354,7 +356,8 @@ type GithubUserOfToken struct { } type InstallationToken struct { - Token string `json:"token"` + Token string `json:"token"` + ExpiresAt time.Time `json:"expires_at"` } type GithubApp struct { @@ -397,7 +400,7 @@ func (gak *GithubAppKey) CreateJwt() (string, errors.Error) { return tokenString, nil } -func (gak *GithubAppKey) getInstallationAccessToken( +func (gak *GithubAppKey) GetInstallationAccessToken( apiClient plugin.ApiClient, ) (*InstallationToken, errors.Error) { @@ -417,12 +420,18 @@ func (gak *GithubAppKey) getInstallationAccessToken( if err != nil { return nil, err } + if resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusOK { + return nil, errors.HttpStatus(resp.StatusCode).New(fmt.Sprintf("unexpected status code while getting installation access token: %s", string(body))) + } var installationToken InstallationToken err = errors.Convert(json.Unmarshal(body, &installationToken)) if err != nil { return nil, err } + if installationToken.Token == "" { + return nil, errors.Default.New("empty installation access token returned") + } return &installationToken, nil } diff --git a/backend/plugins/github/tasks/api_client.go b/backend/plugins/github/tasks/api_client.go index 268af8ecec1..42181ff139e 100644 --- a/backend/plugins/github/tasks/api_client.go +++ b/backend/plugins/github/tasks/api_client.go @@ -35,14 +35,18 @@ func CreateApiClient(taskCtx plugin.TaskContext, connection *models.GithubConnec return nil, err } - // Inject TokenProvider if refresh token is present - if connection.RefreshToken != "" { - logger := taskCtx.GetLogger() - db := taskCtx.GetDal() - - // Create TokenProvider - tp := token.NewTokenProvider(connection, db, apiClient.GetClient(), logger) + logger := taskCtx.GetLogger() + db := taskCtx.GetDal() + encryptionSecret := taskCtx.GetConfig(plugin.EncodeKeyEnvStr) + // Inject TokenProvider for OAuth refresh or GitHub App installation tokens. 
+ var tp *token.TokenProvider + if connection.RefreshToken != "" { + tp = token.NewTokenProvider(connection, db, apiClient.GetClient(), logger, encryptionSecret) + } else if connection.AuthMethod == models.AppKey && connection.InstallationID != 0 { + tp = token.NewAppInstallationTokenProvider(connection, db, apiClient.GetClient(), logger, encryptionSecret) + } + if tp != nil { // Wrap the transport baseTransport := apiClient.GetClient().Transport if baseTransport == nil { @@ -51,6 +55,20 @@ func CreateApiClient(taskCtx plugin.TaskContext, connection *models.GithubConnec rt := token.NewRefreshRoundTripper(baseTransport, tp) apiClient.GetClient().Transport = rt + logger.Info("Installed token refresh round tripper for connection %d (authMethod=%s)", + connection.ID, connection.AuthMethod) + } + + // Persist the freshly minted token so the DB has a correctly encrypted value. + // PrepareApiClient (called by NewApiClientFromConnection) mints the token + // in-memory but does not persist it; without this, the DB may contain a stale + // or corrupted token that breaks GET /connections. + if connection.AuthMethod == models.AppKey && connection.Token != "" { + if err := token.PersistEncryptedTokenColumns(db, connection, encryptionSecret, logger, false); err != nil { + logger.Warn(err, "Failed to persist initial token for connection %d", connection.ID) + } else { + logger.Info("Persisted initial token for connection %d", connection.ID) + } } // create rate limit calculator diff --git a/backend/plugins/github/token/app_installation_refresh.go b/backend/plugins/github/token/app_installation_refresh.go new file mode 100644 index 00000000000..2f4cf04b898 --- /dev/null +++ b/backend/plugins/github/token/app_installation_refresh.go @@ -0,0 +1,152 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package token + +import ( + "time" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/log" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/plugins/github/models" +) + +// tokenPrefix returns the first n characters of a token for safe logging. +func tokenPrefix(token string, n int) string { + if len(token) <= n { + return token + } + return token[:n] + "..." +} + +func refreshGitHubAppInstallationToken(tp *TokenProvider) errors.Error { + if tp == nil || tp.conn == nil { + return errors.Default.New("missing github connection for app token refresh") + } + if tp.conn.AuthMethod != models.AppKey || tp.conn.InstallationID == 0 { + return errors.Default.New("invalid github app connection for token refresh") + } + if tp.conn.Endpoint == "" { + return errors.Default.New("missing github endpoint for token refresh") + } + + oldToken := tp.conn.Token + if tp.logger != nil { + expiresStr := "unknown" + if tp.conn.TokenExpiresAt != nil { + expiresStr = tp.conn.TokenExpiresAt.Format(time.RFC3339) + } + tp.logger.Info( + "Refreshing GitHub App installation token for connection %d (installation %d), old token=%s, expires_at=%s", + tp.conn.ID, tp.conn.InstallationID, + tokenPrefix(oldToken, 8), + expiresStr, + ) + } + + // Use baseTransport (the unwrapped transport) to avoid deadlock. 
+ // The httpClient's transport may be the RefreshRoundTripper which would + // re-enter GetToken() and deadlock on the mutex. + apiClient := newRefreshApiClientWithTransport(tp.conn.Endpoint, tp.baseTransport) + installationToken, err := tp.conn.GithubAppKey.GetInstallationAccessToken(apiClient) + if err != nil { + if tp.logger != nil { + tp.logger.Error(err, "Failed to refresh GitHub App installation token for connection %d", tp.conn.ID) + } + return err + } + + var expiresAt *time.Time + if !installationToken.ExpiresAt.IsZero() { + expiresAt = &installationToken.ExpiresAt + } + tp.conn.UpdateToken(installationToken.Token, "", expiresAt, nil) + + if tp.logger != nil { + tp.logger.Info( + "Successfully refreshed GitHub App installation token for connection %d, new token=%s, new expires_at=%s", + tp.conn.ID, + tokenPrefix(installationToken.Token, 8), + installationToken.ExpiresAt.Format(time.RFC3339), + ) + } + + persistAppToken(tp.dal, tp.conn, tp.encryptionSecret, tp.logger) + return nil +} + +func persistAppToken(d dal.Dal, conn *models.GithubConnection, encryptionSecret string, logger log.Logger) { + if d == nil || conn == nil { + return + } + if err := PersistEncryptedTokenColumns(d, conn, encryptionSecret, logger, false); err != nil { + if logger != nil { + logger.Warn(err, "Failed to persist refreshed app installation token for connection %d", conn.ID) + } + } else if logger != nil { + logger.Info("Persisted refreshed app installation token for connection %d", conn.ID) + } +} + +// PersistEncryptedTokenColumns manually encrypts token fields and writes them +// to the DB using UpdateColumns (map-based), which only touches the specified +// columns. This avoids two problems: +// - dal.Update (GORM Save) writes ALL fields, including refresh_token_expires_at +// which may have Go zero time that MySQL rejects as '0000-00-00'. +// - dal.UpdateColumns with plaintext bypasses the GORM encdec serializer, +// writing unencrypted tokens that corrupt subsequent reads. 
+// +// IMPORTANT: We pass the table name string (not the conn struct) to UpdateColumns +// so that GORM uses Table() instead of Model(). When Model(conn) is used, GORM +// processes the encdec serializer on the struct's Token field during statement +// preparation, which overwrites conn.Token in memory with the encrypted ciphertext. +// This corrupts the in-memory token causing immediate 401s on the next API call. +// +// If includeRefreshToken is true, refresh_token and refresh_token_expires_at +// are also written (used by the OAuth refresh path where these values are valid). +func PersistEncryptedTokenColumns(d dal.Dal, conn *models.GithubConnection, encryptionSecret string, logger log.Logger, includeRefreshToken bool) errors.Error { + encToken, err := plugin.Encrypt(encryptionSecret, conn.Token) + if err != nil { + return errors.Default.Wrap(err, "failed to encrypt token for persistence") + } + + sets := []dal.DalSet{ + {ColumnName: "token", Value: encToken}, + {ColumnName: "token_expires_at", Value: conn.TokenExpiresAt}, + } + + if includeRefreshToken { + encRefreshToken, err := plugin.Encrypt(encryptionSecret, conn.RefreshToken) + if err != nil { + return errors.Default.Wrap(err, "failed to encrypt refresh_token for persistence") + } + sets = append(sets, + dal.DalSet{ColumnName: "refresh_token", Value: encRefreshToken}, + dal.DalSet{ColumnName: "refresh_token_expires_at", Value: conn.RefreshTokenExpiresAt}, + ) + } + + // Use the table name string instead of the conn struct to prevent GORM from + // running the encdec serializer on conn.Token during Model() processing. 
+ return d.UpdateColumns( + conn.TableName(), + sets, + dal.Where("id = ?", conn.ID), + ) +} diff --git a/backend/plugins/github/token/cmd/test_refresh/main.go b/backend/plugins/github/token/cmd/test_refresh/main.go new file mode 100644 index 00000000000..be1854dcc29 --- /dev/null +++ b/backend/plugins/github/token/cmd/test_refresh/main.go @@ -0,0 +1,240 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// This script lists GitHub App installations and tests the token refresh flow. 
+// +// Usage: +// +// GITHUB_APP_ID=123456 GITHUB_APP_PEM="$(cat private-key.pem)" go run ./plugins/github/token/cmd/test_refresh/ +// +// Or if the key is in a file: +// +// GITHUB_APP_ID=123456 GITHUB_APP_PEM_FILE=/path/to/private-key.pem go run ./plugins/github/token/cmd/test_refresh/ +package main + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "time" + + "github.com/golang-jwt/jwt/v5" +) + +type installation struct { + ID int `json:"id"` + Account struct { + Login string `json:"login"` + } `json:"account"` + AppID int `json:"app_id"` +} + +type installationToken struct { + Token string `json:"token"` + ExpiresAt time.Time `json:"expires_at"` +} + +func main() { + appID := os.Getenv("GITHUB_APP_ID") + if appID == "" { + fatal("GITHUB_APP_ID env var is required") + } + + pemData := os.Getenv("GITHUB_APP_PEM") + if pemData == "" { + pemFile := os.Getenv("GITHUB_APP_PEM_FILE") + if pemFile == "" { + fatal("Set GITHUB_APP_PEM (contents) or GITHUB_APP_PEM_FILE (path)") + } + data, err := os.ReadFile(pemFile) + if err != nil { + fatal("failed to read PEM file: %v", err) + } + pemData = string(data) + } + + // Step 1: Create a JWT signed with the app's private key + fmt.Println("=== Step 1: Creating JWT from App ID and private key ===") + jwtToken, err := createJWT(appID, pemData) + if err != nil { + fatal("failed to create JWT: %v", err) + } + fmt.Printf("JWT created (first 20 chars): %s...\n\n", jwtToken[:20]) + + // Step 2: List installations + fmt.Println("=== Step 2: Listing installations for this app ===") + installations, err := listInstallations(jwtToken) + if err != nil { + fatal("failed to list installations: %v", err) + } + if len(installations) == 0 { + fatal("no installations found for this app") + } + for _, inst := range installations { + fmt.Printf(" Installation ID: %d Account: %s\n", inst.ID, inst.Account.Login) + } + fmt.Println() + + // Step 3: Get an installation token for the first installation + inst := installations[0] + 
fmt.Printf("=== Step 3: Minting installation token for %s (ID: %d) ===\n", inst.Account.Login, inst.ID) + token1, err := getInstallationToken(jwtToken, inst.ID) + if err != nil { + fatal("failed to get installation token: %v", err) + } + fmt.Printf("Token 1: %s... Expires: %s\n\n", token1.Token[:10], token1.ExpiresAt.Format(time.RFC3339)) + + // Step 4: Make an API call with the token to verify it works + fmt.Println("=== Step 4: Verifying token works (GET /installation/repositories) ===") + err = verifyToken(token1.Token) + if err != nil { + fatal("token verification failed: %v", err) + } + fmt.Printf("Token is valid and working.\n\n") + + // Step 5: Simulate the refresh flow — mint a second token (as our refreshFn would) + fmt.Println("=== Step 5: Simulating token refresh (minting a second token) ===") + jwtToken2, err := createJWT(appID, pemData) + if err != nil { + fatal("failed to create second JWT: %v", err) + } + token2, err := getInstallationToken(jwtToken2, inst.ID) + if err != nil { + fatal("failed to get second installation token: %v", err) + } + fmt.Printf("Token 2: %s... Expires: %s\n", token2.Token[:10], token2.ExpiresAt.Format(time.RFC3339)) + + if token1.Token == token2.Token { + fmt.Println("Note: Both tokens are identical (GitHub may cache short-lived tokens)") + } else { + fmt.Println("Tokens are different — refresh produced a new token.") + } + fmt.Println() + + // Step 6: Verify the new token works + fmt.Println("=== Step 6: Verifying refreshed token works ===") + err = verifyToken(token2.Token) + if err != nil { + fatal("refreshed token verification failed: %v", err) + } + fmt.Println("Refreshed token is valid and working.") + + fmt.Println("\n=== All steps passed. The token refresh flow works correctly. 
===") + fmt.Printf("\nFor reference, your Installation ID is: %d\n", inst.ID) +} + +func createJWT(appID, pemData string) (string, error) { + privateKey, err := jwt.ParseRSAPrivateKeyFromPEM([]byte(pemData)) + if err != nil { + return "", fmt.Errorf("invalid PEM key: %w", err) + } + + now := time.Now().Unix() + token := jwt.NewWithClaims(jwt.SigningMethodRS256, jwt.MapClaims{ + "iat": now, + "exp": now + (10 * 60), // 10 minutes + "iss": appID, + }) + + signed, err := token.SignedString(privateKey) + if err != nil { + return "", fmt.Errorf("failed to sign JWT: %w", err) + } + return signed, nil +} + +func listInstallations(jwtToken string) ([]installation, error) { + req, err := http.NewRequest("GET", "https://api.github.com/app/installations", nil) + if err != nil { + return nil, err + } + req.Header.Set("Authorization", "Bearer "+jwtToken) + req.Header.Set("Accept", "application/vnd.github+json") + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + body, _ := io.ReadAll(resp.Body) + if resp.StatusCode != 200 { + return nil, fmt.Errorf("HTTP %d: %s", resp.StatusCode, string(body)) + } + + var result []installation + if err := json.Unmarshal(body, &result); err != nil { + return nil, fmt.Errorf("failed to parse response: %w", err) + } + return result, nil +} + +func getInstallationToken(jwtToken string, installationID int) (*installationToken, error) { + url := fmt.Sprintf("https://api.github.com/app/installations/%d/access_tokens", installationID) + req, err := http.NewRequest("POST", url, nil) + if err != nil { + return nil, err + } + req.Header.Set("Authorization", "Bearer "+jwtToken) + req.Header.Set("Accept", "application/vnd.github+json") + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + body, _ := io.ReadAll(resp.Body) + if resp.StatusCode != http.StatusCreated { + return nil, fmt.Errorf("HTTP %d: %s", resp.StatusCode, string(body)) + 
} + + var token installationToken + if err := json.Unmarshal(body, &token); err != nil { + return nil, fmt.Errorf("failed to parse token response: %w", err) + } + return &token, nil +} + +func verifyToken(token string) error { + req, err := http.NewRequest("GET", "https://api.github.com/installation/repositories?per_page=1", nil) + if err != nil { + return err + } + req.Header.Set("Authorization", "Bearer "+token) + req.Header.Set("Accept", "application/vnd.github+json") + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + if resp.StatusCode != 200 { + body, _ := io.ReadAll(resp.Body) + return fmt.Errorf("HTTP %d: %s", resp.StatusCode, string(body)) + } + fmt.Printf(" HTTP 200 OK (X-RateLimit-Remaining: %s)\n", resp.Header.Get("X-RateLimit-Remaining")) + return nil +} + +func fatal(format string, args ...interface{}) { + fmt.Fprintf(os.Stderr, "FATAL: "+format+"\n", args...) + os.Exit(1) +} diff --git a/backend/plugins/github/token/refresh_api_client.go b/backend/plugins/github/token/refresh_api_client.go new file mode 100644 index 00000000000..1db6f691712 --- /dev/null +++ b/backend/plugins/github/token/refresh_api_client.go @@ -0,0 +1,115 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package token + +import ( + "bytes" + "context" + "encoding/json" + "net/http" + "net/url" + "time" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" +) + +type refreshApiClient struct { + endpoint string + client *http.Client + timeout time.Duration +} + +// newRefreshApiClientWithTransport creates a refreshApiClient using a specific +// http.RoundTripper instead of an existing *http.Client. This is critical for +// avoiding deadlocks: the caller's *http.Client may have a RefreshRoundTripper +// transport that re-enters GetToken() and deadlocks on the mutex. +func newRefreshApiClientWithTransport(endpoint string, transport http.RoundTripper) plugin.ApiClient { + if transport == nil { + transport = http.DefaultTransport + } + return &refreshApiClient{ + endpoint: endpoint, + client: &http.Client{Transport: transport}, + timeout: 10 * time.Second, + } +} + +func (c *refreshApiClient) SetData(name string, data interface{}) {} + +func (c *refreshApiClient) GetData(name string) interface{} { return nil } + +func (c *refreshApiClient) SetHeaders(headers map[string]string) {} + +func (c *refreshApiClient) SetBeforeFunction(callback plugin.ApiClientBeforeRequest) {} + +func (c *refreshApiClient) GetBeforeFunction() plugin.ApiClientBeforeRequest { return nil } + +func (c *refreshApiClient) SetAfterFunction(callback plugin.ApiClientAfterResponse) {} + +func (c *refreshApiClient) GetAfterFunction() plugin.ApiClientAfterResponse { return nil } + +func (c *refreshApiClient) Get(path string, query url.Values, headers http.Header) (*http.Response, errors.Error) { + return c.do(http.MethodGet, path, query, nil, headers) +} + +func (c *refreshApiClient) Post(path string, query url.Values, body interface{}, headers http.Header) (*http.Response, errors.Error) { + return c.do(http.MethodPost, path, query, body, headers) +} + +func (c *refreshApiClient) 
do(method, path string, query url.Values, body interface{}, headers http.Header) (*http.Response, errors.Error) { + uri, err := api.GetURIStringPointer(c.endpoint, path, query) + if err != nil { + return nil, err + } + + var reqBody *bytes.Reader + if body != nil { + payload, err := json.Marshal(body) + if err != nil { + return nil, errors.Convert(err) + } + reqBody = bytes.NewReader(payload) + } else { + reqBody = bytes.NewReader(nil) + } + + ctx, cancel := context.WithTimeout(context.Background(), c.timeout) + defer cancel() + + req, err := errors.Convert01(http.NewRequestWithContext(ctx, method, *uri, reqBody)) + if err != nil { + return nil, err + } + if body != nil { + req.Header.Set("Content-Type", "application/json") + } + + for name, values := range headers { + for _, value := range values { + req.Header.Add(name, value) + } + } + + res, err := errors.Convert01(c.client.Do(req)) + if err != nil { + return nil, err + } + return res, nil +} diff --git a/backend/plugins/github/token/refresh_api_client_test.go b/backend/plugins/github/token/refresh_api_client_test.go new file mode 100644 index 00000000000..60cdbba2f40 --- /dev/null +++ b/backend/plugins/github/token/refresh_api_client_test.go @@ -0,0 +1,106 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package token + +import ( + "io" + "net/http" + "net/http/httptest" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestRefreshApiClientPost(t *testing.T) { + var receivedMethod string + var receivedPath string + var receivedAuth string + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + receivedAuth = r.Header.Get("Authorization") + w.WriteHeader(http.StatusCreated) + w.Write([]byte(`{"token":"ghs_test123","expires_at":"2026-03-02T12:00:00Z"}`)) + })) + defer server.Close() + + client := newRefreshApiClientWithTransport(server.URL, server.Client().Transport) + + headers := http.Header{ + "Authorization": []string{"Bearer jwt_token_here"}, + } + resp, err := client.Post("/app/installations/123/access_tokens", nil, nil, headers) + + assert.Nil(t, err) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/app/installations/123/access_tokens", receivedPath) + assert.Equal(t, "Bearer jwt_token_here", receivedAuth) + + body, _ := io.ReadAll(resp.Body) + resp.Body.Close() + assert.Contains(t, string(body), "ghs_test123") +} + +func TestRefreshApiClientGet(t *testing.T) { + var receivedMethod string + var receivedQuery string + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedQuery = r.URL.Query().Get("page") + w.WriteHeader(http.StatusOK) + w.Write([]byte(`{"ok":true}`)) + })) + defer server.Close() + + client := newRefreshApiClientWithTransport(server.URL, server.Client().Transport) + + resp, err := client.Get("/test", map[string][]string{"page": {"2"}}, nil) + assert.Nil(t, err) + assert.Equal(t, http.StatusOK, resp.StatusCode) + assert.Equal(t, "GET", receivedMethod) + 
assert.Equal(t, "2", receivedQuery) + resp.Body.Close() +} + +func TestRefreshApiClientTimeout(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Sleep longer than the timeout + time.Sleep(2 * time.Second) + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + // Use a client without its own timeout so the context timeout is the only constraint + client := newRefreshApiClientWithTransport(server.URL, http.DefaultTransport) + // Override the timeout to something short for the test + client.(*refreshApiClient).timeout = 100 * time.Millisecond + + resp, err := client.Post("/slow", nil, nil, nil) + assert.NotNil(t, err) + if resp != nil { + resp.Body.Close() + } + // Verify the error is a deadline/context error + assert.True(t, strings.Contains(err.Error(), "deadline") || strings.Contains(err.Error(), "context"), + "expected deadline/context error, got: %s", err.Error()) +} diff --git a/backend/plugins/github/token/round_tripper.go b/backend/plugins/github/token/round_tripper.go index 45ba3e9a7b6..8868572dae6 100644 --- a/backend/plugins/github/token/round_tripper.go +++ b/backend/plugins/github/token/round_tripper.go @@ -26,6 +26,11 @@ import ( // On 401's the round tripper will: // - Force a refresh of the OAuth token via the TokenProvider // - Retry the original request with the new token +// +// Note: When active, the RefreshRoundTripper overwrites the Authorization header +// on every request, superseding any header previously set by SetupAuthentication. +// This is intentional — for connections using token refresh (OAuth or GitHub App), +// the round tripper is the single source of truth for the current bearer token. 
type RefreshRoundTripper struct { base http.RoundTripper tokenProvider *TokenProvider diff --git a/backend/plugins/github/token/round_tripper_test.go b/backend/plugins/github/token/round_tripper_test.go index e766adb8824..2748d59e118 100644 --- a/backend/plugins/github/token/round_tripper_test.go +++ b/backend/plugins/github/token/round_tripper_test.go @@ -24,6 +24,7 @@ import ( "testing" "time" + "github.com/apache/incubator-devlake/core/errors" "github.com/apache/incubator-devlake/helpers/pluginhelper/api" "github.com/apache/incubator-devlake/impls/logruslog" "github.com/apache/incubator-devlake/plugins/github/models" @@ -32,6 +33,36 @@ import ( "github.com/stretchr/testify/mock" ) +// testRSAKey is a throwaway 2048-bit RSA private key used only in tests. +// It is NOT a real credential. +const testRSAKey = `-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA1xJuX407giVTO/FY2pbp6bdB/XxaiPqAuvWIcEqabzq+d3ft +O7fGtbXSQrCdtxEQt5dHFKdJofHcGlKPDnq1BNjWM3/xLWsQWQPSwUZ9H1qy/nDI +GX+ciXmP8hbzoe5B1OXidAdrJUGWH3ox8Yp8OVd/yK9p34teCbzPnqVEc9tkgUT1 +94gHKLmvP28VefybFyGbYB3ujVIuA8Z26c4gQsyFzR2v1fVeDIu1e1afyH5WTFgr +EWaztOo6pI5stzH00U7fNMzULBuYQ+ufQ4iQ7Ewt7fK5yyQNkx3pX0o1OQ/aYxQr +hyBrHakxHBe78eq8rSR2KwSr5nDuYzIAOUhtnwIDAQABAoH/EknhZ6EkvRPR7G5e +bq4/NduuSriDcITwbWuuuFPufzgdIZVzlu8xHfPOT9152qE2aD7XahGUk85fwbXh +EeP64x/cA3FnmrAxPYfuUFnB0+i7KHGpzW2Wa/LqXlz3vS/UQePoDwl+xDYeUt2u +xALHoSxZdlTWv6HLhLmw7ge9QJLc/xQO9dC678c4Y4JTCRrEvhE+eZiDEXb6HL2D +uNMEwFqMLTxOurYKXE+iyzKg0e4D6oDkw6BC+vOUBnuH+9iKV6wnal24E1WnyhCa +vy7iHBoc8npeKYAHjU5wKDQiWPMT9DjYBRucHvSyEYTeS5eyQHed42PGv+Ss2IL4 +w0JZAoGBAPjxiEJwck29NyfElw5+/P0IKIE8BkuIOSC8JoGfo+JpKYWFIXaLRbK8 +qvOiLDotHgC5IxFZyN8pejVe7Zvif5PepR9qMcV3e6Uz2nmJqtp5gFlzjNUSzbeM +J6gqkejtwn7wJY3dwZhfbTTDDY7cZ9f3Cydarx+iu07unGCRT9DDAoGBAN0rG/F9 +tsOSwrgsnPwNeVPfJn2Zw6mvb1xuOoJl11tDnpS6tH50FBJ/TkERZak9+VZ761pq +CvuMjLtePgC31rsAcUEEt1OwYCxne8gNxuzl2mWKD98ActbFf96VM4Q68Jvntxl1 +eOMMnSA+/yjvhpxdVabm3180mRu9eBv6SaH1AoGBAMpwL6xHoMwS6L1QIr7JCZYC 
+gl3FoCDgIAS8vFuApFbDyd4oSvQJgZ49yo7g/DI66kEQTLIZXz4KjrTEA1lWsQRg +c8q+Isc/yK6pIirfhq6vS25yhr3m0p9GPCGGrKzMW/O5+fAJuxrbzwSu8WGRXmjD +HrDcD7kcLlGbvFLTGCLdAoGBALNID7W5Z16v6AItv++d6Hzxhh0IeRBi8s2lWO59 +KY6EiNcdZdSfuemoosGiHZuMbkMJ3qWDEnYI38e+xFoGrB0YZbYD4awIbF1yYWew +q1E7ncbznJvznCO3I0lF/uWwdXyb39PWYvECN5h9GI+RYrf7/MN3oRhm5boT43oi +cG/FAoGBALD1zSG9qrysLg8fw4dc4cs6dAHZhAszh47zw2WiMmUVWQZCbB+uhs44 +qGCAaer5KdnTqy9NdwW6rlcr4Y8jUFnMlMFA5HdmHkIfgTi+zi4Qf1mb9yzpJqnU +jKflsh1Lyyqv2KsoIMz4vjew+lCVn80FZaEEQ1q9tAkyu5m53w65 +-----END RSA PRIVATE KEY-----` + func TestRoundTripper401Refresh(t *testing.T) { mockRT := new(MockRoundTripper) client := &http.Client{Transport: mockRT} @@ -56,7 +87,7 @@ func TestRoundTripper401Refresh(t *testing.T) { } logger, _ := logruslog.NewDefaultLogger(logrus.New()) - tp := NewTokenProvider(conn, nil, client, logger) + tp := NewTokenProvider(conn, nil, client, logger, "") rt := NewRefreshRoundTripper(mockRT, tp) // Request @@ -100,3 +131,163 @@ func TestRoundTripper401Refresh(t *testing.T) { mockRT.AssertExpectations(t) } + +func TestRoundTripper401WithAppKeyRefresh(t *testing.T) { + mockRT := new(MockRoundTripper) + + expiry := time.Now().Add(10 * time.Minute) // Not expired (proactive refresh won't trigger) + conn := &models.GithubConnection{ + GithubConn: models.GithubConn{ + GithubAccessToken: models.GithubAccessToken{ + AccessToken: api.AccessToken{ + Token: "old_app_token", + }, + }, + TokenExpiresAt: &expiry, + }, + } + // Use tokens slice so GetToken returns the current token + conn.UpdateToken("old_app_token", "", &expiry, nil) + + // refreshFn simulates minting a new installation token + refreshCalled := 0 + tp := &TokenProvider{ + conn: conn, + refreshFn: func(tp *TokenProvider) errors.Error { + refreshCalled++ + newExpiry := time.Now().Add(1 * time.Hour) + tp.conn.UpdateToken("new_app_token", "", &newExpiry, nil) + return nil + }, + } + + rt := NewRefreshRoundTripper(mockRT, tp) + + req, _ := http.NewRequest("GET", 
"https://api.github.com/repos/test/test", nil) + + // 1. First call returns 401 + resp401 := &http.Response{ + StatusCode: 401, + Body: io.NopCloser(bytes.NewBufferString("Bad credentials")), + } + mockRT.On("RoundTrip", mock.MatchedBy(func(r *http.Request) bool { + return r.Header.Get("Authorization") == "Bearer old_app_token" + })).Return(resp401, nil).Once() + + // 2. Retry call with new token (after refreshFn runs) + resp200 := &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewBufferString(`{"full_name":"test/test"}`)), + } + mockRT.On("RoundTrip", mock.MatchedBy(func(r *http.Request) bool { + return r.Header.Get("Authorization") == "Bearer new_app_token" + })).Return(resp200, nil).Once() + + // Execute + resp, err := rt.RoundTrip(req) + assert.NoError(t, err) + assert.Equal(t, 200, resp.StatusCode) + assert.Equal(t, 1, refreshCalled, "refreshFn should have been called exactly once") + + body, _ := io.ReadAll(resp.Body) + assert.Equal(t, `{"full_name":"test/test"}`, string(body)) + + mockRT.AssertExpectations(t) +} + +// TestProactiveRefreshNoDeadlock verifies that when the RefreshRoundTripper wraps +// the same http.Client's transport, a proactive token refresh does not deadlock. +// This reproduces the real-world scenario: GetToken() holds the mutex, calls +// refreshGitHubAppInstallationToken, which makes an HTTP request. If that request +// goes through the RefreshRoundTripper (re-entering GetToken), it would deadlock. +// The fix is that the refresh uses baseTransport directly. +func TestProactiveRefreshNoDeadlock(t *testing.T) { + // Set up a mock transport that will serve both: + // 1. The installation token refresh POST + // 2. 
The actual API GET after refresh + mockRT := new(MockRoundTripper) + client := &http.Client{Transport: mockRT} + + // Token is expired — proactive refresh WILL trigger on GetToken() + expired := time.Now().Add(-1 * time.Minute) + conn := &models.GithubConnection{ + GithubConn: models.GithubConn{ + RestConnection: api.RestConnection{ + Endpoint: "https://api.github.com/", + }, + MultiAuth: api.MultiAuth{ + AuthMethod: models.AppKey, + }, + GithubAccessToken: models.GithubAccessToken{ + AccessToken: api.AccessToken{ + Token: "expired_ghs_token", + }, + }, + GithubAppKey: models.GithubAppKey{ + AppKey: api.AppKey{ + AppId: "12345", + SecretKey: testRSAKey, + }, + InstallationID: 99999, + }, + TokenExpiresAt: &expired, + }, + } + conn.UpdateToken("expired_ghs_token", "", &expired, nil) + + // Create the TokenProvider with baseTransport = mockRT (the unwrapped transport). + // This is what NewAppInstallationTokenProvider does: it captures client.Transport + // BEFORE the caller wraps it with RefreshRoundTripper. + tp := NewAppInstallationTokenProvider(conn, nil, client, nil, "") + + // Now wrap the client's transport with RefreshRoundTripper (simulating what + // CreateApiClient does). After this, client.Transport = RefreshRoundTripper, + // but tp.baseTransport still points to mockRT. 
+ rt := NewRefreshRoundTripper(mockRT, tp) + client.Transport = rt + + // Mock: installation token refresh POST (goes through baseTransport, not RT) + newExpiry := time.Now().Add(1 * time.Hour) + installTokenBody := `{"token":"new_ghs_token","expires_at":"` + newExpiry.Format(time.RFC3339) + `"}` + mockRT.On("RoundTrip", mock.MatchedBy(func(r *http.Request) bool { + return r.Method == "POST" && r.URL.Path == "/app/installations/99999/access_tokens" + })).Return(&http.Response{ + StatusCode: 201, + Body: io.NopCloser(bytes.NewBufferString(installTokenBody)), + }, nil).Once() + + // Mock: the actual API request (goes through RefreshRoundTripper → GetToken → mockRT) + mockRT.On("RoundTrip", mock.MatchedBy(func(r *http.Request) bool { + return r.Method == "GET" && r.URL.Path == "/repos/test/test" + })).Return(&http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewBufferString(`{"full_name":"test/test"}`)), + }, nil).Once() + + // Execute through the RefreshRoundTripper with a timeout to detect deadlocks. 
+ done := make(chan struct{}) + var resp *http.Response + var reqErr error + go func() { + req, _ := http.NewRequest("GET", "https://api.github.com/repos/test/test", nil) + resp, reqErr = rt.RoundTrip(req) + close(done) + }() + + select { + case <-done: + // Success — no deadlock + case <-time.After(5 * time.Second): + t.Fatal("DEADLOCK: RoundTrip did not complete within 5 seconds — " + + "the refresh call is likely going through RefreshRoundTripper instead of baseTransport") + } + + assert.NoError(t, reqErr) + assert.Equal(t, 200, resp.StatusCode) + assert.Equal(t, "new_ghs_token", conn.Token, "token should have been refreshed") + + body, _ := io.ReadAll(resp.Body) + assert.Equal(t, `{"full_name":"test/test"}`, string(body)) + + mockRT.AssertExpectations(t) +} diff --git a/backend/plugins/github/token/token_provider.go b/backend/plugins/github/token/token_provider.go index ed3fa2256e4..6af31aaf5af 100644 --- a/backend/plugins/github/token/token_provider.go +++ b/backend/plugins/github/token/token_provider.go @@ -40,23 +40,57 @@ const ( ) type TokenProvider struct { - conn *models.GithubConnection - dal dal.Dal - httpClient *http.Client - logger log.Logger - mu sync.Mutex - refreshURL string + conn *models.GithubConnection + dal dal.Dal + encryptionSecret string + httpClient *http.Client + baseTransport http.RoundTripper // original transport, before RefreshRoundTripper wrapping + logger log.Logger + mu sync.Mutex + refreshURL string + refreshFn func(*TokenProvider) errors.Error } // NewTokenProvider creates a TokenProvider for the given GitHub connection using // the provided DAL, HTTP client, and logger, and returns a pointer to it. 
-func NewTokenProvider(conn *models.GithubConnection, d dal.Dal, client *http.Client, logger log.Logger) *TokenProvider { +func NewTokenProvider(conn *models.GithubConnection, d dal.Dal, client *http.Client, logger log.Logger, encryptionSecret string) *TokenProvider { return &TokenProvider{ - conn: conn, - dal: d, - httpClient: client, - logger: logger, - refreshURL: "https://github.com/login/oauth/access_token", + conn: conn, + dal: d, + encryptionSecret: encryptionSecret, + httpClient: client, + logger: logger, + refreshURL: "https://github.com/login/oauth/access_token", + } +} + +// NewAppInstallationTokenProvider creates a TokenProvider that refreshes GitHub App installation tokens. +// IMPORTANT: Call this BEFORE wrapping the client's transport with RefreshRoundTripper, +// so that baseTransport captures the unwrapped transport and refresh calls don't deadlock. +func NewAppInstallationTokenProvider(conn *models.GithubConnection, d dal.Dal, client *http.Client, logger log.Logger, encryptionSecret string) *TokenProvider { + if logger != nil { + expiresStr := "unknown" + if conn.TokenExpiresAt != nil { + expiresStr = conn.TokenExpiresAt.Format(time.RFC3339) + } + logger.Info("Created AppInstallation token provider for connection %d (installation %d, token expires at %s)", + conn.ID, conn.InstallationID, expiresStr) + } + // Capture the transport now, before the caller wraps it with RefreshRoundTripper. + // This avoids a deadlock: refresh calls must bypass the RefreshRoundTripper that + // holds the TokenProvider mutex during GetToken(). 
+ baseTransport := client.Transport + if baseTransport == nil { + baseTransport = http.DefaultTransport + } + return &TokenProvider{ + conn: conn, + dal: d, + encryptionSecret: encryptionSecret, + httpClient: client, + baseTransport: baseTransport, + logger: logger, + refreshFn: refreshGitHubAppInstallationToken, } } @@ -65,6 +99,14 @@ func (tp *TokenProvider) GetToken() (string, errors.Error) { defer tp.mu.Unlock() if tp.needsRefresh() { + if tp.logger != nil { + expiresStr := "unknown" + if tp.conn.TokenExpiresAt != nil { + expiresStr = tp.conn.TokenExpiresAt.Format(time.RFC3339) + } + tp.logger.Info("Proactive token refresh triggered for connection %d (token expires at %s)", + tp.conn.ID, expiresStr) + } if err := tp.refreshToken(); err != nil { return "", err } @@ -73,10 +115,6 @@ func (tp *TokenProvider) GetToken() (string, errors.Error) { } func (tp *TokenProvider) needsRefresh() bool { - if tp.conn.RefreshToken == "" { - return false - } - buffer := DefaultRefreshBuffer if envBuffer := os.Getenv("GITHUB_TOKEN_REFRESH_BUFFER_MINUTES"); envBuffer != "" { if val, err := strconv.Atoi(envBuffer); err == nil { @@ -84,6 +122,16 @@ func (tp *TokenProvider) needsRefresh() bool { } } + if tp.refreshFn != nil { + if tp.conn.TokenExpiresAt == nil { + return false + } + return time.Now().Add(buffer).After(*tp.conn.TokenExpiresAt) + } + + if tp.conn.RefreshToken == "" { + return false + } if tp.conn.TokenExpiresAt == nil { return false } @@ -91,6 +139,9 @@ func (tp *TokenProvider) needsRefresh() bool { } func (tp *TokenProvider) refreshToken() errors.Error { + if tp.refreshFn != nil { + return tp.refreshFn(tp) + } tp.logger.Info("Refreshing GitHub token for connection %d", tp.conn.ID) data := map[string]string{ @@ -159,13 +210,12 @@ func (tp *TokenProvider) refreshToken() errors.Error { ) if tp.dal != nil { - err := tp.dal.UpdateColumns(tp.conn, []dal.DalSet{ - {ColumnName: "token", Value: tp.conn.Token}, - {ColumnName: "refresh_token", Value: tp.conn.RefreshToken}, - 
{ColumnName: "token_expires_at", Value: tp.conn.TokenExpiresAt}, - {ColumnName: "refresh_token_expires_at", Value: tp.conn.RefreshTokenExpiresAt}, - }) - if err != nil { + // Manually encrypt and use UpdateColumns to persist only the token-related + // columns. We cannot use dal.Update (GORM Save) because it writes ALL fields + // including refresh_token_expires_at which may have Go zero time that MySQL + // rejects. We cannot use UpdateColumns with plaintext because it bypasses the + // GORM encdec serializer. So we encrypt manually and write the ciphertext. + if err := PersistEncryptedTokenColumns(tp.dal, tp.conn, tp.encryptionSecret, tp.logger, true); err != nil { tp.logger.Warn(err, "failed to persist refreshed token") } } @@ -184,8 +234,14 @@ func (tp *TokenProvider) ForceRefresh(oldToken string) errors.Error { // If the token has changed since the request was made, it means another thread // has already refreshed it. if tp.conn.Token != oldToken { + if tp.logger != nil { + tp.logger.Info("Skipping reactive token refresh for connection %d — token already changed by another goroutine", tp.conn.ID) + } return nil } + if tp.logger != nil { + tp.logger.Info("Reactive token refresh triggered for connection %d (received 401)", tp.conn.ID) + } return tp.refreshToken() } diff --git a/backend/plugins/github/token/token_provider_test.go b/backend/plugins/github/token/token_provider_test.go index 3319f55910f..a754ad63ce3 100644 --- a/backend/plugins/github/token/token_provider_test.go +++ b/backend/plugins/github/token/token_provider_test.go @@ -29,7 +29,6 @@ import ( "github.com/apache/incubator-devlake/core/errors" "github.com/apache/incubator-devlake/helpers/pluginhelper/api" "github.com/apache/incubator-devlake/impls/logruslog" - mockdal "github.com/apache/incubator-devlake/mocks/core/dal" "github.com/apache/incubator-devlake/plugins/github/models" "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" @@ -93,7 +92,7 @@ func TestTokenProviderConcurrency(t 
*testing.T) { } logger, _ := logruslog.NewDefaultLogger(logrus.New()) - tp := NewTokenProvider(conn, nil, client, logger) + tp := NewTokenProvider(conn, nil, client, logger, "") // Mock response for refresh respBody := `{"access_token":"new_token","refresh_token":"new_refresh_token","expires_in":3600,"refresh_token_expires_in":3600}` @@ -143,44 +142,108 @@ func TestConfigurableBuffer(t *testing.T) { assert.False(t, tp.needsRefresh()) } -func TestPersistenceFailure(t *testing.T) { - mockRT := new(MockRoundTripper) - client := &http.Client{Transport: mockRT} - mockDal := new(mockdal.Dal) +// fakeRefreshFn returns a refreshFn that updates the connection token to newToken +// and increments the call counter pointed to by count. +func fakeRefreshFn(newToken string, count *int) func(*TokenProvider) errors.Error { + return func(tp *TokenProvider) errors.Error { + *count++ + newExpiry := time.Now().Add(1 * time.Hour) + tp.conn.UpdateToken(newToken, "", &newExpiry, nil) + return nil + } +} + +func TestNeedsRefreshWithRefreshFn(t *testing.T) { + callCount := 0 + tp := &TokenProvider{ + conn: &models.GithubConnection{}, + refreshFn: fakeRefreshFn("unused", &callCount), + } + + // Token not expired — outside default 5m buffer + expiry1 := time.Now().Add(10 * time.Minute) + tp.conn.TokenExpiresAt = &expiry1 + assert.False(t, tp.needsRefresh(), "should not refresh when token is 10m from expiry") + + // Token inside 5m buffer + expiry2 := time.Now().Add(2 * time.Minute) + tp.conn.TokenExpiresAt = &expiry2 + assert.True(t, tp.needsRefresh(), "should refresh when token is 2m from expiry") + // Token already expired + expiry3 := time.Now().Add(-1 * time.Minute) + tp.conn.TokenExpiresAt = &expiry3 + assert.True(t, tp.needsRefresh(), "should refresh when token is expired") + + // TokenExpiresAt is nil — can't determine expiry, don't refresh (401 fallback covers this) + tp.conn.TokenExpiresAt = nil + assert.False(t, tp.needsRefresh(), "should not refresh when TokenExpiresAt is nil") + 
+ // Provider with neither refreshFn nor RefreshToken — should never refresh + tp2 := &TokenProvider{ + conn: &models.GithubConnection{}, + } + expiry4 := time.Now().Add(-1 * time.Minute) + tp2.conn.TokenExpiresAt = &expiry4 + assert.False(t, tp2.needsRefresh(), "should not refresh without refreshFn or RefreshToken") +} + +func TestAppKeyGetTokenTriggersRefresh(t *testing.T) { + callCount := 0 + expired := time.Now().Add(-1 * time.Minute) conn := &models.GithubConnection{ GithubConn: models.GithubConn{ - RefreshToken: "refresh_token", GithubAccessToken: models.GithubAccessToken{ AccessToken: api.AccessToken{ - Token: "old_token", + Token: "expired_token", }, }, - GithubAppKey: models.GithubAppKey{ - AppKey: api.AppKey{ - AppId: "123", - SecretKey: "secret", + TokenExpiresAt: &expired, + }, + } + + tp := &TokenProvider{ + conn: conn, + refreshFn: fakeRefreshFn("refreshed_app_token", &callCount), + } + + token, err := tp.GetToken() + assert.NoError(t, err) + assert.Equal(t, "refreshed_app_token", token) + assert.Equal(t, 1, callCount, "refreshFn should have been called exactly once") + + // Second call — token is now fresh, should not trigger refresh + token2, err := tp.GetToken() + assert.NoError(t, err) + assert.Equal(t, "refreshed_app_token", token2) + assert.Equal(t, 1, callCount, "refreshFn should not be called again for a fresh token") +} + +func TestAppKeyForceRefresh(t *testing.T) { + callCount := 0 + conn := &models.GithubConnection{ + GithubConn: models.GithubConn{ + GithubAccessToken: models.GithubAccessToken{ + AccessToken: api.AccessToken{ + Token: "old_app_token", }, }, }, } - logger, _ := logruslog.NewDefaultLogger(logrus.New()) - tp := NewTokenProvider(conn, mockDal, client, logger) - - // Mock response for refresh - respBody := `{"access_token":"new_token","refresh_token":"new_refresh_token","expires_in":3600,"refresh_token_expires_in":3600}` - resp := &http.Response{ - StatusCode: 200, - Body: io.NopCloser(bytes.NewBufferString(respBody)), + tp := 
&TokenProvider{ + conn: conn, + refreshFn: fakeRefreshFn("new_app_token", &callCount), } - mockRT.On("RoundTrip", mock.Anything).Return(resp, nil).Once() - // Mock DAL failure - mockDal.On("UpdateColumns", mock.Anything, mock.Anything, mock.AnythingOfType("[]dal.Clause")).Return(errors.Default.New("db error")) - err := tp.ForceRefresh("old_token") - assert.NoError(t, err) // Should not return error even if persistence fails + // ForceRefresh with matching old token — should trigger refresh + err := tp.ForceRefresh("old_app_token") + assert.NoError(t, err) + assert.Equal(t, 1, callCount) + assert.Equal(t, "new_app_token", conn.Token) - mockRT.AssertExpectations(t) - mockDal.AssertExpectations(t) + // ForceRefresh with stale old token — token has already changed, should be a no-op + err = tp.ForceRefresh("old_app_token") + assert.NoError(t, err) + assert.Equal(t, 1, callCount, "should not refresh when token has already changed") } From 207181102789050b3c4e7defaa62ac8f4f78b158 Mon Sep 17 00:00:00 2001 From: Klesh Wong Date: Thu, 12 Mar 2026 21:00:52 +0800 Subject: [PATCH 20/39] fix: cwe89 sql injection (#8762) --- backend/server/services/pushapi.go | 20 ++++++++++++++++++++ env.example | 1 + 2 files changed, 21 insertions(+) diff --git a/backend/server/services/pushapi.go b/backend/server/services/pushapi.go index deabe12b10d..d99d4e9067d 100644 --- a/backend/server/services/pushapi.go +++ b/backend/server/services/pushapi.go @@ -18,12 +18,32 @@ limitations under the License. package services import ( + "regexp" + "strings" + "github.com/apache/incubator-devlake/core/dal" "github.com/apache/incubator-devlake/core/errors" ) // InsertRow FIXME ... 
func InsertRow(table string, rows []map[string]interface{}) (int64, errors.Error) { + if !regexp.MustCompile(`^[a-zA-Z0-9_]+$`).MatchString(table) { + return 0, errors.BadInput.New("table name invalid") + } + + if allowedTables := cfg.GetString("PUSH_API_ALLOWED_TABLES"); allowedTables != "" { + allow := false + for _, t := range strings.Split(allowedTables, ",") { + if strings.TrimSpace(t) == table { + allow = true + break + } + } + if !allow { + return 0, errors.Forbidden.New("table name is not in the allowed list") + } + } + err := db.Create(rows, dal.From(table)) if err != nil { return 0, err diff --git a/env.example b/env.example index 58c89de1ac3..19acb7c94af 100755 --- a/env.example +++ b/env.example @@ -34,6 +34,7 @@ SKIP_SUBTASK_PROGRESS=false PORT=8080 MODE=release +# PUSH_API_ALLOWED_TABLES=table1,table2 NOTIFICATION_ENDPOINT= NOTIFICATION_SECRET= From f9b431c3bb0bdb81562830104d20c1a0cfa7def5 Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sun, 15 Mar 2026 18:41:10 +0800 Subject: [PATCH 21/39] feat(q-dev): add logging data ingestion and enrich Kiro dashboards (#8767) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(q-dev): add logging data ingestion and enrich Kiro dashboards Add support for ingesting S3 logging data (GenerateAssistantResponse and GenerateCompletions events) into new database tables, and enrich all three Kiro Grafana dashboards with additional metrics. 
Changes: - New models: QDevChatLog and QDevCompletionLog for logging event data - New extractor: s3_logging_extractor.go parses JSON.gz logging files - Updated S3 collector to also handle .json.gz files - Added logging S3 prefixes (GenerateAssistantResponse, GenerateCompletions) - New dashboard: "Kiro AI Activity Insights" with 10 panels including model usage distribution, active hours, conversation depth, feature adoption (Steering/Spec), file type usage, and prompt/response trends - Enriched "Kiro Code Metrics Dashboard" with DocGeneration, TestGeneration, and Dev (Agentic) metric panels - Fixed "Kiro Usage Dashboard" per-user table to sort by user_id - Migration script for new tables * fix(q-dev): use separate base path for logging S3 prefixes Logging data lives under a different S3 prefix ("logging/") than user report data ("user-report/"). Add LoggingBasePath option (defaults to "logging") so logging prefixes are constructed correctly. * fix(q-dev): auto-scan logging path without extra config Kiro exports to two well-known S3 prefixes in the same bucket: - user-report/AWSLogs/{accountId}/KiroLogs/ (CSV reports) - logging/AWSLogs/{accountId}/KiroLogs/ (interaction logs) When AccountId is set, automatically scan both paths. The "logging" prefix is hardcoded since it's a standard Kiro export convention. No additional configuration needed. * fix(q-dev): update scope tooltip to mention logging data scanning * fix(q-dev): fix scope ID routing and CSV/JSON file separation Three fixes: 1. Use *scopeId (catch-all) route pattern instead of :scopeId so scope IDs containing "/" (e.g. "034362076319/2026") work in URL paths 2. CSV extractor now filters for .csv files only, preventing it from trying to parse .json.gz logging files as CSV 3. Frontend scope API calls now encodeURIComponent(scopeId) for safe URL encoding * fix(q-dev): resolve *scopeId route conflict with dispatcher pattern The catch-all *scopeId route conflicts with *scopeId/latest-sync-state. 
Follow Jenkins/Bitbucket pattern: use a single *scopeId route with a GetScopeDispatcher that checks for /latest-sync-state suffix and dispatches accordingly. All scope handlers now TrimLeft "/" from scopeId. * fix(q-dev): use URL-safe scope ID format (underscore separator) Scope IDs like "034362076319/2026" break URL routing because "/" is a path separator. Change ID format to "034362076319_2026" (underscore) when AccountId is set. The Prefix field still uses "/" for S3 path matching. Revert to standard :scopeId routes since IDs are now safe. Note: existing scopes need to be recreated after this change. * fix(q-dev): use NoPKModel instead of Model in archived logging models archived.Model only has ID+timestamps, missing RawDataOrigin fields (_raw_data_params etc.) that common.NoPKModel includes. This caused "Unknown column '_raw_data_params'" errors at runtime. * fix(q-dev): fix GROUP BY in per-user table to merge display_name variants Remove display_name from GROUP BY so same user_id with different display_name values gets merged. Use MAX(display_name) in SELECT. * fix(q-dev): normalize logging user IDs to match CSV short UUID format Logging data uses "d-{directoryId}.{UUID}" format while CSV user-report uses plain "{UUID}". Strip the "d-xxx." prefix so the same user maps to one user_id across both data sources. * fix(q-dev): normalize user IDs in CSV extractors and sort table DESC Apply normalizeUserId to both createUserReportData and createUserDataWithDisplayName so user_report CSV data also strips the "d-{directoryId}." prefix. Change per-user table sort to ORDER BY user_id DESC. 
* style(q-dev): fix gofmt formatting in chat_log models * perf(q-dev): parallelize logging S3 downloads and batch DB writes Optimize logging extractor performance: - 10 goroutine workers for parallel S3 file downloads - Batch 50 files per DB transaction instead of 1-per-file - sync.Map cache for display name resolution (avoid repeated IAM calls) - Parse records in memory during download, write all at once This should improve throughput from ~1.5 files/sec to ~15+ files/sec for typical logging file sizes. * fix(q-dev): check tx.Rollback error return to satisfy errcheck lint * feat(q-dev): add per-user model usage table and models column Add "Per-User Model Usage" table (panel 11) showing each user's request count and avg prompt/response length per model_id. Also add "Models Used" column to the Per-User Activity table. * fix(q-dev): remove per-user model usage table, keep models column only * feat(q-dev): add Kiro Executive Dashboard with cross-source analytics New dashboard "Kiro Executive Dashboard" with 12 panels covering: - KPIs: WAU, credits efficiency, acceptance rate, steering adoption - Trends: weekly active users, new vs returning users - Adoption funnel: Chat→Inline→CodeFix→Review→DocGen→TestGen→Agentic→Steering→Spec - Cost: credits pace vs projected monthly, idle power users - Quality: acceptance rate trends, code review findings, test generation - Efficiency: per-user productivity table with credits/line ratio Correlates data across user_report (credits), user_data (code metrics), and chat_log (interaction patterns) for holistic Kiro usage insights. * fix(q-dev): fix pie charts to show per-row slices instead of single total Set reduceOptions.values=true so Grafana treats each SQL result row as a separate pie slice. Fixes Model Usage Distribution, File Type Usage, Kiro Feature Adoption, and Active File Types pie charts. 
* fix(q-dev): cast Hour to string for Active Hours bar chart x-axis * fix(q-dev): fix pie chart single-slice and GROUP BY display_name issues 1. qdev_user_report Panel 4 (Subscription Tier Distribution): set reduceOptions.values=true to show per-tier slices 2. qdev_user_data Panel 6 (User Interactions): remove display_name from GROUP BY, use MAX(display_name) to merge same user * fix(q-dev): prevent data inflation in user_report JOIN user_data user_report has multiple rows per (user_id, date) due to client_type (KIRO_IDE, KIRO_CLI), but user_data has only one row per (user_id, date). A direct JOIN causes user_data metrics to be counted multiple times. Fix: pre-aggregate user_report by (user_id, date) in a subquery before joining, so the JOIN is always 1:1. Affects: Credits Efficiency stat and User Productivity table. --- backend/plugins/q_dev/api/s3_slice_api.go | 41 - backend/plugins/q_dev/impl/impl.go | 20 +- backend/plugins/q_dev/impl/impl_test.go | 4 +- backend/plugins/q_dev/models/chat_log.go | 51 + .../plugins/q_dev/models/completion_log.go | 43 + .../20260314_add_logging_tables.go | 43 + .../migrationscripts/archived/chat_log.go | 50 + .../archived/completion_log.go | 42 + .../q_dev/models/migrationscripts/register.go | 1 + backend/plugins/q_dev/models/s3_slice.go | 11 +- .../plugins/q_dev/tasks/s3_data_extractor.go | 14 +- .../plugins/q_dev/tasks/s3_file_collector.go | 6 +- .../q_dev/tasks/s3_logging_extractor.go | 438 ++++++++ .../src/plugins/register/q-dev/data-scope.tsx | 2 +- grafana/dashboards/qdev_executive.json | 958 ++++++++++++++++++ grafana/dashboards/qdev_logging.json | 800 +++++++++++++++ grafana/dashboards/qdev_user_data.json | 389 ++++++- grafana/dashboards/qdev_user_report.json | 8 +- 18 files changed, 2859 insertions(+), 62 deletions(-) create mode 100644 backend/plugins/q_dev/models/chat_log.go create mode 100644 backend/plugins/q_dev/models/completion_log.go create mode 100644 
backend/plugins/q_dev/models/migrationscripts/20260314_add_logging_tables.go create mode 100644 backend/plugins/q_dev/models/migrationscripts/archived/chat_log.go create mode 100644 backend/plugins/q_dev/models/migrationscripts/archived/completion_log.go create mode 100644 backend/plugins/q_dev/tasks/s3_logging_extractor.go create mode 100644 grafana/dashboards/qdev_executive.json create mode 100644 grafana/dashboards/qdev_logging.json diff --git a/backend/plugins/q_dev/api/s3_slice_api.go b/backend/plugins/q_dev/api/s3_slice_api.go index 73708130294..158aa4ef785 100644 --- a/backend/plugins/q_dev/api/s3_slice_api.go +++ b/backend/plugins/q_dev/api/s3_slice_api.go @@ -60,62 +60,21 @@ func GetScopeList(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, er } // GetScope returns a single scope record -// @Summary get a Q Developer scope -// @Description get a Q Developer scope -// @Tags plugins/q_dev -// @Param connectionId path int true "connection ID" -// @Param scopeId path string true "scope id" -// @Param blueprints query bool false "include blueprint references" -// @Success 200 {object} ScopeDetail -// @Failure 400 {object} shared.ApiBody "Bad Request" -// @Failure 500 {object} shared.ApiBody "Internal Error" -// @Router /plugins/q_dev/connections/{connectionId}/scopes/{scopeId} [GET] func GetScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { return dsHelper.ScopeApi.GetScopeDetail(input) } // PatchScope updates a scope record -// @Summary patch a Q Developer scope -// @Description patch a Q Developer scope -// @Tags plugins/q_dev -// @Accept application/json -// @Param connectionId path int true "connection ID" -// @Param scopeId path string true "scope id" -// @Param scope body models.QDevS3Slice true "json" -// @Success 200 {object} models.QDevS3Slice -// @Failure 400 {object} shared.ApiBody "Bad Request" -// @Failure 500 {object} shared.ApiBody "Internal Error" -// @Router 
/plugins/q_dev/connections/{connectionId}/scopes/{scopeId} [PATCH] func PatchScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { return dsHelper.ScopeApi.Patch(input) } // DeleteScope removes a scope and optionally associated data. -// @Summary delete a Q Developer scope -// @Description delete Q Developer scope data -// @Tags plugins/q_dev -// @Param connectionId path int true "connection ID" -// @Param scopeId path string true "scope id" -// @Param delete_data_only query bool false "Only delete scope data" -// @Success 200 -// @Failure 400 {object} shared.ApiBody "Bad Request" -// @Failure 409 {object} srvhelper.DsRefs "References exist to this scope" -// @Failure 500 {object} shared.ApiBody "Internal Error" -// @Router /plugins/q_dev/connections/{connectionId}/scopes/{scopeId} [DELETE] func DeleteScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { return dsHelper.ScopeApi.Delete(input) } // GetScopeLatestSyncState returns scope sync state info -// @Summary latest sync state for a Q Developer scope -// @Description get latest sync state for a Q Developer scope -// @Tags plugins/q_dev -// @Param connectionId path int true "connection ID" -// @Param scopeId path string true "scope id" -// @Success 200 -// @Failure 400 {object} shared.ApiBody "Bad Request" -// @Failure 500 {object} shared.ApiBody "Internal Error" -// @Router /plugins/q_dev/connections/{connectionId}/scopes/{scopeId}/latest-sync-state [GET] func GetScopeLatestSyncState(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { return dsHelper.ScopeApi.GetScopeLatestSyncState(input) } diff --git a/backend/plugins/q_dev/impl/impl.go b/backend/plugins/q_dev/impl/impl.go index e38fe7ad73c..3c6e1ed1644 100644 --- a/backend/plugins/q_dev/impl/impl.go +++ b/backend/plugins/q_dev/impl/impl.go @@ -58,6 +58,8 @@ func (p QDev) GetTablesInfo() []dal.Tabler { &models.QDevS3FileMeta{}, &models.QDevS3Slice{}, &models.QDevUserReport{}, + 
&models.QDevChatLog{}, + &models.QDevCompletionLog{}, } } @@ -85,6 +87,7 @@ func (p QDev) SubTaskMetas() []plugin.SubTaskMeta { return []plugin.SubTaskMeta{ tasks.CollectQDevS3FilesMeta, tasks.ExtractQDevS3DataMeta, + tasks.ExtractQDevLoggingDataMeta, } } @@ -127,10 +130,21 @@ func (p QDev) PrepareTaskData(taskCtx plugin.TaskContext, options map[string]int if op.Month != nil { timePart = fmt.Sprintf("%04d/%02d", op.Year, *op.Month) } - base := fmt.Sprintf("%s/AWSLogs/%s/KiroLogs", op.BasePath, op.AccountId) + // Kiro exports data to two well-known S3 prefixes: + // {basePath}/AWSLogs/{accountId}/KiroLogs/ — user report CSVs + // logging/AWSLogs/{accountId}/KiroLogs/ — interaction logs (JSON.gz) + // When basePath is empty, default to "user-report" for CSV data. + reportBase := op.BasePath + if reportBase == "" { + reportBase = "user-report" + } + csvBase := fmt.Sprintf("%s/AWSLogs/%s/KiroLogs", reportBase, op.AccountId) + logBase := fmt.Sprintf("logging/AWSLogs/%s/KiroLogs", op.AccountId) s3Prefixes = []string{ - fmt.Sprintf("%s/by_user_analytic/%s/%s", base, region, timePart), - fmt.Sprintf("%s/user_report/%s/%s", base, region, timePart), + fmt.Sprintf("%s/by_user_analytic/%s/%s", csvBase, region, timePart), + fmt.Sprintf("%s/user_report/%s/%s", csvBase, region, timePart), + fmt.Sprintf("%s/GenerateAssistantResponse/%s/%s", logBase, region, timePart), + fmt.Sprintf("%s/GenerateCompletions/%s/%s", logBase, region, timePart), } } else { // Legacy scope: use S3Prefix directly diff --git a/backend/plugins/q_dev/impl/impl_test.go b/backend/plugins/q_dev/impl/impl_test.go index e61b5325162..7153617ab9a 100644 --- a/backend/plugins/q_dev/impl/impl_test.go +++ b/backend/plugins/q_dev/impl/impl_test.go @@ -34,11 +34,11 @@ func TestQDev_BasicPluginMethods(t *testing.T) { // Test table info tables := plugin.GetTablesInfo() - assert.Len(t, tables, 5) + assert.Len(t, tables, 7) // Test subtask metas subtasks := plugin.SubTaskMetas() - assert.Len(t, subtasks, 2) + assert.Len(t, 
subtasks, 3) // Test API resources apiResources := plugin.ApiResources() diff --git a/backend/plugins/q_dev/models/chat_log.go b/backend/plugins/q_dev/models/chat_log.go new file mode 100644 index 00000000000..06679c515b6 --- /dev/null +++ b/backend/plugins/q_dev/models/chat_log.go @@ -0,0 +1,51 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package models + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/common" +) + +// QDevChatLog stores parsed data from GenerateAssistantResponse logging events +type QDevChatLog struct { + common.NoPKModel + ConnectionId uint64 `gorm:"primaryKey"` + ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` + RequestId string `gorm:"primaryKey;type:varchar(255)" json:"requestId"` + UserId string `gorm:"index;type:varchar(255)" json:"userId"` + DisplayName string `gorm:"type:varchar(255)" json:"displayName"` + Timestamp time.Time `gorm:"index" json:"timestamp"` + ChatTriggerType string `gorm:"type:varchar(50)" json:"chatTriggerType"` + HasCustomization bool `json:"hasCustomization"` + ConversationId string `gorm:"type:varchar(255)" json:"conversationId"` + UtteranceId string `gorm:"type:varchar(255)" json:"utteranceId"` + ModelId string `gorm:"type:varchar(100)" json:"modelId"` + PromptLength int `json:"promptLength"` + ResponseLength int `json:"responseLength"` + OpenFileCount int `json:"openFileCount"` + ActiveFileName string `gorm:"type:varchar(512)" json:"activeFileName"` + ActiveFileExtension string `gorm:"type:varchar(50)" json:"activeFileExtension"` + HasSteering bool `json:"hasSteering"` + IsSpecMode bool `json:"isSpecMode"` +} + +func (QDevChatLog) TableName() string { + return "_tool_q_dev_chat_log" +} diff --git a/backend/plugins/q_dev/models/completion_log.go b/backend/plugins/q_dev/models/completion_log.go new file mode 100644 index 00000000000..0d0e0404ce8 --- /dev/null +++ b/backend/plugins/q_dev/models/completion_log.go @@ -0,0 +1,43 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/common" +) + +// QDevCompletionLog stores parsed data from GenerateCompletions logging events +type QDevCompletionLog struct { + common.NoPKModel + ConnectionId uint64 `gorm:"primaryKey"` + ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` + RequestId string `gorm:"primaryKey;type:varchar(255)" json:"requestId"` + UserId string `gorm:"index;type:varchar(255)" json:"userId"` + DisplayName string `gorm:"type:varchar(255)" json:"displayName"` + Timestamp time.Time `gorm:"index" json:"timestamp"` + FileName string `gorm:"type:varchar(512)" json:"fileName"` + FileExtension string `gorm:"type:varchar(50)" json:"fileExtension"` + HasCustomization bool `json:"hasCustomization"` + CompletionsCount int `json:"completionsCount"` +} + +func (QDevCompletionLog) TableName() string { + return "_tool_q_dev_completion_log" +} diff --git a/backend/plugins/q_dev/models/migrationscripts/20260314_add_logging_tables.go b/backend/plugins/q_dev/models/migrationscripts/20260314_add_logging_tables.go new file mode 100644 index 00000000000..cbd5943ecd2 --- /dev/null +++ b/backend/plugins/q_dev/models/migrationscripts/20260314_add_logging_tables.go @@ -0,0 +1,43 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/helpers/migrationhelper" + "github.com/apache/incubator-devlake/plugins/q_dev/models/migrationscripts/archived" +) + +type addLoggingTables struct{} + +func (*addLoggingTables) Up(basicRes context.BasicRes) errors.Error { + return migrationhelper.AutoMigrateTables( + basicRes, + &archived.QDevChatLog{}, + &archived.QDevCompletionLog{}, + ) +} + +func (*addLoggingTables) Version() uint64 { + return 20260314000001 +} + +func (*addLoggingTables) Name() string { + return "Add chat_log and completion_log tables for Kiro logging data" +} diff --git a/backend/plugins/q_dev/models/migrationscripts/archived/chat_log.go b/backend/plugins/q_dev/models/migrationscripts/archived/chat_log.go new file mode 100644 index 00000000000..8278f52ff1e --- /dev/null +++ b/backend/plugins/q_dev/models/migrationscripts/archived/chat_log.go @@ -0,0 +1,50 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package archived + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/migrationscripts/archived" +) + +type QDevChatLog struct { + archived.NoPKModel + ConnectionId uint64 `gorm:"primaryKey"` + ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` + RequestId string `gorm:"primaryKey;type:varchar(255)" json:"requestId"` + UserId string `gorm:"index;type:varchar(255)" json:"userId"` + DisplayName string `gorm:"type:varchar(255)" json:"displayName"` + Timestamp time.Time `gorm:"index" json:"timestamp"` + ChatTriggerType string `gorm:"type:varchar(50)" json:"chatTriggerType"` + HasCustomization bool `json:"hasCustomization"` + ConversationId string `gorm:"type:varchar(255)" json:"conversationId"` + UtteranceId string `gorm:"type:varchar(255)" json:"utteranceId"` + ModelId string `gorm:"type:varchar(100)" json:"modelId"` + PromptLength int `json:"promptLength"` + ResponseLength int `json:"responseLength"` + OpenFileCount int `json:"openFileCount"` + ActiveFileName string `gorm:"type:varchar(512)" json:"activeFileName"` + ActiveFileExtension string `gorm:"type:varchar(50)" json:"activeFileExtension"` + HasSteering bool `json:"hasSteering"` + IsSpecMode bool `json:"isSpecMode"` +} + +func (QDevChatLog) TableName() string { + return "_tool_q_dev_chat_log" +} diff --git a/backend/plugins/q_dev/models/migrationscripts/archived/completion_log.go b/backend/plugins/q_dev/models/migrationscripts/archived/completion_log.go new file mode 100644 index 00000000000..035c13ef2e0 --- /dev/null +++ 
b/backend/plugins/q_dev/models/migrationscripts/archived/completion_log.go @@ -0,0 +1,42 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package archived + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/migrationscripts/archived" +) + +type QDevCompletionLog struct { + archived.NoPKModel + ConnectionId uint64 `gorm:"primaryKey"` + ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` + RequestId string `gorm:"primaryKey;type:varchar(255)" json:"requestId"` + UserId string `gorm:"index;type:varchar(255)" json:"userId"` + DisplayName string `gorm:"type:varchar(255)" json:"displayName"` + Timestamp time.Time `gorm:"index" json:"timestamp"` + FileName string `gorm:"type:varchar(512)" json:"fileName"` + FileExtension string `gorm:"type:varchar(50)" json:"fileExtension"` + HasCustomization bool `json:"hasCustomization"` + CompletionsCount int `json:"completionsCount"` +} + +func (QDevCompletionLog) TableName() string { + return "_tool_q_dev_completion_log" +} diff --git a/backend/plugins/q_dev/models/migrationscripts/register.go b/backend/plugins/q_dev/models/migrationscripts/register.go index 9c68ae8f815..5480d5eaf29 100644 --- a/backend/plugins/q_dev/models/migrationscripts/register.go +++ 
b/backend/plugins/q_dev/models/migrationscripts/register.go @@ -35,5 +35,6 @@ func All() []plugin.MigrationScript { new(addAccountIdToS3Slice), new(fixDedupUserTables), new(resetS3FileMetaProcessed), + new(addLoggingTables), } } diff --git a/backend/plugins/q_dev/models/s3_slice.go b/backend/plugins/q_dev/models/s3_slice.go index e918258a908..19ecd6920bd 100644 --- a/backend/plugins/q_dev/models/s3_slice.go +++ b/backend/plugins/q_dev/models/s3_slice.go @@ -99,7 +99,16 @@ func (s *QDevS3Slice) normalize(strict bool) error { } if s.Id == "" { - s.Id = s.Prefix + if s.AccountId != "" { + // Use URL-safe ID: account_year or account_year_month + if s.Month != nil { + s.Id = fmt.Sprintf("%s_%04d_%02d", s.AccountId, s.Year, *s.Month) + } else { + s.Id = fmt.Sprintf("%s_%04d", s.AccountId, s.Year) + } + } else { + s.Id = s.Prefix + } } if s.AccountId != "" { diff --git a/backend/plugins/q_dev/tasks/s3_data_extractor.go b/backend/plugins/q_dev/tasks/s3_data_extractor.go index 1cf2a9f2ef5..da29bca07fe 100644 --- a/backend/plugins/q_dev/tasks/s3_data_extractor.go +++ b/backend/plugins/q_dev/tasks/s3_data_extractor.go @@ -40,10 +40,11 @@ func ExtractQDevS3Data(taskCtx plugin.SubTaskContext) errors.Error { data := taskCtx.GetData().(*QDevTaskData) db := taskCtx.GetDal() - // 查询未处理的文件元数据 + // 查询未处理的CSV文件元数据(排除.json.gz日志文件) cursor, err := db.Cursor( dal.From(&models.QDevS3FileMeta{}), - dal.Where("connection_id = ? AND processed = ?", data.Options.ConnectionId, false), + dal.Where("connection_id = ? AND processed = ? AND file_name LIKE ?", + data.Options.ConnectionId, false, "%.csv"), ) if err != nil { return errors.Default.Wrap(err, "failed to get file metadata cursor") @@ -202,8 +203,8 @@ func createUserReportData(logger interface { } } - // UserId - report.UserId = getStringField(fieldMap, "UserId") + // UserId (normalize to strip "d-{directoryId}." 
prefix if present) + report.UserId = normalizeUserId(getStringField(fieldMap, "UserId")) if report.UserId == "" { return nil, errors.Default.New("UserId not found in CSV record") } @@ -303,11 +304,12 @@ func createUserDataWithDisplayName(logger interface { var err error var ok bool - // 设置UserId - userData.UserId, ok = fieldMap["UserId"] + // 设置UserId (normalize to strip "d-{directoryId}." prefix if present) + rawUserId, ok := fieldMap["UserId"] if !ok { return nil, errors.Default.New("UserId not found in CSV record") } + userData.UserId = normalizeUserId(rawUserId) // 设置DisplayName (new functionality) userData.DisplayName = resolveDisplayName(logger, userData.UserId, identityClient) diff --git a/backend/plugins/q_dev/tasks/s3_file_collector.go b/backend/plugins/q_dev/tasks/s3_file_collector.go index 9d40919ae76..1ab4f8f0aa7 100644 --- a/backend/plugins/q_dev/tasks/s3_file_collector.go +++ b/backend/plugins/q_dev/tasks/s3_file_collector.go @@ -59,9 +59,9 @@ func CollectQDevS3Files(taskCtx plugin.SubTaskContext) errors.Error { } for _, object := range result.Contents { - // Only process CSV files - if !strings.HasSuffix(*object.Key, ".csv") { - taskCtx.GetLogger().Debug("Skipping non-CSV file: %s", *object.Key) + // Only process CSV and JSON.gz files + if !strings.HasSuffix(*object.Key, ".csv") && !strings.HasSuffix(*object.Key, ".json.gz") { + taskCtx.GetLogger().Debug("Skipping unsupported file: %s", *object.Key) continue } diff --git a/backend/plugins/q_dev/tasks/s3_logging_extractor.go b/backend/plugins/q_dev/tasks/s3_logging_extractor.go new file mode 100644 index 00000000000..df55a663b13 --- /dev/null +++ b/backend/plugins/q_dev/tasks/s3_logging_extractor.go @@ -0,0 +1,438 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "compress/gzip" + "encoding/json" + "path/filepath" + "strings" + "sync" + "time" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/plugins/q_dev/models" + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/service/s3" +) + +var _ plugin.SubTaskEntryPoint = ExtractQDevLoggingData + +const ( + loggingBatchSize = 50 // number of files to process per DB transaction + s3DownloadWorkers = 10 // parallel S3 download goroutines + s3DownloadChanSize = 20 // buffered channel size for download results +) + +// downloadResult holds the parsed records from one S3 file +type downloadResult struct { + FileMeta *models.QDevS3FileMeta + ChatLogs []*models.QDevChatLog + CompLogs []*models.QDevCompletionLog + Err error +} + +// ExtractQDevLoggingData extracts logging data from S3 JSON.gz files +func ExtractQDevLoggingData(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*QDevTaskData) + db := taskCtx.GetDal() + + cursor, err := db.Cursor( + dal.From(&models.QDevS3FileMeta{}), + dal.Where("connection_id = ? AND processed = ? 
AND file_name LIKE ?", + data.Options.ConnectionId, false, "%.json.gz"), + ) + if err != nil { + return errors.Default.Wrap(err, "failed to get logging file metadata cursor") + } + defer cursor.Close() + + // Collect all file metas first + var fileMetas []*models.QDevS3FileMeta + for cursor.Next() { + fm := &models.QDevS3FileMeta{} + if err := db.Fetch(cursor, fm); err != nil { + return errors.Default.Wrap(err, "failed to fetch file metadata") + } + fileMetas = append(fileMetas, fm) + } + + if len(fileMetas) == 0 { + return nil + } + + taskCtx.SetProgress(0, len(fileMetas)) + taskCtx.GetLogger().Info("Processing %d logging files with %d workers", len(fileMetas), s3DownloadWorkers) + + // Display name cache to avoid repeated IAM calls + displayNameCache := &sync.Map{} + + // Process in batches + for batchStart := 0; batchStart < len(fileMetas); batchStart += loggingBatchSize { + batchEnd := batchStart + loggingBatchSize + if batchEnd > len(fileMetas) { + batchEnd = len(fileMetas) + } + batch := fileMetas[batchStart:batchEnd] + + // Parallel download and parse + results := parallelDownloadAndParse(taskCtx, data, batch, displayNameCache) + + // Check for download errors + for _, r := range results { + if r.Err != nil { + return errors.Default.Wrap(errors.Convert(r.Err), + "failed to download/parse "+r.FileMeta.FileName) + } + } + + // Batch write to DB in a single transaction + tx := db.Begin() + var txErr errors.Error + for _, r := range results { + for _, chatLog := range r.ChatLogs { + if txErr = tx.CreateOrUpdate(chatLog); txErr != nil { + break + } + } + if txErr != nil { + break + } + for _, compLog := range r.CompLogs { + if txErr = tx.CreateOrUpdate(compLog); txErr != nil { + break + } + } + if txErr != nil { + break + } + r.FileMeta.Processed = true + now := time.Now() + r.FileMeta.ProcessedTime = &now + if txErr = tx.Update(r.FileMeta); txErr != nil { + break + } + } + if txErr != nil { + if rbErr := tx.Rollback(); rbErr != nil { + 
taskCtx.GetLogger().Error(rbErr, "failed to rollback transaction") + } + return errors.Default.Wrap(txErr, "failed to write logging batch") + } + if err := tx.Commit(); err != nil { + return errors.Default.Wrap(err, "failed to commit batch") + } + + taskCtx.IncProgress(len(batch)) + } + + return nil +} + +// parallelDownloadAndParse downloads and parses S3 files concurrently +func parallelDownloadAndParse( + taskCtx plugin.SubTaskContext, + data *QDevTaskData, + fileMetas []*models.QDevS3FileMeta, + displayNameCache *sync.Map, +) []downloadResult { + results := make([]downloadResult, len(fileMetas)) + jobs := make(chan int, s3DownloadChanSize) + var wg sync.WaitGroup + + // Start workers + for w := 0; w < s3DownloadWorkers; w++ { + wg.Add(1) + go func() { + defer wg.Done() + for idx := range jobs { + fm := fileMetas[idx] + result := downloadAndParseFile(taskCtx, data, fm, displayNameCache) + results[idx] = result + } + }() + } + + // Send jobs + for i := range fileMetas { + jobs <- i + } + close(jobs) + wg.Wait() + + return results +} + +// downloadAndParseFile downloads one S3 file and parses it into model records +func downloadAndParseFile( + taskCtx plugin.SubTaskContext, + data *QDevTaskData, + fileMeta *models.QDevS3FileMeta, + displayNameCache *sync.Map, +) downloadResult { + result := downloadResult{FileMeta: fileMeta} + + getResult, err := data.S3Client.S3.GetObject(&s3.GetObjectInput{ + Bucket: aws.String(data.S3Client.Bucket), + Key: aws.String(fileMeta.S3Path), + }) + if err != nil { + result.Err = err + return result + } + defer getResult.Body.Close() + + gzReader, err := gzip.NewReader(getResult.Body) + if err != nil { + result.Err = err + return result + } + defer gzReader.Close() + + var logFile loggingFile + if err := json.NewDecoder(gzReader).Decode(&logFile); err != nil { + result.Err = err + return result + } + + isChatLog := strings.Contains(fileMeta.S3Path, "GenerateAssistantResponse") + + for _, rawRecord := range logFile.Records { + if 
isChatLog { + chatLog, err := parseChatRecord(rawRecord, fileMeta, data.IdentityClient, displayNameCache) + if err != nil { + result.Err = err + return result + } + if chatLog != nil { + result.ChatLogs = append(result.ChatLogs, chatLog) + } + } else { + compLog, err := parseCompletionRecord(rawRecord, fileMeta, data.IdentityClient, displayNameCache) + if err != nil { + result.Err = err + return result + } + if compLog != nil { + result.CompLogs = append(result.CompLogs, compLog) + } + } + } + + return result +} + +// cachedResolveDisplayName resolves display name with caching +func cachedResolveDisplayName(userId string, identityClient UserDisplayNameResolver, cache *sync.Map) string { + if v, ok := cache.Load(userId); ok { + return v.(string) + } + if identityClient == nil { + cache.Store(userId, userId) + return userId + } + displayName, err := identityClient.ResolveUserDisplayName(userId) + if err != nil || displayName == "" { + cache.Store(userId, userId) + return userId + } + cache.Store(userId, displayName) + return displayName +} + +// JSON structures for logging data + +type loggingFile struct { + Records []json.RawMessage `json:"records"` +} + +type chatLogRecord struct { + Request *chatLogRequest `json:"generateAssistantResponseEventRequest"` + Response *chatLogResponse `json:"generateAssistantResponseEventResponse"` +} + +type chatLogRequest struct { + UserID string `json:"userId"` + Timestamp string `json:"timeStamp"` + ChatTriggerType string `json:"chatTriggerType"` + CustomizationArn *string `json:"customizationArn"` + ModelID string `json:"modelId"` + Prompt string `json:"prompt"` +} + +type chatLogResponse struct { + RequestID string `json:"requestId"` + AssistantResponse string `json:"assistantResponse"` + MessageMetadata struct { + ConversationID *string `json:"conversationId"` + UtteranceID *string `json:"utteranceId"` + } `json:"messageMetadata"` +} + +type completionLogRecord struct { + Request *completionLogRequest 
`json:"generateCompletionsEventRequest"` + Response *completionLogResponse `json:"generateCompletionsEventResponse"` +} + +type completionLogRequest struct { + UserID string `json:"userId"` + Timestamp string `json:"timeStamp"` + FileName string `json:"fileName"` + CustomizationArn *string `json:"customizationArn"` +} + +type completionLogResponse struct { + RequestID string `json:"requestId"` + Completions []json.RawMessage `json:"completions"` +} + +func parseChatRecord(raw json.RawMessage, fileMeta *models.QDevS3FileMeta, identityClient UserDisplayNameResolver, cache *sync.Map) (*models.QDevChatLog, error) { + var record chatLogRecord + if err := json.Unmarshal(raw, &record); err != nil { + return nil, err + } + + if record.Request == nil || record.Response == nil { + return nil, nil + } + + ts, err := time.Parse(time.RFC3339Nano, record.Request.Timestamp) + if err != nil { + ts = time.Now() + } + + userId := normalizeUserId(record.Request.UserID) + chatLog := &models.QDevChatLog{ + ConnectionId: fileMeta.ConnectionId, + ScopeId: fileMeta.ScopeId, + RequestId: record.Response.RequestID, + UserId: userId, + DisplayName: cachedResolveDisplayName(userId, identityClient, cache), + Timestamp: ts, + ChatTriggerType: record.Request.ChatTriggerType, + HasCustomization: record.Request.CustomizationArn != nil && *record.Request.CustomizationArn != "", + ModelId: record.Request.ModelID, + PromptLength: len(record.Request.Prompt), + ResponseLength: len(record.Response.AssistantResponse), + } + + // Parse structured info from prompt + prompt := record.Request.Prompt + chatLog.OpenFileCount = countOpenFiles(prompt) + chatLog.ActiveFileName, chatLog.ActiveFileExtension = parseActiveFile(prompt) + chatLog.HasSteering = strings.Contains(prompt, ".kiro/steering") + chatLog.IsSpecMode = strings.Contains(prompt, "implicit-rules") + + if record.Response.MessageMetadata.ConversationID != nil { + chatLog.ConversationId = *record.Response.MessageMetadata.ConversationID + } + if 
record.Response.MessageMetadata.UtteranceID != nil {
+		chatLog.UtteranceId = *record.Response.MessageMetadata.UtteranceID
+	}
+
+	return chatLog, nil
+}
+
+// countOpenFiles counts <openFile> tags within the <openFiles> block of the
+// prompt. NOTE(review): the original angle-bracket string literals in this
+// function and in parseActiveFile were destroyed by markup stripping when the
+// patch was transmitted; the tag names below are a best-effort reconstruction —
+// verify them against a real GenerateAssistantResponse prompt payload before
+// merging.
+func countOpenFiles(prompt string) int {
+	start := strings.Index(prompt, "<openFiles>")
+	if start == -1 {
+		return 0
+	}
+	end := strings.Index(prompt, "</openFiles>")
+	if end == -1 {
+		return 0
+	}
+	block := prompt[start:end]
+	return strings.Count(block, "<openFile")
+}
+
+// parseActiveFile extracts the active file name and its extension from the
+// prompt's <activeFile> block. Returns ("", "") when the block or its
+// name="..." attribute is absent.
+func parseActiveFile(prompt string) (string, string) {
+	start := strings.Index(prompt, "<activeFile")
+	if start == -1 {
+		return "", ""
+	}
+	end := strings.Index(prompt[start:], "</activeFile>")
+	if end == -1 {
+		return "", ""
+	}
+	block := prompt[start : start+end]
+	nameStart := strings.Index(block, "name=\"")
+	if nameStart == -1 {
+		return "", ""
+	}
+	nameStart += len("name=\"")
+	nameEnd := strings.Index(block[nameStart:], "\"")
+	if nameEnd == -1 {
+		return "", ""
+	}
+	fileName := block[nameStart : nameStart+nameEnd]
+	ext := filepath.Ext(fileName)
+	return fileName, ext
+}
+
+func parseCompletionRecord(raw json.RawMessage, fileMeta *models.QDevS3FileMeta, identityClient UserDisplayNameResolver, cache *sync.Map) (*models.QDevCompletionLog, error) {
+	var record completionLogRecord
+	if err := json.Unmarshal(raw, &record); err != nil {
+		return nil, err
+	}
+
+	if record.Request == nil || record.Response == nil {
+		return nil, nil
+	}
+
+	ts, err := time.Parse(time.RFC3339Nano, record.Request.Timestamp)
+	if err != nil {
+		ts = time.Now()
+	}
+
+	userId := normalizeUserId(record.Request.UserID)
+	return &models.QDevCompletionLog{
+		ConnectionId:     fileMeta.ConnectionId,
+		ScopeId:          fileMeta.ScopeId,
+		RequestId:        record.Response.RequestID,
+		UserId:           userId,
+		DisplayName:      cachedResolveDisplayName(userId, identityClient, cache),
+		Timestamp:        ts,
+		FileName:         record.Request.FileName,
+		FileExtension:    filepath.Ext(record.Request.FileName),
+		HasCustomization: record.Request.CustomizationArn != nil && *record.Request.CustomizationArn != "",
+		CompletionsCount: len(record.Response.Completions),
+	}, nil
+}
+
+// normalizeUserId strips the 
"d-{directoryId}." prefix from Identity Center user IDs +// so that logging user IDs match the short UUID format used in user-report CSVs. +func normalizeUserId(userId string) string { + if idx := strings.LastIndex(userId, "."); idx != -1 && strings.HasPrefix(userId, "d-") { + return userId[idx+1:] + } + return userId +} + +var ExtractQDevLoggingDataMeta = plugin.SubTaskMeta{ + Name: "extractQDevLoggingData", + EntryPoint: ExtractQDevLoggingData, + EnabledByDefault: true, + Description: "Extract logging data from S3 JSON.gz files (chat and completion events)", + DomainTypes: []string{plugin.DOMAIN_TYPE_CROSS}, + Dependencies: []*plugin.SubTaskMeta{&CollectQDevS3FilesMeta}, +} diff --git a/config-ui/src/plugins/register/q-dev/data-scope.tsx b/config-ui/src/plugins/register/q-dev/data-scope.tsx index c5eff68db6a..657d0bdb890 100644 --- a/config-ui/src/plugins/register/q-dev/data-scope.tsx +++ b/config-ui/src/plugins/register/q-dev/data-scope.tsx @@ -326,7 +326,7 @@ export const QDevDataScope = ({ const timePart = meta.month ? 
`${meta.year}/${ensureLeadingZero(meta.month)}` : `${meta.year}`; return ( {meta.basePath}/…/{meta.accountId}/…/{timePart} diff --git a/grafana/dashboards/qdev_executive.json b/grafana/dashboards/qdev_executive.json new file mode 100644 index 00000000000..c6e2524d70b --- /dev/null +++ b/grafana/dashboards/qdev_executive.json @@ -0,0 +1,958 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "datasource": "mysql", + "description": "Distinct users with chat activity in the last 7 days", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 6, + "x": 0, + "y": 0 + }, + "id": 1, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "sum" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT COUNT(DISTINCT user_id) as 'WAU'\nFROM lake._tool_q_dev_chat_log\nWHERE timestamp >= DATE_SUB(NOW(), INTERVAL 7 DAY)", + "refId": "A" + } + ], + "title": "Weekly Active Users", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Average credits spent per accepted line of code", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ 
+ { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 6, + "x": 6, + "y": 0 + }, + "id": 2, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "sum" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(SUM(r.credits_used) / NULLIF(SUM(d.total_accepted), 0), 2) as 'Credits per Accepted Line'\nFROM (\n SELECT user_id, date, SUM(credits_used) as credits_used\n FROM lake._tool_q_dev_user_report\n WHERE $__timeFilter(date)\n GROUP BY user_id, date\n) r\nJOIN (\n SELECT user_id, date,\n (inline_ai_code_lines + chat_ai_code_lines + code_fix_accepted_lines + dev_accepted_lines) as total_accepted\n FROM lake._tool_q_dev_user_data\n WHERE $__timeFilter(date)\n) d ON r.user_id = d.user_id AND r.date = d.date", + "refId": "A" + } + ], + "title": "Credits Efficiency", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Percentage of inline suggestions accepted", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 6, + "x": 12, + "y": 0 + }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "sum" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + 
"editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(SUM(inline_acceptance_count) / NULLIF(SUM(inline_suggestions_count), 0) * 100, 1) as 'Acceptance %'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)", + "refId": "A" + } + ], + "title": "Inline Acceptance Rate", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Percentage of users using steering rules", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 6, + "x": 18, + "y": 0 + }, + "id": 4, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "sum" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT CONCAT(ROUND(COUNT(DISTINCT CASE WHEN has_steering = 1 THEN user_id END) / NULLIF(COUNT(DISTINCT user_id), 0) * 100, 0), '%') as 'Users with Steering'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)", + "refId": "A" + } + ], + "title": "Steering Adoption", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Weekly active user count over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + 
"insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 6 + }, + "id": 5, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n STR_TO_DATE(CONCAT(YEARWEEK(timestamp, 1), ' Monday'), '%X%V %W') as time,\n COUNT(DISTINCT user_id) as 'Active Users'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY YEARWEEK(timestamp, 1)\nORDER BY time", + "refId": "A" + } + ], + "title": "Weekly Active Users Trend", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "New vs returning users by week", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + 
"mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 6 + }, + "id": 6, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n week as time,\n SUM(CASE WHEN is_new = 1 THEN 1 ELSE 0 END) as 'New Users',\n SUM(CASE WHEN is_new = 0 THEN 1 ELSE 0 END) as 'Returning Users'\nFROM (\n SELECT\n u.user_id,\n STR_TO_DATE(CONCAT(YEARWEEK(u.timestamp, 1), ' Monday'), '%X%V %W') as week,\n CASE WHEN STR_TO_DATE(CONCAT(YEARWEEK(u.timestamp, 1), ' Monday'), '%X%V %W') = STR_TO_DATE(CONCAT(YEARWEEK(f.first_seen, 1), ' Monday'), '%X%V %W') THEN 1 ELSE 0 END as is_new\n FROM lake._tool_q_dev_chat_log u\n JOIN (SELECT user_id, MIN(timestamp) as first_seen FROM lake._tool_q_dev_chat_log GROUP BY user_id) f ON u.user_id = f.user_id\n WHERE $__timeFilter(u.timestamp)\n GROUP BY u.user_id, YEARWEEK(u.timestamp, 1), f.first_seen\n) weekly\nGROUP BY week\nORDER BY week", + "refId": "A" + } + ], + "title": "New vs Returning Users (Weekly)", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Number of users who used each feature", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.8, + "drawStyle": "bars", + "fillOpacity": 100, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": 
false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 14 + }, + "id": 7, + "options": { + "barRadius": 0.1, + "barWidth": 0.8, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "orientation": "horizontal", + "showValue": "auto", + "stacking": "none", + "tooltip": { + "hideZeros": false, + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": 0 + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n 'Chat' as Feature, COUNT(DISTINCT CASE WHEN chat_messages_sent > 0 THEN user_id END) as Users FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Inline Suggestions', COUNT(DISTINCT CASE WHEN inline_suggestions_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Code Fix', COUNT(DISTINCT CASE WHEN code_fix_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Code Review', COUNT(DISTINCT CASE WHEN code_review_succeeded_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Doc Generation', COUNT(DISTINCT CASE WHEN doc_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Test Generation', COUNT(DISTINCT CASE WHEN test_generation_event_count > 0 THEN user_id END) FROM 
lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Dev (Agentic)', COUNT(DISTINCT CASE WHEN dev_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Steering', COUNT(DISTINCT CASE WHEN has_steering = 1 THEN user_id END) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)\nUNION ALL SELECT 'Spec Mode', COUNT(DISTINCT CASE WHEN is_spec_mode = 1 THEN user_id END) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)", + "refId": "A" + } + ], + "title": "Feature Adoption Funnel", + "type": "barchart" + }, + { + "datasource": "mysql", + "description": "Cumulative credits this month vs projected total", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 14 + }, + "id": 8, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", 
+ "rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(SUM(credits_used)) OVER (ORDER BY date) as 'Cumulative Credits',\n (SELECT SUM(credits_used) / COUNT(DISTINCT date) * DAY(LAST_DAY(CURDATE()))\n FROM lake._tool_q_dev_user_report\n WHERE YEAR(date) = YEAR(CURDATE()) AND MONTH(date) = MONTH(CURDATE())) as 'Projected Monthly'\nFROM lake._tool_q_dev_user_report\nWHERE YEAR(date) = YEAR(CURDATE()) AND MONTH(date) = MONTH(CURDATE())\nGROUP BY date\nORDER BY date", + "refId": "A" + } + ], + "title": "Credits Pace vs Projected (This Month)", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Acceptance rates for inline suggestions, code fix, and inline chat over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 22 + }, + "id": 9, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + 
"rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(inline_acceptance_count) / NULLIF(SUM(inline_suggestions_count), 0) as 'Inline Suggestions',\n SUM(code_fix_acceptance_event_count) / NULLIF(SUM(code_fix_generation_event_count), 0) as 'Code Fix',\n SUM(inline_chat_acceptance_event_count) / NULLIF(SUM(inline_chat_total_event_count), 0) as 'Inline Chat'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A" + } + ], + "title": "Acceptance Rate Trends", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Code review findings and test generation metrics over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 22 + }, + "id": 10, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n date as time,\n 
SUM(code_review_findings_count) as 'Review Findings',\n SUM(test_generation_event_count) as 'Test Gen Events',\n SUM(test_generation_accepted_tests) as 'Tests Accepted'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A" + } + ], + "title": "Code Review Findings & Test Generation", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Per-user productivity and efficiency metrics", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "filterable": true, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 30 + }, + "id": 11, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true, + "sortBy": [] + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n COALESCE(MAX(d.display_name), d.user_id) as 'User',\n COALESCE(MAX(r.subscription_tier), '') as 'Tier',\n ROUND(SUM(r.credits_used), 1) as 'Credits Used',\n SUM(d.chat_ai_code_lines + d.inline_ai_code_lines + d.code_fix_accepted_lines + d.dev_accepted_lines) as 'Total Accepted Lines',\n CASE WHEN SUM(d.chat_ai_code_lines + d.inline_ai_code_lines + d.code_fix_accepted_lines + d.dev_accepted_lines) > 0\n THEN ROUND(SUM(r.credits_used) / SUM(d.chat_ai_code_lines + d.inline_ai_code_lines + d.code_fix_accepted_lines + d.dev_accepted_lines), 2)\n ELSE NULL END as 'Credits/Line',\n CONCAT(ROUND(SUM(d.inline_acceptance_count) / NULLIF(SUM(d.inline_suggestions_count), 0) * 100, 1), '%') as 'Accept Rate',\n SUM(d.code_review_findings_count) as 'Review Findings',\n 
SUM(d.test_generation_event_count) as 'Test Gen Events',\n SUM(d.dev_accepted_lines) as 'Agentic Lines',\n MIN(d.date) as 'First Active',\n MAX(d.date) as 'Last Active'\nFROM lake._tool_q_dev_user_data d\nLEFT JOIN (\n SELECT user_id, date, SUM(credits_used) as credits_used, MAX(subscription_tier) as subscription_tier\n FROM lake._tool_q_dev_user_report\n WHERE $__timeFilter(date)\n GROUP BY user_id, date\n) r ON d.user_id = r.user_id AND d.date = r.date\nWHERE $__timeFilter(d.date)\nGROUP BY d.user_id\nORDER BY SUM(r.credits_used) DESC", + "refId": "A" + } + ], + "title": "User Productivity & Efficiency", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Power tier users with no activity in the last 14 days", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "filterable": true, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 40 + }, + "id": 12, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true, + "sortBy": [] + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n COALESCE(MAX(display_name), user_id) as 'User',\n MAX(subscription_tier) as 'Tier',\n ROUND(SUM(credits_used), 1) as 'Total Credits Used',\n MAX(date) as 'Last Activity'\nFROM lake._tool_q_dev_user_report\nWHERE $__timeFilter(date)\n AND subscription_tier = 'POWER'\nGROUP BY user_id\nHAVING MAX(date) < DATE_SUB(NOW(), INTERVAL 14 DAY)\nORDER BY MAX(date)", + "refId": "A" + } + ], + "title": "Idle Power Users (No Activity in 14 Days)", + "type": "table" + } + ], + "preload": false, + "refresh": "5m", + 
"schemaVersion": 41, + "tags": [ + "q_dev", + "executive", + "kiro" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-30d", + "to": "now" + }, + "timepicker": {}, + "timezone": "utc", + "title": "Kiro Executive Dashboard", + "uid": "qdev_executive", + "version": 1 +} \ No newline at end of file diff --git a/grafana/dashboards/qdev_logging.json b/grafana/dashboards/qdev_logging.json new file mode 100644 index 00000000000..462adcd986e --- /dev/null +++ b/grafana/dashboards/qdev_logging.json @@ -0,0 +1,800 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "datasource": "mysql", + "description": "Overview of logging event metrics", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 1, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "sum" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n (SELECT COUNT(*) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)) as 'Chat Events',\n (SELECT COUNT(DISTINCT user_id) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)) as 'Chat Users',\n (SELECT COUNT(DISTINCT conversation_id) FROM 
lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp) AND conversation_id != '') as 'Conversations',\n (SELECT COUNT(*) FROM lake._tool_q_dev_completion_log WHERE $__timeFilter(timestamp)) as 'Completion Events',\n (SELECT COUNT(DISTINCT user_id) FROM lake._tool_q_dev_completion_log WHERE $__timeFilter(timestamp)) as 'Completion Users'", + "refId": "A" + } + ], + "title": "Logging Overview", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Hourly distribution of AI usage activity (chat + completions)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Hour of Day", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.8, + "drawStyle": "bars", + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 6 + }, + "id": 2, + "options": { + "legend": { + "calcs": [ + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n LPAD(CAST(hour_of_day AS CHAR), 2, '0') as 'Hour',\n SUM(chat_count) as 'Chat Events',\n SUM(completion_count) as 'Completion Events'\nFROM (\n SELECT HOUR(timestamp) as hour_of_day, 
COUNT(*) as chat_count, 0 as completion_count\n FROM lake._tool_q_dev_chat_log\n WHERE $__timeFilter(timestamp)\n GROUP BY HOUR(timestamp)\n UNION ALL\n SELECT HOUR(timestamp) as hour_of_day, 0 as chat_count, COUNT(*) as completion_count\n FROM lake._tool_q_dev_completion_log\n WHERE $__timeFilter(timestamp)\n GROUP BY HOUR(timestamp)\n) combined\nGROUP BY hour_of_day\nORDER BY hour_of_day", + "refId": "A" + } + ], + "title": "Active Hours Distribution", + "type": "barchart" + }, + { + "datasource": "mysql", + "description": "Distribution of model usage across chat events", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 14 + }, + "id": 3, + "options": { + "displayLabels": [ + "name", + "percent" + ], + "legend": { + "displayMode": "table", + "placement": "right", + "showLegend": true, + "values": [ + "value", + "percent" + ] + }, + "pieType": "donut", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "/^Requests$/", + "values": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n CASE\n WHEN model_id = '' OR model_id IS NULL THEN '(unknown)'\n ELSE model_id\n END as 'Model',\n COUNT(*) as 'Requests'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY model_id\nORDER BY COUNT(*) DESC", + "refId": "A" + } + ], + "title": "Model Usage Distribution", + "type": "piechart" + }, + { + "datasource": "mysql", + "description": "Top file extensions used with inline completions", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": 
"green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 14 + }, + "id": 4, + "options": { + "displayLabels": [ + "name", + "percent" + ], + "legend": { + "displayMode": "table", + "placement": "right", + "showLegend": true, + "values": [ + "value", + "percent" + ] + }, + "pieType": "pie", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n CASE\n WHEN file_extension = '' THEN '(unknown)'\n ELSE file_extension\n END as 'File Type',\n COUNT(*) as 'Completions'\nFROM lake._tool_q_dev_completion_log\nWHERE $__timeFilter(timestamp)\nGROUP BY file_extension\nORDER BY COUNT(*) DESC\nLIMIT 15", + "refId": "A" + } + ], + "title": "File Type Usage (Completions)", + "type": "piechart" + }, + { + "datasource": "mysql", + "description": "Average number of chat events per conversation", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + 
"y": 22 + }, + "id": 5, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "min" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE(timestamp) as time,\n COUNT(*) / NULLIF(COUNT(DISTINCT CASE WHEN conversation_id != '' THEN conversation_id END), 0) as 'Avg Turns per Conversation',\n COUNT(DISTINCT CASE WHEN conversation_id != '' THEN conversation_id END) as 'Unique Conversations',\n COUNT(*) as 'Total Chat Events'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY DATE(timestamp)\nORDER BY DATE(timestamp)", + "refId": "A" + } + ], + "title": "Conversation Depth Analysis", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Daily chat and completion events over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 30 + }, + "id": 6, + "options": { + "legend": { + "calcs": [ + "mean", 
+ "max", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT time, SUM(chat) as 'Chat Events', SUM(completions) as 'Completion Events'\nFROM (\n SELECT DATE(timestamp) as time, COUNT(*) as chat, 0 as completions\n FROM lake._tool_q_dev_chat_log\n WHERE $__timeFilter(timestamp)\n GROUP BY DATE(timestamp)\n UNION ALL\n SELECT DATE(timestamp) as time, 0 as chat, COUNT(*) as completions\n FROM lake._tool_q_dev_completion_log\n WHERE $__timeFilter(timestamp)\n GROUP BY DATE(timestamp)\n) combined\nGROUP BY time\nORDER BY time", + "refId": "A" + } + ], + "title": "Daily Event Trends", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Per-user logging activity summary", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "filterable": true, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 38 + }, + "id": 7, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true, + "sortBy": [] + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n COALESCE(u.display_name, u.user_id) as 'User',\n u.user_id as 'User ID',\n u.chat_events as 'Chat Events',\n u.conversations as 'Conversations',\n ROUND(u.chat_events / NULLIF(u.conversations, 0), 1) as 'Avg Turns',\n COALESCE(c.completion_events, 0) as 'Completion Events',\n 
COALESCE(c.files_count, 0) as 'Distinct Files',\n ROUND(u.avg_prompt_len) as 'Avg Prompt Len',\n ROUND(u.avg_response_len) as 'Avg Response Len',\n u.steering_count as 'Steering Uses',\n u.spec_count as 'Spec Mode Uses',\n u.models_used as 'Models Used',\n u.first_seen as 'First Seen',\n GREATEST(u.last_seen, COALESCE(c.last_seen, u.last_seen)) as 'Last Seen'\nFROM (\n SELECT\n user_id,\n MAX(display_name) as display_name,\n COUNT(*) as chat_events,\n COUNT(DISTINCT CASE WHEN conversation_id != '' THEN conversation_id END) as conversations,\n AVG(prompt_length) as avg_prompt_len,\n AVG(response_length) as avg_response_len,\n GROUP_CONCAT(DISTINCT CASE WHEN model_id != '' AND model_id IS NOT NULL THEN model_id END ORDER BY model_id SEPARATOR ', ') as models_used,\n SUM(CASE WHEN has_steering = 1 THEN 1 ELSE 0 END) as steering_count,\n SUM(CASE WHEN is_spec_mode = 1 THEN 1 ELSE 0 END) as spec_count,\n MIN(timestamp) as first_seen,\n MAX(timestamp) as last_seen\n FROM lake._tool_q_dev_chat_log\n WHERE $__timeFilter(timestamp)\n GROUP BY user_id\n) u\nLEFT JOIN (\n SELECT\n user_id,\n COUNT(*) as completion_events,\n COUNT(DISTINCT file_name) as files_count,\n MAX(timestamp) as last_seen\n FROM lake._tool_q_dev_completion_log\n WHERE $__timeFilter(timestamp)\n GROUP BY user_id\n) c ON u.user_id = c.user_id\nORDER BY u.user_id", + "refId": "A" + } + ], + "title": "Per-User Activity", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Distribution of Kiro feature adoption: Steering, Spec Mode, and Plain Chat", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 48 + }, + "id": 8, + "options": { + "displayLabels": [ + "name", + "percent" + ], + "legend": { + "displayMode": "table", + "placement": "right", + "showLegend": true, + "values": [ 
+ "value", + "percent" + ] + }, + "pieType": "donut", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n SUM(CASE WHEN has_steering = 1 THEN 1 ELSE 0 END) as 'Using Steering',\n SUM(CASE WHEN is_spec_mode = 1 THEN 1 ELSE 0 END) as 'Using Spec Mode',\n SUM(CASE WHEN has_steering = 0 AND is_spec_mode = 0 THEN 1 ELSE 0 END) as 'Plain Chat'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)", + "refId": "A" + } + ], + "title": "Kiro Feature Adoption", + "type": "piechart" + }, + { + "datasource": "mysql", + "description": "Top file extensions active during chat events", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 48 + }, + "id": 9, + "options": { + "displayLabels": [ + "name", + "percent" + ], + "legend": { + "displayMode": "table", + "placement": "right", + "showLegend": true, + "values": [ + "value", + "percent" + ] + }, + "pieType": "pie", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n CASE\n WHEN active_file_extension = '' OR active_file_extension IS NULL THEN '(no file active)'\n ELSE active_file_extension\n END as 'File Type',\n COUNT(*) as 'Chat Events'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY active_file_extension\nORDER BY COUNT(*) DESC\nLIMIT 15", + "refId": "A" + } + ], + 
"title": "Active File Types in Chat", + "type": "piechart" + }, + { + "datasource": "mysql", + "description": "Average and maximum prompt/response lengths over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 56 + }, + "id": 10, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE(timestamp) as time,\n AVG(prompt_length) as 'Avg Prompt Length',\n AVG(response_length) as 'Avg Response Length',\n MAX(prompt_length) as 'Max Prompt Length',\n MAX(response_length) as 'Max Response Length'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY DATE(timestamp)\nORDER BY DATE(timestamp)", + "refId": "A" + } + ], + "title": "Prompt & Response Length Trends", + "type": "timeseries" + } + ], + "preload": false, + "refresh": "5m", + "schemaVersion": 41, + "tags": 
[ + "q_dev", + "logging", + "kiro" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-30d", + "to": "now" + }, + "timepicker": {}, + "timezone": "utc", + "title": "Kiro AI Activity Insights", + "uid": "qdev_logging", + "version": 1 +} \ No newline at end of file diff --git a/grafana/dashboards/qdev_user_data.json b/grafana/dashboards/qdev_user_data.json index d80d57bab21..578cf095dca 100644 --- a/grafana/dashboards/qdev_user_data.json +++ b/grafana/dashboards/qdev_user_data.json @@ -730,7 +730,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n COALESCE(display_name, user_id) as 'User',\n SUM(chat_ai_code_lines) as 'Accepted Lines (Chat)',\n SUM(transformation_lines_ingested) as 'Lines Ingested (Java Transform)',\n SUM(transformation_lines_generated) as 'Lines Generated (Java Transform)',\n SUM(transformation_event_count) as 'Event Count (Java Transform)',\n SUM(code_review_findings_count) as 'Findings (Code Review)',\n SUM(code_fix_accepted_lines) as 'Accepted Lines (Code Fix)',\n SUM(code_fix_generated_lines) as 'Generated Lines (Code Fix)',\n SUM(code_fix_acceptance_event_count) as 'Accepted Count (Code Fix)',\n SUM(code_fix_generation_event_count) as 'Generated Count (Code Fix)',\n CONCAT(ROUND(SUM(code_fix_acceptance_event_count) / NULLIF(SUM(code_fix_generation_event_count), 0) * 100, 2), '%') as 'Acceptance Rate (Code Fix)',\n SUM(inline_ai_code_lines) as 'Accepted Lines (Inline Suggestion)',\n SUM(inline_acceptance_count) as 'Accepted Count (Inline Suggestion)',\n SUM(inline_suggestions_count) as 'Total Count (Inline Suggestion)',\n CONCAT(ROUND(SUM(inline_acceptance_count) / NULLIF(SUM(inline_suggestions_count), 0) * 100, 2), '%') as 'Acceptance Rate (Inline Suggestion)',\n SUM(inline_chat_accepted_line_additions) as 'Accepted Line Additions (Inline Chat)',\n SUM(inline_chat_accepted_line_deletions) as 'Accepted Line Deletions (Inline Chat)',\n SUM(inline_chat_acceptance_event_count) as 'Accepted Events 
(Inline Chat)',\n SUM(inline_chat_total_event_count) as 'Total Events (Inline Chat)',\n CONCAT(ROUND(SUM(inline_chat_acceptance_event_count) / NULLIF(SUM(inline_chat_total_event_count), 0) * 100, 2), '%') as 'Acceptance Rate (Inline Chat)',\n MIN(date) as 'First Activity',\n MAX(date) as 'Last Activity'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY user_id, display_name\nORDER BY SUM(inline_ai_code_lines) DESC", + "rawSql": "SELECT\n COALESCE(MAX(display_name), user_id) as 'User',\n SUM(chat_ai_code_lines) as 'Accepted Lines (Chat)',\n SUM(transformation_lines_ingested) as 'Lines Ingested (Java Transform)',\n SUM(transformation_lines_generated) as 'Lines Generated (Java Transform)',\n SUM(transformation_event_count) as 'Event Count (Java Transform)',\n SUM(code_review_findings_count) as 'Findings (Code Review)',\n SUM(code_fix_accepted_lines) as 'Accepted Lines (Code Fix)',\n SUM(code_fix_generated_lines) as 'Generated Lines (Code Fix)',\n SUM(code_fix_acceptance_event_count) as 'Accepted Count (Code Fix)',\n SUM(code_fix_generation_event_count) as 'Generated Count (Code Fix)',\n CONCAT(ROUND(SUM(code_fix_acceptance_event_count) / NULLIF(SUM(code_fix_generation_event_count), 0) * 100, 2), '%') as 'Acceptance Rate (Code Fix)',\n SUM(inline_ai_code_lines) as 'Accepted Lines (Inline Suggestion)',\n SUM(inline_acceptance_count) as 'Accepted Count (Inline Suggestion)',\n SUM(inline_suggestions_count) as 'Total Count (Inline Suggestion)',\n CONCAT(ROUND(SUM(inline_acceptance_count) / NULLIF(SUM(inline_suggestions_count), 0) * 100, 2), '%') as 'Acceptance Rate (Inline Suggestion)',\n SUM(inline_chat_accepted_line_additions) as 'Accepted Line Additions (Inline Chat)',\n SUM(inline_chat_accepted_line_deletions) as 'Accepted Line Deletions (Inline Chat)',\n SUM(inline_chat_acceptance_event_count) as 'Accepted Events (Inline Chat)',\n SUM(inline_chat_total_event_count) as 'Total Events (Inline Chat)',\n 
CONCAT(ROUND(SUM(inline_chat_acceptance_event_count) / NULLIF(SUM(inline_chat_total_event_count), 0) * 100, 2), '%') as 'Acceptance Rate (Inline Chat)',\n SUM(doc_generation_event_count) as 'Doc Gen Events',\n SUM(test_generation_event_count) as 'Test Gen Events',\n SUM(dev_accepted_lines) as 'Dev Accepted Lines',\n MIN(date) as 'First Activity',\n MAX(date) as 'Last Activity'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY user_id\nORDER BY SUM(inline_ai_code_lines) DESC", "refId": "A", "select": [ [ @@ -771,6 +771,393 @@ ], "title": "User Interactions", "type": "table" + }, + { + "datasource": "mysql", + "description": "Daily doc generation events and accepted/rejected lines", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 40 + }, + "id": 11, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + 
"group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(doc_generation_event_count) as 'Doc Generation Events',\n SUM(doc_generation_accepted_line_additions) as 'Accepted Line Additions',\n SUM(doc_generation_accepted_line_updates) as 'Accepted Line Updates',\n SUM(doc_generation_rejected_line_additions) as 'Rejected Line Additions',\n SUM(doc_generation_rejected_line_updates) as 'Rejected Line Updates'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Doc Generation Metrics", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Daily test generation events and lines", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + 
"x": 12, + "y": 40 + }, + "id": 12, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(test_generation_event_count) as 'Test Generation Events',\n SUM(test_generation_accepted_tests) as 'Accepted Tests',\n SUM(test_generation_generated_tests) as 'Generated Tests',\n SUM(test_generation_accepted_lines) as 'Accepted Lines',\n SUM(test_generation_generated_lines) as 'Generated Lines'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Test Generation Metrics", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Daily agentic dev events and lines", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + 
"showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 48 + }, + "id": 13, + "options": { + "legend": { + "calcs": [ + "mean", + "max", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(dev_generation_event_count) as 'Dev Generation Events',\n SUM(dev_acceptance_event_count) as 'Dev Acceptance Events',\n SUM(dev_generated_lines) as 'Dev Generated Lines',\n SUM(dev_accepted_lines) as 'Dev Accepted Lines'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Dev (Agentic) Metrics", + "type": "timeseries" } ], "preload": false, diff --git a/grafana/dashboards/qdev_user_report.json b/grafana/dashboards/qdev_user_report.json index e1a27bc539b..920fc1f6cde 100644 --- a/grafana/dashboards/qdev_user_report.json +++ b/grafana/dashboards/qdev_user_report.json @@ -305,10 +305,10 @@ "pieType": "pie", "reduceOptions": { "calcs": [ - "sum" + "lastNotNull" ], "fields": "", - "values": false + 
"values": true }, "tooltip": { "mode": "single", @@ -433,7 +433,7 @@ "editorMode": "code", "format": "table", "rawQuery": true, - "rawSql": "SELECT\n COALESCE(display_name, user_id) as 'User',\n subscription_tier as 'Tier',\n client_type as 'Client',\n SUM(credits_used) as 'Credits Used',\n SUM(total_messages) as 'Messages',\n SUM(chat_conversations) as 'Conversations',\n SUM(overage_credits_used) as 'Overage Credits',\n CASE WHEN MAX(CAST(overage_enabled AS UNSIGNED)) = 1 THEN 'Yes' ELSE 'No' END as 'Overage',\n MIN(date) as 'First Activity',\n MAX(date) as 'Last Activity'\nFROM lake._tool_q_dev_user_report\nWHERE $__timeFilter(date)\nGROUP BY user_id, display_name, subscription_tier, client_type\nORDER BY SUM(credits_used) DESC", + "rawSql": "SELECT\n COALESCE(MAX(display_name), user_id) as 'User',\n subscription_tier as 'Tier',\n client_type as 'Client',\n SUM(credits_used) as 'Credits Used',\n SUM(total_messages) as 'Messages',\n SUM(chat_conversations) as 'Conversations',\n SUM(overage_credits_used) as 'Overage Credits',\n CASE WHEN MAX(CAST(overage_enabled AS UNSIGNED)) = 1 THEN 'Yes' ELSE 'No' END as 'Overage',\n MIN(date) as 'First Activity',\n MAX(date) as 'Last Activity'\nFROM lake._tool_q_dev_user_report\nWHERE $__timeFilter(date)\nGROUP BY user_id, subscription_tier, client_type\nORDER BY user_id DESC", "refId": "A" } ], @@ -461,4 +461,4 @@ "title": "Kiro Usage Dashboard", "uid": "qdev_user_report", "version": 1 -} +} \ No newline at end of file From 475dc49bcd6e533d1d5537a59a3dccaddc45f67f Mon Sep 17 00:00:00 2001 From: NaRro Date: Tue, 17 Mar 2026 02:19:11 +0000 Subject: [PATCH 22/39] feat(qa): add is_invalid field to qa_test_case_executions (#8764) * feat(qa): add is_invalid field to qa_test_case_executions Add is_invalid boolean field to the domain layer qa_test_case_executions table to allow QA teams to flag test executions as invalid due to environmental issues, flaky tests, false positives, or false negatives. 
Changes: - Add IsInvalid field to QaTestCaseExecution domain model - Create migration script (20260313_add_is_invalid_to_qa_test_case_executions) - Register migration in migrationscripts/register.go - Update customize service to set default value for is_invalid - Update E2E test data to include new column Resolves #8763 Co-Authored-By: Claude Opus 4.6 * fix(qa): handle missing is_invalid column in CSV import Fix PostgreSQL compatibility issue when CSV files don't contain the is_invalid column. The field now defaults to false instead of an empty string. Changes: - Update qaTestCaseExecutionHandler to check for empty string values - Add E2E test for backward compatibility with CSV files lacking is_invalid - Add explicit IsInvalid initialization in Testmo plugin converter Resolves #8763 --------- Co-authored-by: Claude Opus 4.6 --- .../domainlayer/qa/qa_test_case_execution.go | 1 + ...d_is_invalid_to_qa_test_case_executions.go | 52 +++++++++++++++++++ .../core/models/migrationscripts/register.go | 1 + .../e2e/import_test_case_execution_test.go | 50 ++++++++++++++++++ .../qa_test_case_executions_output.csv | 8 +-- ...est_case_executions_output_incremental.csv | 10 ++-- ...executions_output_no_is_invalid_column.csv | 4 ++ backend/plugins/customize/service/service.go | 4 ++ 8 files changed, 121 insertions(+), 9 deletions(-) create mode 100644 backend/core/models/migrationscripts/20260313_add_is_invalid_to_qa_test_case_executions.go create mode 100644 backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output_no_is_invalid_column.csv diff --git a/backend/core/models/domainlayer/qa/qa_test_case_execution.go b/backend/core/models/domainlayer/qa/qa_test_case_execution.go index cfab89b6d05..aaf237b6fce 100644 --- a/backend/core/models/domainlayer/qa/qa_test_case_execution.go +++ b/backend/core/models/domainlayer/qa/qa_test_case_execution.go @@ -33,6 +33,7 @@ type QaTestCaseExecution struct { FinishTime time.Time `gorm:"comment:Test finish time"` CreatorId 
string `gorm:"type:varchar(255);comment:Executor ID"` Status string `gorm:"type:varchar(255);comment:Test execution status | PENDING | IN_PROGRESS | SUCCESS | FAILED"` // enum, using string + IsInvalid bool } func (QaTestCaseExecution) TableName() string { diff --git a/backend/core/models/migrationscripts/20260313_add_is_invalid_to_qa_test_case_executions.go b/backend/core/models/migrationscripts/20260313_add_is_invalid_to_qa_test_case_executions.go new file mode 100644 index 00000000000..ce7656e1e4c --- /dev/null +++ b/backend/core/models/migrationscripts/20260313_add_is_invalid_to_qa_test_case_executions.go @@ -0,0 +1,52 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" +) + +var _ plugin.MigrationScript = (*addIsInvalidToQaTestCaseExecution)(nil) + +type qaTestCaseExecution20260313 struct { + IsInvalid bool +} + +func (qaTestCaseExecution20260313) TableName() string { + return "qa_test_case_executions" +} + +type addIsInvalidToQaTestCaseExecution struct{} + +func (*addIsInvalidToQaTestCaseExecution) Up(basicRes context.BasicRes) errors.Error { + db := basicRes.GetDal() + if err := db.AutoMigrate(&qaTestCaseExecution20260313{}); err != nil { + return err + } + return nil +} + +func (*addIsInvalidToQaTestCaseExecution) Version() uint64 { + return 20260313100000 +} + +func (*addIsInvalidToQaTestCaseExecution) Name() string { + return "add is_invalid to qa_test_case_executions" +} diff --git a/backend/core/models/migrationscripts/register.go b/backend/core/models/migrationscripts/register.go index 5b682b66222..9372d8fbd02 100644 --- a/backend/core/models/migrationscripts/register.go +++ b/backend/core/models/migrationscripts/register.go @@ -136,6 +136,7 @@ func All() []plugin.MigrationScript { new(addCqIssueImpacts), new(addDueDateToIssues), new(createQaTables), + new(addIsInvalidToQaTestCaseExecution), new(increaseCqIssueComponentLength), new(extendFieldSizeForCq), new(addIssueFixVerion), diff --git a/backend/plugins/customize/e2e/import_test_case_execution_test.go b/backend/plugins/customize/e2e/import_test_case_execution_test.go index 92d5750b86b..a74c69940a6 100644 --- a/backend/plugins/customize/e2e/import_test_case_execution_test.go +++ b/backend/plugins/customize/e2e/import_test_case_execution_test.go @@ -71,6 +71,7 @@ func TestImportQaTestCaseExecutionsDataFlow(t *testing.T) { "start_time", "finish_time", "creator_id", + "is_invalid", "status", }) dataflowTester.VerifyTableWithRawData( @@ -104,6 +105,7 @@ func 
TestImportQaTestCaseExecutionsDataFlow(t *testing.T) { "start_time", "finish_time", "creator_id", + "is_invalid", "status", }) @@ -117,3 +119,51 @@ func TestImportQaTestCaseExecutionsDataFlow(t *testing.T) { }, ) } + +// TestImportQaTestCaseExecutions_NoIsInvalidColumn tests backward compatibility: +// Verifies that importing CSV files without the is_invalid column works correctly, +// and the is_invalid field defaults to false. +func TestImportQaTestCaseExecutions_NoIsInvalidColumn(t *testing.T) { + var plugin impl.Customize + dataflowTester := e2ehelper.NewDataFlowTester(t, "customize", plugin) + + // Flush the relevant table + dataflowTester.FlushTabler(&qa.QaTestCaseExecution{}) + dataflowTester.FlushTabler(&crossdomain.Account{}) + + // Create a new service instance + svc := service.NewService(dataflowTester.Dal) + + // Use the existing CSV file that does NOT contain is_invalid column + // This simulates backward compatibility with old CSV files + qaTestCaseExecutionsFile, err := os.Open("raw_tables/qa_test_case_executions_input.csv") + if err != nil { + t.Fatal(err) + } + defer qaTestCaseExecutionsFile.Close() + + // Define a dummy qaProjectId + qaProjectId := "test-backward-compat-project" + + // Import data from the CSV file (which has no is_invalid column) + err = svc.ImportQaTestCaseExecutions(qaProjectId, qaTestCaseExecutionsFile, false) + if err != nil { + t.Fatalf("ImportQaTestCaseExecutions failed: %v", err) + } + + // Verify the imported data has is_invalid defaulted to false + dataflowTester.VerifyTableWithRawData( + &qa.QaTestCaseExecution{}, + "snapshot_tables/qa_test_case_executions_output_no_is_invalid_column.csv", + []string{ + "id", + "qa_project_id", + "qa_test_case_id", + "create_time", + "start_time", + "finish_time", + "creator_id", + "is_invalid", + "status", + }) +} diff --git a/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output.csv 
b/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output.csv index 1d51d4a0d66..cd78e9ec52e 100644 --- a/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output.csv +++ b/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output.csv @@ -1,4 +1,4 @@ -id,qa_project_id,qa_test_case_id,create_time,start_time,finish_time,creator_id,status,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark -exec-1,test-qa-project-id,tc-1,2023-03-01T10:00:00.000+00:00,2023-03-01T10:01:00.000+00:00,2023-03-01T10:05:00.000+00:00,csv:CsvAccount:0:user-a,SUCCESS,test-qa-project-id,,, -exec-2,test-qa-project-id,tc-2,2023-03-01T11:00:00.000+00:00,2023-03-01T11:02:00.000+00:00,2023-03-01T11:06:00.000+00:00,csv:CsvAccount:0:user-b,FAILED,test-qa-project-id,,, -exec-3,test-qa-project-id,tc-1,2023-03-02T10:00:00.000+00:00,2023-03-02T10:01:00.000+00:00,2023-03-02T10:04:00.000+00:00,csv:CsvAccount:0:user-a,SUCCESS,test-qa-project-id,,, +id,qa_project_id,qa_test_case_id,create_time,start_time,finish_time,creator_id,is_invalid,status,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark +exec-1,test-qa-project-id,tc-1,2023-03-01T10:00:00.000+00:00,2023-03-01T10:01:00.000+00:00,2023-03-01T10:05:00.000+00:00,csv:CsvAccount:0:user-a,0,SUCCESS,test-qa-project-id,,, +exec-2,test-qa-project-id,tc-2,2023-03-01T11:00:00.000+00:00,2023-03-01T11:02:00.000+00:00,2023-03-01T11:06:00.000+00:00,csv:CsvAccount:0:user-b,0,FAILED,test-qa-project-id,,, +exec-3,test-qa-project-id,tc-1,2023-03-02T10:00:00.000+00:00,2023-03-02T10:01:00.000+00:00,2023-03-02T10:04:00.000+00:00,csv:CsvAccount:0:user-a,0,SUCCESS,test-qa-project-id,,, diff --git a/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output_incremental.csv b/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output_incremental.csv index e214d734b23..90e496a27bd 100644 --- 
a/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output_incremental.csv +++ b/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output_incremental.csv @@ -1,5 +1,5 @@ -id,qa_project_id,qa_test_case_id,create_time,start_time,finish_time,creator_id,status,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark -exec-1,test-qa-project-id,tc-1,2023-03-04T10:00:00.000+00:00,2023-03-04T10:01:00.000+00:00,2023-03-04T10:06:00.000+00:00,csv:CsvAccount:0:user-a,FAILED,test-qa-project-id,,, -exec-2,test-qa-project-id,tc-2,2023-03-01T11:00:00.000+00:00,2023-03-01T11:02:00.000+00:00,2023-03-01T11:06:00.000+00:00,csv:CsvAccount:0:user-b,FAILED,test-qa-project-id,,, -exec-3,test-qa-project-id,tc-1,2023-03-02T10:00:00.000+00:00,2023-03-02T10:01:00.000+00:00,2023-03-02T10:04:00.000+00:00,csv:CsvAccount:0:user-a,SUCCESS,test-qa-project-id,,, -exec-4,test-qa-project-id,tc-3,2023-03-03T10:00:00.000+00:00,2023-03-03T10:01:00.000+00:00,2023-03-03T10:05:00.000+00:00,csv:CsvAccount:0:user-c,SUCCESS,test-qa-project-id,,, +id,qa_project_id,qa_test_case_id,create_time,start_time,finish_time,creator_id,is_invalid,status,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark +exec-1,test-qa-project-id,tc-1,2023-03-04T10:00:00.000+00:00,2023-03-04T10:01:00.000+00:00,2023-03-04T10:06:00.000+00:00,csv:CsvAccount:0:user-a,0,FAILED,test-qa-project-id,,, +exec-2,test-qa-project-id,tc-2,2023-03-01T11:00:00.000+00:00,2023-03-01T11:02:00.000+00:00,2023-03-01T11:06:00.000+00:00,csv:CsvAccount:0:user-b,0,FAILED,test-qa-project-id,,, +exec-3,test-qa-project-id,tc-1,2023-03-02T10:00:00.000+00:00,2023-03-02T10:01:00.000+00:00,2023-03-02T10:04:00.000+00:00,csv:CsvAccount:0:user-a,0,SUCCESS,test-qa-project-id,,, +exec-4,test-qa-project-id,tc-3,2023-03-03T10:00:00.000+00:00,2023-03-03T10:01:00.000+00:00,2023-03-03T10:05:00.000+00:00,csv:CsvAccount:0:user-c,0,SUCCESS,test-qa-project-id,,, diff --git 
a/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output_no_is_invalid_column.csv b/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output_no_is_invalid_column.csv new file mode 100644 index 00000000000..e313377a0c5 --- /dev/null +++ b/backend/plugins/customize/e2e/snapshot_tables/qa_test_case_executions_output_no_is_invalid_column.csv @@ -0,0 +1,4 @@ +id,qa_project_id,qa_test_case_id,create_time,start_time,finish_time,creator_id,is_invalid,status,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark +exec-1,test-backward-compat-project,tc-1,2023-03-01T10:00:00.000+00:00,2023-03-01T10:01:00.000+00:00,2023-03-01T10:05:00.000+00:00,csv:CsvAccount:0:user-a,0,SUCCESS,test-backward-compat-project,,, +exec-2,test-backward-compat-project,tc-2,2023-03-01T11:00:00.000+00:00,2023-03-01T11:02:00.000+00:00,2023-03-01T11:06:00.000+00:00,csv:CsvAccount:0:user-b,0,FAILED,test-backward-compat-project,,, +exec-3,test-backward-compat-project,tc-1,2023-03-02T10:00:00.000+00:00,2023-03-02T10:01:00.000+00:00,2023-03-02T10:04:00.000+00:00,csv:CsvAccount:0:user-a,0,SUCCESS,test-backward-compat-project,,, \ No newline at end of file diff --git a/backend/plugins/customize/service/service.go b/backend/plugins/customize/service/service.go index 89e8833cf1c..5ef3bcbb3bb 100644 --- a/backend/plugins/customize/service/service.go +++ b/backend/plugins/customize/service/service.go @@ -541,6 +541,10 @@ func (s *Service) qaTestCaseExecutionHandler(qaProjectId string) func(record map } delete(record, "creator_name") record["qa_project_id"] = qaProjectId + // Set default value for is_invalid if not present or empty in the CSV + if isInvalid, exists := record["is_invalid"]; !exists || isInvalid == "" { + record["is_invalid"] = false + } return s.dal.CreateWithMap(&qa.QaTestCaseExecution{}, record) } } From c36180801765edd8a8fb80e70d4a009a2428e87c Mon Sep 17 00:00:00 2001 From: Reece Ward <47779818+ReeceXW@users.noreply.github.com> Date: Thu, 19 Mar 
2026 13:05:00 +0000 Subject: [PATCH 23/39] feat(linker): link when branch names contain issue keys (#8777) * feat(linker): branch names containing issue keys * chore: add testing data --- backend/plugins/linker/e2e/snapshot_tables/board_issues.csv | 1 + .../linker/e2e/snapshot_tables/pull_request_issues.csv | 1 + backend/plugins/linker/e2e/snapshot_tables/pull_requests.csv | 3 ++- backend/plugins/linker/tasks/link_pr_and_issue.go | 5 +++-- 4 files changed, 7 insertions(+), 3 deletions(-) diff --git a/backend/plugins/linker/e2e/snapshot_tables/board_issues.csv b/backend/plugins/linker/e2e/snapshot_tables/board_issues.csv index 8c288bf48cf..0955b9ccd93 100644 --- a/backend/plugins/linker/e2e/snapshot_tables/board_issues.csv +++ b/backend/plugins/linker/e2e/snapshot_tables/board_issues.csv @@ -1,3 +1,4 @@ "board_id","issue_id","created_at","updated_at","_raw_data_params","_raw_data_table","_raw_data_id","_raw_data_remark" "github:GithubRepo:1:384111310","github:GithubIssue:1:1237324696","2024-05-14 10:42:37.541","2024-05-28 00:25:41.436","{""ConnectionId"":1,""Name"":""apache/incubator-devlake""}","_raw_github_graphql_issues",69,"" "github:GithubRepo:1:384111310","github:GithubIssue:1:1237324697","2024-05-14 10:42:37.541","2024-05-28 00:25:41.436","{""ConnectionId"":1,""Name"":""apache/incubator-devlake""}","_raw_github_graphql_issues",69,"" +"github:GithubRepo:1:384111310","github:GithubIssue:1:1237324698","2024-05-14 10:42:37.541","2024-05-28 00:25:41.436","{""ConnectionId"":1,""Name"":""apache/incubator-devlake""}","_raw_github_graphql_issues",69,"" diff --git a/backend/plugins/linker/e2e/snapshot_tables/pull_request_issues.csv b/backend/plugins/linker/e2e/snapshot_tables/pull_request_issues.csv index 6b7d30fb8a3..d602f600d5e 100644 --- a/backend/plugins/linker/e2e/snapshot_tables/pull_request_issues.csv +++ b/backend/plugins/linker/e2e/snapshot_tables/pull_request_issues.csv @@ -1,3 +1,4 @@ 
pull_request_id,issue_id,pull_request_key,issue_key,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark github:GithubPullRequest:1:1819250573,github:GithubIssue:1:1237324696,7317,1884,,,0,"pull_requests," github:GithubPullRequest:1:1819250573,github:GithubIssue:1:1237324697,7317,1885,,,0,"pull_requests," +github:GithubPullRequest:1:1819250574,github:GithubIssue:1:1237324698,7318,1886,,,0,"pull_requests," diff --git a/backend/plugins/linker/e2e/snapshot_tables/pull_requests.csv b/backend/plugins/linker/e2e/snapshot_tables/pull_requests.csv index 22635f9b0b7..3084711b0bc 100644 --- a/backend/plugins/linker/e2e/snapshot_tables/pull_requests.csv +++ b/backend/plugins/linker/e2e/snapshot_tables/pull_requests.csv @@ -1,2 +1,3 @@ "id","created_at","updated_at","_raw_data_params","_raw_data_table","_raw_data_id","_raw_data_remark","base_repo_id","base_ref","base_commit_sha","head_repo_id","head_ref","head_commit_sha","merge_commit_sha","status","original_status","type","component","title","description","url","author_name","author_id","parent_pr_id","pull_request_key","created_date","merged_date","closed_date" -"github:GithubPullRequest:1:1819250573","2024-05-15 12:07:36.778","2024-05-15 12:07:36.778","{""ConnectionId"":1,""Name"":""apache/incubator-devlake""}","_raw_github_api_pull_requests",191,"","github:GithubRepo:1:384111310","main","64c52748f3529784cb6c8a372691aa0f638fa73d","github:GithubRepo:1:384111310","fix#7275","14fb6488f2208e6a65374a86efce12dd460987e0","91dbce48759da14a4a030124c3ef751f1c5d8389","CLOSED","closed","","","fix: can't GET projects which have / in their name #1884 #1885","desc","https://github.com/apache/incubator-devlake/pull/7317","abeizn","github:GithubAccount:1:101256042","",7317,"2024-04-12 05:31:43.000","2024-04-13 05:31:43.000","2024-04-12 06:44:27.000" \ No newline at end of file +"github:GithubPullRequest:1:1819250573","2024-05-15 12:07:36.778","2024-05-15 
12:07:36.778","{""ConnectionId"":1,""Name"":""apache/incubator-devlake""}","_raw_github_api_pull_requests",191,"","github:GithubRepo:1:384111310","main","64c52748f3529784cb6c8a372691aa0f638fa73d","github:GithubRepo:1:384111310","fix#7275","14fb6488f2208e6a65374a86efce12dd460987e0","91dbce48759da14a4a030124c3ef751f1c5d8389","CLOSED","closed","","","fix: can't GET projects which have / in their name #1884 #1885","desc","https://github.com/apache/incubator-devlake/pull/7317","abeizn","github:GithubAccount:1:101256042","",7317,"2024-04-12 05:31:43.000","2024-04-13 05:31:43.000","2024-04-12 06:44:27.000" +"github:GithubPullRequest:1:1819250574","2024-05-15 12:07:36.778","2024-05-15 12:07:36.778","{""ConnectionId"":1,""Name"":""apache/incubator-devlake""}","_raw_github_api_pull_requests",192,"","github:GithubRepo:1:384111310","main","64c52748f3529784cb6c8a372691aa0f638fa73d","github:GithubRepo:1:384111310","fix#1886","14fb6488f2208e6a65374a86efce12dd460987e0","","MERGED","merged","","","Fix deployment policy recreation","no issue key here","https://github.com/apache/incubator-devlake/pull/7318","abeizn","github:GithubAccount:1:101256042","",7318,"2024-04-12 05:31:43.000","2024-04-13 05:31:43.000","2024-04-12 06:44:27.000" diff --git a/backend/plugins/linker/tasks/link_pr_and_issue.go b/backend/plugins/linker/tasks/link_pr_and_issue.go index 7fcd3baa5de..52fb7aa861d 100644 --- a/backend/plugins/linker/tasks/link_pr_and_issue.go +++ b/backend/plugins/linker/tasks/link_pr_and_issue.go @@ -18,10 +18,11 @@ limitations under the License. 
package tasks import ( + "strings" + "github.com/apache/incubator-devlake/core/models/domainlayer/code" "github.com/apache/incubator-devlake/core/models/domainlayer/crossdomain" "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" - "strings" "github.com/apache/incubator-devlake/core/dal" "github.com/apache/incubator-devlake/core/errors" @@ -97,7 +98,7 @@ func LinkPrToIssue(taskCtx plugin.SubTaskContext) errors.Error { Enrich: func(pullRequest *code.PullRequest) ([]interface{}, errors.Error) { var issueKeys []string - for _, text := range []string{pullRequest.Title, pullRequest.Description} { + for _, text := range []string{pullRequest.Title, pullRequest.Description, pullRequest.HeadRef} { foundIssueKeys := data.PrToIssueRegexp.FindAllString(text, -1) if len(foundIssueKeys) > 0 { for _, issueKey := range foundIssueKeys { From 7a4b03dbdf635cd6acf3b1936faa5ac08d79c04c Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Thu, 19 Mar 2026 10:40:06 -0400 Subject: [PATCH 24/39] Add codespell support with configuration and fixes (#8761) * ci(codespell): add codespell config and GitHub Actions workflow Add .codespellrc with skip patterns for generated files, camelCase/PascalCase ignore-regex, and project-specific word list (convertor, crypted, te, thur). Add GitHub Actions workflow to run codespell on push to main and PRs. Co-Authored-By: Claude Code 2.1.63 / Claude Opus 4.6 Signed-off-by: Yaroslav Halchenko * fix(codespell): fix ambiguous typos requiring context review Manual fixes for typos that needed human review to avoid breaking code: - Comment/string typos: occured->occurred, destory->destroy, writting->writing, retreive->retrieve, identifer->identifier, etc. 
- Struct field comments and documentation corrections - Migration script comment fixes (preserving Go identifiers like DataConvertor) Co-Authored-By: Claude Code 2.1.63 / Claude Opus 4.6 Signed-off-by: Yaroslav Halchenko * fix(codespell): fix non-ambiguous typos with codespell -w Automated fix via `codespell -w` for clear-cut typos across backend, config-ui, and grafana dashboards. Examples: sucess->success, occurence->occurrence, exeucte->execute, asynchornous->asynchronous, Grafana panel typos, etc. Co-Authored-By: Claude Code 2.1.63 / Claude Opus 4.6 Signed-off-by: Yaroslav Halchenko --------- Signed-off-by: Yaroslav Halchenko Co-authored-by: Claude Code 2.1.63 / Claude Opus 4.6 --- .codespellrc | 28 +++++++++++++ .github/actions/auto-cherry-pick/action.yml | 2 +- .github/workflows/codespell.yml | 39 +++++++++++++++++++ backend/core/models/domainlayer/README.md | 2 +- backend/core/models/locking.go | 2 +- backend/core/plugin/plugin_blueprint.go | 4 +- backend/core/plugin/plugin_task.go | 2 +- backend/core/utils/network_helper_test.go | 2 +- backend/helpers/e2ehelper/data_flow_tester.go | 2 +- .../migrationhelper/migrationhelper.go | 6 +-- .../migrationhelper/migrationhelper_test.go | 16 ++++---- .../pluginhelper/api/api_async_client.go | 2 +- .../helpers/pluginhelper/api/api_client.go | 2 +- .../pluginhelper/api/batch_save_divider.go | 4 +- .../api/data_convertor_stateful.go | 2 +- .../api/ds_remote_api_scope_list_helper.go | 2 +- backend/helpers/pluginhelper/api/iterator.go | 4 +- .../pluginhelper/api/model_api_helper.go | 4 +- .../pluginhelper/api/worker_scheduler.go | 4 +- backend/helpers/pluginhelper/csv_file_test.go | 4 +- .../helpers/srvhelper/model_service_helper.go | 2 +- .../srvhelper/scope_service_helper_test.go | 2 +- backend/impls/context/default_basic_res.go | 2 +- backend/impls/dalgorm/dalgorm.go | 4 +- backend/plugins/ae/api/connection.go | 2 +- backend/plugins/bamboo/api/connection_api.go | 10 ++--- backend/plugins/bamboo/models/plan.go | 2 +- 
backend/plugins/bamboo/tasks/shared.go | 2 +- backend/plugins/customize/service/service.go | 2 +- backend/plugins/dora/api/data.go | 6 +-- .../migrationscripts/archived/connection.go | 2 +- .../plugins/gitextractor/parser/repo_gogit.go | 2 +- backend/plugins/gitlab/e2e/job_test.go | 2 +- .../migrationscripts/archived/connection.go | 2 +- backend/plugins/gitlab/tasks/shared.go | 2 +- .../tasks/issue_status_history_convertor.go | 2 +- backend/plugins/jenkins/models/build.go | 2 +- .../20220916_modify_jenkins_build.go | 4 +- .../20221131_add_fullName_for_builds.go | 4 +- .../models/migrationscripts/archived/build.go | 2 +- .../20220716_add_init_tables.go | 2 +- .../migrationscripts/archived/source.go | 2 +- backend/plugins/jira/tasks/issue_extractor.go | 2 +- backend/plugins/linker/impl/impl.go | 2 +- .../refdiff/tasks/refdiff_task_data.go | 6 +-- backend/plugins/tapd/api/blueprint_v200.go | 2 +- .../archived/tapd_connection.go | 2 +- .../migrationscripts/archived/connection.go | 2 +- .../tasks/execution_summary_dev_extractor.go | 2 +- .../plugins/zentao/tasks/task_collector.go | 2 +- backend/python/pydevlake/pydevlake/api.py | 2 +- backend/server/api/README.md | 2 +- backend/server/services/blueprint.go | 2 +- backend/server/services/pipeline.go | 6 +-- backend/server/services/pipeline_runner.go | 4 +- backend/server/services/project.go | 2 +- backend/test/helper/client.go | 2 +- .../register/github/transformation.tsx | 2 +- .../detail/components/sync-policy/index.tsx | 12 +++--- .../src/routes/onboard/components/card.tsx | 4 +- config-ui/src/routes/onboard/index.tsx | 2 +- config-ui/src/routes/onboard/styled.ts | 12 +++--- ...veryQuality(RequireJiraAndGitlabData).json | 2 +- grafana/_archive/Gitlab.json | 4 +- grafana/dashboards/DORADebug.json | 4 +- .../DORADetails-ChangeFailureRate.json | 2 +- ...FastDoWeRespondToCustomerRequirements.json | 2 +- 67 files changed, 174 insertions(+), 107 deletions(-) create mode 100644 .codespellrc create mode 100644 
.github/workflows/codespell.yml diff --git a/.codespellrc b/.codespellrc new file mode 100644 index 00000000000..a10c0a3c634 --- /dev/null +++ b/.codespellrc @@ -0,0 +1,28 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +[codespell] +# Ref: https://github.com/codespell-project/codespell#using-a-config-file +skip = .git,.gitignore,.gitattributes,*.svg,go.sum,*.lock,*.css,.codespellrc,.cache,.npm,.yarn,*/e2e/raw_tables/*,*/e2e/snapshot_tables/* +check-hidden = true +# Ignore camelCase and PascalCase identifiers (common in Go and TypeScript code) +ignore-regex = \b[a-z]+[A-Z]\w*\b|\b[A-Z][a-z]+[A-Z]\w*\b +# convertor,convertors - project's deliberate spelling for Go types and filenames (DataConvertor, etc.) 
+# crypted - variable name in AES encrypt/decrypt functions +# te - Tapd API field name (Te/te struct fields) +# thur - Thursday abbreviation in Grafana dashboard SQL column alias +ignore-words-list = convertor,convertors,crypted,te,thur diff --git a/.github/actions/auto-cherry-pick/action.yml b/.github/actions/auto-cherry-pick/action.yml index 94eafe36f81..054ffbcf39d 100644 --- a/.github/actions/auto-cherry-pick/action.yml +++ b/.github/actions/auto-cherry-pick/action.yml @@ -19,7 +19,7 @@ name: "Auto Cherry Pick" description: "cherry pick commits from Pull Requests into Release branch" inputs: trigger_label_prefix: - description: "The trigger label prefic" + description: "The trigger label prefix" default: "needs-cherrypick-" required: false author_email: diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml new file mode 100644 index 00000000000..7697e7bd09f --- /dev/null +++ b/.github/workflows/codespell.yml @@ -0,0 +1,39 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+# Codespell configuration is within .codespellrc
+---
+name: Codespell
+
+on:
+  push:
+    branches: [main]
+  pull_request:
+    branches: [main]
+
+permissions:
+  contents: read
+
+jobs:
+  codespell:
+    name: Check for spelling errors
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Codespell
+        uses: codespell-project/actions-codespell@v2
diff --git a/backend/core/models/domainlayer/README.md b/backend/core/models/domainlayer/README.md
index e6b85d4bb9e..18f082db207 100644
--- a/backend/core/models/domainlayer/README.md
+++ b/backend/core/models/domainlayer/README.md
@@ -57,7 +57,7 @@ The following rules make sure Domain Layer Entities serve its purpose
 - Read data from platform specific table, convert and store record into one(or multiple) domain table(s)
 - Generate its own `Id` accordingly
-- Generate foreign key accordlingly
+- Generate foreign key accordingly
 - Fields conversion
 
 Sample code:
diff --git a/backend/core/models/locking.go b/backend/core/models/locking.go
index 9510e24f345..ff2b1cdd55f 100644
--- a/backend/core/models/locking.go
+++ b/backend/core/models/locking.go
@@ -19,7 +19,7 @@ package models
 
 import "time"
 
-// LockingHistory is desgned for preventing mutiple delake instances from sharing the same database which may cause
+// LockingHistory is designed for preventing multiple devlake instances from sharing the same database which may cause
 // problems like #3537, #3466. It works by the following step:
 //
 //  1.
Each devlake insert a record to this table whie `Succeeded=false` diff --git a/backend/core/plugin/plugin_blueprint.go b/backend/core/plugin/plugin_blueprint.go index 492e54d43dd..35c36b36b25 100644 --- a/backend/core/plugin/plugin_blueprint.go +++ b/backend/core/plugin/plugin_blueprint.go @@ -78,9 +78,9 @@ type DataSourcePluginBlueprintV200 interface { // BlueprintConnectionV200 contains the pluginName/connectionId and related Scopes, // MetricPluginBlueprintV200 is similar to the DataSourcePluginBlueprintV200 -// but for Metric Plugin, take dora as an example, it doens't have any scope, +// but for Metric Plugin, take dora as an example, it doesn't have any scope, // nor does it produce any, however, it does require other plugin to be -// executed beforehand, like calcuating refdiff before it can connect PR to the +// executed beforehand, like calculating refdiff before it can connect PR to the // right Deployment keep in mind it would be called IFF the plugin was enabled // for the project. 
type MetricPluginBlueprintV200 interface { diff --git a/backend/core/plugin/plugin_task.go b/backend/core/plugin/plugin_task.go index 136f4f17eb8..e1404ff0848 100644 --- a/backend/core/plugin/plugin_task.go +++ b/backend/core/plugin/plugin_task.go @@ -106,7 +106,7 @@ type SubTaskMeta struct { Dependencies []*SubTaskMeta DependencyTables []string ProductTables []string - ForceRunOnResume bool // Should a subtask be ran dispite it was finished before + ForceRunOnResume bool // Should a subtask be ran despite it was finished before } // PluginTask Implement this interface to let framework run tasks for you diff --git a/backend/core/utils/network_helper_test.go b/backend/core/utils/network_helper_test.go index 86f76fbb1b2..a46d4096b26 100644 --- a/backend/core/utils/network_helper_test.go +++ b/backend/core/utils/network_helper_test.go @@ -53,7 +53,7 @@ func TestResolvePort(t *testing.T) { } _, err = ResolvePort("", "rabbitmq") if err == nil { - t.Errorf("Expected error %s, Got nil", "schema not fount") + t.Errorf("Expected error %s, Got nil", "schema not found") } _, err = ResolvePort("", "") if err == nil { diff --git a/backend/helpers/e2ehelper/data_flow_tester.go b/backend/helpers/e2ehelper/data_flow_tester.go index 69b6830e2c1..5a2163ef723 100644 --- a/backend/helpers/e2ehelper/data_flow_tester.go +++ b/backend/helpers/e2ehelper/data_flow_tester.go @@ -112,7 +112,7 @@ func NewDataFlowTester(t *testing.T, pluginName string, pluginMeta plugin.Plugin cfg.Set(`DB_URL`, cfg.GetString(`E2E_DB_URL`)) db, err := runner.NewGormDb(cfg, logruslog.Global) if err != nil { - // if here fail with error `acces denied for user` you need to create database by your self as follow command + // if here fail with error `access denied for user` you need to create database by your self as follow command // create databases lake_test; // grant all on lake_test.* to 'merico'@'%'; panic(err) diff --git a/backend/helpers/migrationhelper/migrationhelper.go 
b/backend/helpers/migrationhelper/migrationhelper.go index 24eb537a793..691413b1a56 100644 --- a/backend/helpers/migrationhelper/migrationhelper.go +++ b/backend/helpers/migrationhelper/migrationhelper.go @@ -32,7 +32,7 @@ import ( helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" ) -// AutoMigrateTables runs AutoMigrate for muliple tables +// AutoMigrateTables runs AutoMigrate for multiple tables func AutoMigrateTables(basicRes context.BasicRes, dst ...interface{}) errors.Error { db := basicRes.GetDal() for _, entity := range dst { @@ -74,14 +74,14 @@ func ChangeColumnsType[D any]( err = db.AutoMigrate(new(D), dal.From(tableName)) if err != nil { - return errors.Default.Wrap(err, "AutoMigrate for Add Colume Error") + return errors.Default.Wrap(err, "AutoMigrate for Add Column Error") } defer func() { if err != nil { err1 := db.DropColumns(tableName, columns...) if err1 != nil { - err = errors.Default.Wrap(err, fmt.Sprintf("RollBack by DropColume failed.Relevant data needs to be repaired manually.%s", err1.Error())) + err = errors.Default.Wrap(err, fmt.Sprintf("RollBack by DropColumn failed.Relevant data needs to be repaired manually.%s", err1.Error())) } } }() diff --git a/backend/helpers/migrationhelper/migrationhelper_test.go b/backend/helpers/migrationhelper/migrationhelper_test.go index 5fe70d52b35..7962100e453 100644 --- a/backend/helpers/migrationhelper/migrationhelper_test.go +++ b/backend/helpers/migrationhelper/migrationhelper_test.go @@ -132,7 +132,7 @@ func TestTransformTable(t *testing.T) { assert.Equal(t, dts[2].Id, "fd61a03af4f77d870fc21e05e7e80678095c92d808cfb3b5c279ee04c74aca1357ef3d346f24f386216563752b0c447a35c041e0b7143f929dc4de27742e3307") }).Return(nil).Once() - // for Primarykey autoincrement cheking + // for Primarykey autoincrement checking mockDal.On("GetColumns", mock.Anything, mock.Anything).Run(func(args mock.Arguments) { tableName := args.Get(0).(dal.Tabler).TableName() assert.Equal(t, tableName, TestTableNameSrc) @@ 
-188,7 +188,7 @@ func TestTransformTable_RollBack(t *testing.T) { assert.NotEqual(t, oldname, tmpname) }).Return(nil).Once() - // checking if Rename and Drop RollBack working with rigth table + // checking if Rename and Drop RollBack working with right table mockDal.On("RenameTable", mock.Anything, mock.Anything).Run(func(args mock.Arguments) { tmpname, ok := args.Get(0).(string) assert.Equal(t, ok, true) @@ -203,7 +203,7 @@ func TestTransformTable_RollBack(t *testing.T) { assert.Equal(t, oldname, TestTableNameSrc) }).Return(nil).Once() - // for Primarykey autoincrement cheking + // for Primarykey autoincrement checking mockDal.On("GetColumns", mock.Anything, mock.Anything).Run(func(args mock.Arguments) { tableName := args.Get(0).(dal.Tabler).TableName() assert.Equal(t, tableName, TestTableNameSrc) @@ -296,7 +296,7 @@ func TestCopyTableColumns(t *testing.T) { assert.Equal(t, dts[2].Id, "fd61a03af4f77d870fc21e05e7e80678095c92d808cfb3b5c279ee04c74aca1357ef3d346f24f386216563752b0c447a35c041e0b7143f929dc4de27742e3307") }).Return(nil).Once() - // for Primarykey autoincrement cheking + // for Primarykey autoincrement checking mockDal.On("GetColumns", mock.Anything, mock.Anything).Run(func(args mock.Arguments) { tableName := args.Get(0).(dal.Tabler).TableName() assert.Equal(t, tableName, TestTableNameSrc) @@ -352,7 +352,7 @@ func TestCopyTableColumns_RollBack(t *testing.T) { assert.NotEqual(t, oldname, tmpname) }).Return(nil).Once() - // checking if Rename and Drop RollBack working with rigth table + // checking if Rename and Drop RollBack working with right table mockDal.On("RenameTable", mock.Anything, mock.Anything).Run(func(args mock.Arguments) { tmpname, ok := args.Get(0).(string) assert.Equal(t, ok, true) @@ -367,7 +367,7 @@ func TestCopyTableColumns_RollBack(t *testing.T) { assert.Equal(t, oldname, TestTableNameSrc) }).Return(nil).Once() - // for Primarykey autoincrement cheking + // for Primarykey autoincrement checking mockDal.On("GetColumns", mock.Anything, 
mock.Anything).Run(func(args mock.Arguments) { tableName := args.Get(0).(dal.Tabler).TableName() assert.Equal(t, tableName, TestTableNameSrc) @@ -521,7 +521,7 @@ func TestTransformColumns_RollBack(t *testing.T) { assert.NotEqual(t, columnName, tmpColumnName) }).Return(nil).Once() - // checking if Rename and Drop RollBack working with rigth table + // checking if Rename and Drop RollBack working with right table mockDal.On("RenameColumn", mock.Anything, mock.Anything, mock.Anything).Run(func(args mock.Arguments) { tableName, ok := args.Get(0).(string) assert.Equal(t, ok, true) @@ -634,7 +634,7 @@ func TestChangeColumnsType_Rollback(t *testing.T) { assert.NotEqual(t, columnName, tmpColumnName) }).Return(nil).Once() - // checking if Rename and Drop RollBack working with rigth table + // checking if Rename and Drop RollBack working with right table mockDal.On("RenameColumn", mock.Anything, mock.Anything, mock.Anything).Run(func(args mock.Arguments) { tableName, ok := args.Get(0).(string) assert.Equal(t, ok, true) diff --git a/backend/helpers/pluginhelper/api/api_async_client.go b/backend/helpers/pluginhelper/api/api_async_client.go index 79ef16f1e90..c926c33ac24 100644 --- a/backend/helpers/pluginhelper/api/api_async_client.go +++ b/backend/helpers/pluginhelper/api/api_async_client.go @@ -122,7 +122,7 @@ func CreateAsyncApiClient( return nil, errors.Default.Wrap(err, "failed to create scheduler") } - // finally, wrap around api client with async sematic + // finally, wrap around api client with async semantic return &ApiAsyncClient{ apiClient, scheduler, diff --git a/backend/helpers/pluginhelper/api/api_client.go b/backend/helpers/pluginhelper/api/api_client.go index b0cfccf499f..7354a29db78 100644 --- a/backend/helpers/pluginhelper/api/api_client.go +++ b/backend/helpers/pluginhelper/api/api_client.go @@ -432,7 +432,7 @@ func UnmarshalResponse(res *http.Response, v interface{}) errors.Error { if err != nil { statusCode := res.StatusCode if statusCode == 
http.StatusUnauthorized || statusCode == http.StatusForbidden { - statusCode = http.StatusBadRequest // to avoid Basic Auth Dialog poping up + statusCode = http.StatusBadRequest // to avoid Basic Auth Dialog popping up } return errors.HttpStatus(statusCode).Wrap(err, fmt.Sprintf("error decoding response from %s: raw response: %s", res.Request.URL.String(), string(resBody))) } diff --git a/backend/helpers/pluginhelper/api/batch_save_divider.go b/backend/helpers/pluginhelper/api/batch_save_divider.go index 406d638e91d..393bbcc9bd9 100644 --- a/backend/helpers/pluginhelper/api/batch_save_divider.go +++ b/backend/helpers/pluginhelper/api/batch_save_divider.go @@ -73,10 +73,10 @@ func (d *BatchSaveDivider) ForType(rowType reflect.Type) (*BatchSave, errors.Err rowElemType := rowType.Elem() d.log.Debug("missing BatchSave for type %s", rowElemType.Name()) row := reflect.New(rowElemType).Interface() - // check if rowType had RawDataOrigin embeded + // check if rowType had RawDataOrigin embedded field, hasField := rowElemType.FieldByName("RawDataOrigin") if !hasField || field.Type != reflect.TypeOf(common.RawDataOrigin{}) { - return nil, errors.Default.New(fmt.Sprintf("type %s must have RawDataOrigin embeded", rowElemType.Name())) + return nil, errors.Default.New(fmt.Sprintf("type %s must have RawDataOrigin embedded", rowElemType.Name())) } d.batches[rowType] = batch if !d.incrementalMode { diff --git a/backend/helpers/pluginhelper/api/data_convertor_stateful.go b/backend/helpers/pluginhelper/api/data_convertor_stateful.go index 1c5292dc230..f742df83c73 100644 --- a/backend/helpers/pluginhelper/api/data_convertor_stateful.go +++ b/backend/helpers/pluginhelper/api/data_convertor_stateful.go @@ -192,7 +192,7 @@ func (converter *StatefulDataConverter[InputType]) Execute() errors.Error { if err != nil { return err } - // save the incremantal state + // save the incremental state return converter.SubtaskStateManager.Close() } diff --git 
a/backend/helpers/pluginhelper/api/ds_remote_api_scope_list_helper.go b/backend/helpers/pluginhelper/api/ds_remote_api_scope_list_helper.go index 076b2c6f820..83fef06122b 100644 --- a/backend/helpers/pluginhelper/api/ds_remote_api_scope_list_helper.go +++ b/backend/helpers/pluginhelper/api/ds_remote_api_scope_list_helper.go @@ -31,7 +31,7 @@ const ( RAS_ENTRY_TYPE_SCOPE = "scope" ) -// DsListRemoteScopes is the function type for listing remote scopes that must be implmeneted by the plugin +// DsListRemoteScopes is the function type for listing remote scopes that must be implemented by the plugin type DsListRemoteScopes[C plugin.ToolLayerApiConnection, S plugin.ToolLayerScope, P any] func( connection *C, apiClient plugin.ApiClient, groupId string, page P) (children []models.DsRemoteApiScopeListEntry[S], nextPage *P, errr errors.Error) diff --git a/backend/helpers/pluginhelper/api/iterator.go b/backend/helpers/pluginhelper/api/iterator.go index 37ffd2b85cc..6f991eb9079 100644 --- a/backend/helpers/pluginhelper/api/iterator.go +++ b/backend/helpers/pluginhelper/api/iterator.go @@ -55,7 +55,7 @@ func NewBatchedDalCursorIterator(db dal.Dal, cursor dal.Rows, elemType reflect.T }, nil } -// HasNext increments the row curser. If we're at the end, it'll return false. +// HasNext increments the row cursor. If we're at the end, it'll return false. func (c *DalCursorIterator) HasNext() bool { return c.cursor.Next() } @@ -149,7 +149,7 @@ type QueueIterator struct { queue *Queue } -// HasNext increments the row curser. If we're at the end, it'll return false. +// HasNext increments the row cursor. If we're at the end, it'll return false. 
func (q *QueueIterator) HasNext() bool { return q.queue.GetCount() > 0 } diff --git a/backend/helpers/pluginhelper/api/model_api_helper.go b/backend/helpers/pluginhelper/api/model_api_helper.go index 3e35a7c5d4c..809192aec71 100644 --- a/backend/helpers/pluginhelper/api/model_api_helper.go +++ b/backend/helpers/pluginhelper/api/model_api_helper.go @@ -226,11 +226,11 @@ func parsePagination[P any](input *plugin.ApiResourceInput) (*P, errors.Error) { pagination := new(P) err := utils.DecodeMapStruct(input.Query, pagination, false) if err != nil { - return nil, errors.BadInput.Wrap(err, "faild to decode pagination from query string") + return nil, errors.BadInput.Wrap(err, "failed to decode pagination from query string") } err = utils.DecodeMapStruct(input.Params, pagination, false) if err != nil { - return nil, errors.BadInput.Wrap(err, "faild to decode pagination from path variables") + return nil, errors.BadInput.Wrap(err, "failed to decode pagination from path variables") } if e := vld.Struct(pagination); e != nil { return nil, errors.BadInput.Wrap(e, "invalid pagination parameters") diff --git a/backend/helpers/pluginhelper/api/worker_scheduler.go b/backend/helpers/pluginhelper/api/worker_scheduler.go index d7cfc50961b..74d1c3f70a3 100644 --- a/backend/helpers/pluginhelper/api/worker_scheduler.go +++ b/backend/helpers/pluginhelper/api/worker_scheduler.go @@ -84,7 +84,7 @@ func NewWorkerScheduler( // It doesn't return error because it wouldn't be any when with a Blocking semantic, returned error does nothing but // causing confusion, more often, people thought it is returned by the task. // Since it is async task, the callframes would not be available for production mode, you can export Environment -// Varaible ASYNC_CF=true to enable callframes capturing when debugging. +// Variable ASYNC_CF=true to enable callframes capturing when debugging. 
// IMPORTANT: do NOT call SubmitBlocking inside the async task, it is likely to cause a deadlock, call // SubmitNonBlocking instead when number of tasks is relatively small. func (s *WorkerScheduler) SubmitBlocking(task func() errors.Error) { @@ -118,7 +118,7 @@ func (s *WorkerScheduler) SubmitBlocking(task func() errors.Error) { /* func (s *WorkerScheduler) gatherCallFrames() string { - cf := "set Environment Varaible ASYNC_CF=true to enable callframes capturing" + cf := "set Environment Variable ASYNC_CF=true to enable callframes capturing" if callframeEnabled { cf = utils.GatherCallFrames(1) } diff --git a/backend/helpers/pluginhelper/csv_file_test.go b/backend/helpers/pluginhelper/csv_file_test.go index b2444868819..0958ca7d0d8 100644 --- a/backend/helpers/pluginhelper/csv_file_test.go +++ b/backend/helpers/pluginhelper/csv_file_test.go @@ -36,8 +36,8 @@ func TestExampleCsvFile(t *testing.T) { defer iter.Close() for iter.HasNext() { row := iter.Fetch() - assert.Equal(t, row["name"], "foobar", "name not euqal") - assert.Equal(t, row["json"], `{"url": "https://example.com"}`, "json not euqal") + assert.Equal(t, row["name"], "foobar", "name not equal") + assert.Equal(t, row["json"], `{"url": "https://example.com"}`, "json not equal") } } diff --git a/backend/helpers/srvhelper/model_service_helper.go b/backend/helpers/srvhelper/model_service_helper.go index 9c3d05ecee7..0ca94827c52 100644 --- a/backend/helpers/srvhelper/model_service_helper.go +++ b/backend/helpers/srvhelper/model_service_helper.go @@ -89,7 +89,7 @@ func (srv *ModelSrvHelper[M]) ValidateModel(model *M) errors.Error { } // basic validator if e := srv.validator.Struct(model); e != nil { - return errors.BadInput.Wrap(e, "validation faild") + return errors.BadInput.Wrap(e, "validation failed") } return nil } diff --git a/backend/helpers/srvhelper/scope_service_helper_test.go b/backend/helpers/srvhelper/scope_service_helper_test.go index 1052b8164f4..54d7979f17c 100644 --- 
a/backend/helpers/srvhelper/scope_service_helper_test.go +++ b/backend/helpers/srvhelper/scope_service_helper_test.go @@ -35,7 +35,7 @@ func Test_setDefaultEntities(t *testing.T) { setDefaultEntities(sc1) assert.Equal(t, sc1.Entities, plugin.DOMAIN_TYPES) - // plugin embeded the common ScopeConfig + // plugin embedded the common ScopeConfig sc2 := &struct { common.ScopeConfig }{ diff --git a/backend/impls/context/default_basic_res.go b/backend/impls/context/default_basic_res.go index cb277a540b9..1ee4976dbea 100644 --- a/backend/impls/context/default_basic_res.go +++ b/backend/impls/context/default_basic_res.go @@ -36,7 +36,7 @@ func (c *DefaultBasicRes) GetConfigReader() config.ConfigReader { return c.cfg } -// GetConfig returns the value of the specificed name +// GetConfig returns the value of the specified name func (c *DefaultBasicRes) GetConfig(name string) string { return c.cfg.GetString(name) } diff --git a/backend/impls/dalgorm/dalgorm.go b/backend/impls/dalgorm/dalgorm.go index 3b11312a3c1..ba635355f41 100644 --- a/backend/impls/dalgorm/dalgorm.go +++ b/backend/impls/dalgorm/dalgorm.go @@ -277,7 +277,7 @@ func (d *Dalgorm) Delete(entity interface{}, clauses ...dal.Clause) errors.Error return d.convertGormError(buildTx(d.db, clauses).Delete(entity).Error) } -// UpdateColumn allows you to update mulitple records +// UpdateColumn allows you to update multiple records func (d *Dalgorm) UpdateColumn(entityOrTable interface{}, columnName string, value interface{}, clauses ...dal.Clause) errors.Error { d.unwrapDynamic(&entityOrTable, &clauses) if expr, ok := value.(dal.DalClause); ok { @@ -286,7 +286,7 @@ func (d *Dalgorm) UpdateColumn(entityOrTable interface{}, columnName string, val return d.convertGormError(buildTx(d.db, clauses).Update(columnName, value).Error) } -// UpdateColumns allows you to update multiple columns of mulitple records +// UpdateColumns allows you to update multiple columns of multiple records func (d *Dalgorm) UpdateColumns(entityOrTable 
interface{}, set []dal.DalSet, clauses ...dal.Clause) errors.Error { d.unwrapDynamic(&entityOrTable, &clauses) updatesSet := make(map[string]interface{}) diff --git a/backend/plugins/ae/api/connection.go b/backend/plugins/ae/api/connection.go index 2e42c6152ee..cc3a3889e6e 100644 --- a/backend/plugins/ae/api/connection.go +++ b/backend/plugins/ae/api/connection.go @@ -52,7 +52,7 @@ func testConnection(ctx context.Context, connection models.AeConn) (*plugin.ApiR return &plugin.ApiResourceOutput{Body: true, Status: 200}, nil case 401: // error secretKey or nonceStr return &plugin.ApiResourceOutput{Body: false, Status: http.StatusBadRequest}, nil - default: // unknow what happen , back to user + default: // unknown what happen , back to user return &plugin.ApiResourceOutput{Body: res.Body, Status: res.StatusCode}, nil } } diff --git a/backend/plugins/bamboo/api/connection_api.go b/backend/plugins/bamboo/api/connection_api.go index 3fd72a22e48..4c142fc5d27 100644 --- a/backend/plugins/bamboo/api/connection_api.go +++ b/backend/plugins/bamboo/api/connection_api.go @@ -108,7 +108,7 @@ func TestExistingConnection(input *plugin.ApiResourceInput) (*plugin.ApiResource // @Param body body models.BambooConnection true "json body" // @Success 200 {object} models.BambooConnection // @Failure 400 {string} errcode.Error "Bad Request" -// @Failure 500 {string} errcode.Error "Internel Error" +// @Failure 500 {string} errcode.Error "Internal Error" // @Router /plugins/bamboo/connections [POST] func PostConnections(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { return dsHelper.ConnApi.Post(input) @@ -121,7 +121,7 @@ func PostConnections(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, // @Param connectionId path int true "connection ID" // @Success 200 {object} models.BambooConnection // @Failure 400 {string} errcode.Error "Bad Request" -// @Failure 500 {string} errcode.Error "Internel Error" +// @Failure 500 {string} errcode.Error "Internal 
Error" // @Router /plugins/bamboo/connections/{connectionId} [PATCH] func PatchConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { return dsHelper.ConnApi.Patch(input) @@ -134,7 +134,7 @@ func PatchConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, // @Success 200 {object} models.BambooConnection // @Failure 400 {string} errcode.Error "Bad Request" // @Failure 409 {object} services.BlueprintProjectPairs "References exist to this connection" -// @Failure 500 {string} errcode.Error "Internel Error" +// @Failure 500 {string} errcode.Error "Internal Error" // @Router /plugins/bamboo/connections/{connectionId} [DELETE] func DeleteConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { return dsHelper.ConnApi.Delete(input) @@ -145,7 +145,7 @@ func DeleteConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput // @Tags plugins/bamboo // @Success 200 {object} []models.BambooConnection // @Failure 400 {string} errcode.Error "Bad Request" -// @Failure 500 {string} errcode.Error "Internel Error" +// @Failure 500 {string} errcode.Error "Internal Error" // @Router /plugins/bamboo/connections [GET] func ListConnections(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { return dsHelper.ConnApi.GetAll(input) @@ -157,7 +157,7 @@ func ListConnections(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, // @Param connectionId path int true "connection ID" // @Success 200 {object} models.BambooConnection // @Failure 400 {string} errcode.Error "Bad Request" -// @Failure 500 {string} errcode.Error "Internel Error" +// @Failure 500 {string} errcode.Error "Internal Error" // @Router /plugins/bamboo/connections/{connectionId} [GET] func GetConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { return dsHelper.ConnApi.GetDetail(input) diff --git a/backend/plugins/bamboo/models/plan.go b/backend/plugins/bamboo/models/plan.go index 
3811596f119..386fbbbc433 100644 --- a/backend/plugins/bamboo/models/plan.go +++ b/backend/plugins/bamboo/models/plan.go @@ -110,7 +110,7 @@ type SearchEntity struct { Type string `json:"type"` } -// Name trys to keep plan's name field the same with name in /remote-scopes. +// Name tries to keep plan's name field the same with name in /remote-scopes. // In /remote-scopes, plan's name is "{projectName - planName}". func (entity SearchEntity) Name() string { return strings.Join([]string{entity.ProjectName, entity.PlanName}, " - ") diff --git a/backend/plugins/bamboo/tasks/shared.go b/backend/plugins/bamboo/tasks/shared.go index 182670b954d..fc715aa114f 100644 --- a/backend/plugins/bamboo/tasks/shared.go +++ b/backend/plugins/bamboo/tasks/shared.go @@ -139,7 +139,7 @@ func generateFakeRepoUrl(endpoint string, repoId int) (string, error) { return fmt.Sprintf("fake://%s/repos/%d", endpointURL.Host, repoId), nil } -// covertError will indentify some known errors and transform it to a simple form. +// covertError will identify some known errors and transform it to a simple form. 
func covertError(err errors.Error) errors.Error { if err == nil { return nil diff --git a/backend/plugins/customize/service/service.go b/backend/plugins/customize/service/service.go index 5ef3bcbb3bb..8217be3a5c1 100644 --- a/backend/plugins/customize/service/service.go +++ b/backend/plugins/customize/service/service.go @@ -550,7 +550,7 @@ func (s *Service) qaTestCaseExecutionHandler(qaProjectId string) func(record map } // issueRepoCommitHandlerFactory returns a handler that will populate the `issue_commits` and `issue_repo_commits` table -// ths issueCommitsFields is used to filter the fields that should be inserted into the `issue_commits` table +// the issueCommitsFields is used to filter the fields that should be inserted into the `issue_commits` table func (s *Service) issueRepoCommitHandler(record map[string]interface{}) errors.Error { err := s.dal.CreateWithMap(&crossdomain.IssueRepoCommit{}, record) if err != nil { diff --git a/backend/plugins/dora/api/data.go b/backend/plugins/dora/api/data.go index baada7a4c46..6f610ffa453 100644 --- a/backend/plugins/dora/api/data.go +++ b/backend/plugins/dora/api/data.go @@ -25,7 +25,7 @@ import ( const RAW_DEPLOYMENTS_TABLE = `dora_deplyments` -//TODO Please modify the folowing code to adapt to your plugin +//TODO Please modify the following code to adapt to your plugin /* POST /plugins/dora/deployments { @@ -39,7 +39,7 @@ func PostDeployments(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, const RAW_ISSUES_TABLE = `dora_issues` -//TODO Please modify the folowing code to adapt to your plugin +//TODO Please modify the following code to adapt to your plugin /* POST /plugins/dora/issues { @@ -51,7 +51,7 @@ func PostIssues(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, erro return &plugin.ApiResourceOutput{Body: nil, Status: http.StatusOK}, nil } -//TODO Please modify the folowing code to adapt to your plugin +//TODO Please modify the following code to adapt to your plugin /* POST 
/plugins/dora/issues/:id/close { diff --git a/backend/plugins/gitee/models/migrationscripts/archived/connection.go b/backend/plugins/gitee/models/migrationscripts/archived/connection.go index 55af7fc7713..14370ed586a 100644 --- a/backend/plugins/gitee/models/migrationscripts/archived/connection.go +++ b/backend/plugins/gitee/models/migrationscripts/archived/connection.go @@ -30,7 +30,7 @@ type RestConnection struct { BaseConnection `mapstructure:",squash"` Endpoint string `mapstructure:"endpoint" validate:"required" json:"endpoint"` Proxy string `mapstructure:"proxy" json:"proxy"` - RateLimitPerHour int `comment:"api request rate limt per hour" json:"rateLimit"` + RateLimitPerHour int `comment:"api request rate limit per hour" json:"rateLimit"` } type BaseConnection struct { diff --git a/backend/plugins/gitextractor/parser/repo_gogit.go b/backend/plugins/gitextractor/parser/repo_gogit.go index 21cf09cd9e9..8837aa38347 100644 --- a/backend/plugins/gitextractor/parser/repo_gogit.go +++ b/backend/plugins/gitextractor/parser/repo_gogit.go @@ -400,7 +400,7 @@ func (r *GogitRepoCollector) storeParentCommits(commitSha string, commit *object for i := 0; i < commit.NumParents(); i++ { parent, err := commit.Parent(i) if err != nil { - // parent commit might not exist when repo is shallow cloned (tradeoff of supporting timeAfter paramenter) + // parent commit might not exist when repo is shallow cloned (tradeoff of supporting timeAfter parameter) if err.Error() == "object not found" { continue } diff --git a/backend/plugins/gitlab/e2e/job_test.go b/backend/plugins/gitlab/e2e/job_test.go index 1705f59a8c6..64f45824032 100644 --- a/backend/plugins/gitlab/e2e/job_test.go +++ b/backend/plugins/gitlab/e2e/job_test.go @@ -71,7 +71,7 @@ func TestGitlabJobDataFlow(t *testing.T) { ), ) - // verifi when production regex is omitted + // verify when production regex is omitted dataflowTester.FlushTabler(&devops.CICDTask{}) dataflowTester.Subtask(tasks.ConvertJobMeta, taskData) 
dataflowTester.VerifyTableWithOptions(&devops.CICDTask{}, e2ehelper.TableOptions{ diff --git a/backend/plugins/gitlab/models/migrationscripts/archived/connection.go b/backend/plugins/gitlab/models/migrationscripts/archived/connection.go index f7b7f9f6783..7d717b5ddd7 100644 --- a/backend/plugins/gitlab/models/migrationscripts/archived/connection.go +++ b/backend/plugins/gitlab/models/migrationscripts/archived/connection.go @@ -31,7 +31,7 @@ type RestConnection struct { BaseConnection `mapstructure:",squash"` Endpoint string `mapstructure:"endpoint" validate:"required" json:"endpoint"` Proxy string `mapstructure:"proxy" json:"proxy"` - RateLimitPerHour int `comment:"api request rate limt per hour" json:"rateLimit"` + RateLimitPerHour int `comment:"api request rate limit per hour" json:"rateLimit"` } type BaseConnection struct { diff --git a/backend/plugins/gitlab/tasks/shared.go b/backend/plugins/gitlab/tasks/shared.go index 3c100d7dd2e..6621ba3f2fd 100644 --- a/backend/plugins/gitlab/tasks/shared.go +++ b/backend/plugins/gitlab/tasks/shared.go @@ -187,7 +187,7 @@ func GetMergeRequestsIterator(taskCtx plugin.SubTaskContext, apiCollector *api.S clauses := []dal.Clause{ dal.Select("gmr.gitlab_id, gmr.iid"), dal.From("_tool_gitlab_merge_requests gmr"), - // collect only openning merge request's notes and commits to speed up the process + // collect only opening merge request's notes and commits to speed up the process dal.Where( `gmr.project_id = ? 
and gmr.connection_id = ?`, data.Options.ProjectId, data.Options.ConnectionId, diff --git a/backend/plugins/issue_trace/tasks/issue_status_history_convertor.go b/backend/plugins/issue_trace/tasks/issue_status_history_convertor.go index 42bfd0fd328..efe8ddb4f9a 100644 --- a/backend/plugins/issue_trace/tasks/issue_status_history_convertor.go +++ b/backend/plugins/issue_trace/tasks/issue_status_history_convertor.go @@ -219,7 +219,7 @@ func ConvertIssueStatusHistory(taskCtx plugin.SubTaskContext) errors.Error { } } } - logger.Info("issues status history covert successfully") + logger.Info("issues status history converted successfully") return nil } diff --git a/backend/plugins/jenkins/models/build.go b/backend/plugins/jenkins/models/build.go index 203d81d0023..b38aaa15afa 100644 --- a/backend/plugins/jenkins/models/build.go +++ b/backend/plugins/jenkins/models/build.go @@ -35,7 +35,7 @@ type JenkinsBuild struct { Number int64 `gorm:"index"` Result string // Result Timestamp int64 // start time - StartTime time.Time // convered by timestamp + StartTime time.Time // converted by timestamp Type string `gorm:"index;type:varchar(255)"` Class string `gorm:"index;type:varchar(255)" ` TriggeredBy string `gorm:"type:varchar(255)"` diff --git a/backend/plugins/jenkins/models/migrationscripts/20220916_modify_jenkins_build.go b/backend/plugins/jenkins/models/migrationscripts/20220916_modify_jenkins_build.go index c0d49a0e913..ec4e3322966 100644 --- a/backend/plugins/jenkins/models/migrationscripts/20220916_modify_jenkins_build.go +++ b/backend/plugins/jenkins/models/migrationscripts/20220916_modify_jenkins_build.go @@ -40,7 +40,7 @@ type jenkinsBuild20220916Before struct { Number int64 `gorm:"primaryKey"` Result string // Result Timestamp int64 // start time - StartTime time.Time // convered by timestamp + StartTime time.Time // converted by timestamp Type string `gorm:"index;type:varchar(255)"` Class string `gorm:"index;type:varchar(255)" ` TriggeredBy string 
`gorm:"type:varchar(255)"` @@ -63,7 +63,7 @@ type jenkinsBuild20220916After struct { Number int64 `gorm:"index"` Result string // Result Timestamp int64 // start time - StartTime time.Time // convered by timestamp + StartTime time.Time // converted by timestamp Type string `gorm:"index;type:varchar(255)"` Class string `gorm:"index;type:varchar(255)" ` TriggeredBy string `gorm:"type:varchar(255)"` diff --git a/backend/plugins/jenkins/models/migrationscripts/20221131_add_fullName_for_builds.go b/backend/plugins/jenkins/models/migrationscripts/20221131_add_fullName_for_builds.go index a7c0692c25c..2802835e033 100644 --- a/backend/plugins/jenkins/models/migrationscripts/20221131_add_fullName_for_builds.go +++ b/backend/plugins/jenkins/models/migrationscripts/20221131_add_fullName_for_builds.go @@ -48,7 +48,7 @@ type jenkinsBuild20221131Before struct { Number int64 `gorm:"index"` Result string // Result Timestamp int64 // start time - StartTime time.Time // convered by timestamp + StartTime time.Time // converted by timestamp Type string `gorm:"index;type:varchar(255)"` Class string `gorm:"index;type:varchar(255)" ` TriggeredBy string `gorm:"type:varchar(255)"` @@ -72,7 +72,7 @@ type jenkinsBuild20221131After struct { Number int64 `gorm:"index"` Result string // Result Timestamp int64 // start time - StartTime time.Time // convered by timestamp + StartTime time.Time // converted by timestamp Type string `gorm:"index;type:varchar(255)"` Class string `gorm:"index;type:varchar(255)" ` TriggeredBy string `gorm:"type:varchar(255)"` diff --git a/backend/plugins/jenkins/models/migrationscripts/archived/build.go b/backend/plugins/jenkins/models/migrationscripts/archived/build.go index 4659e0cc2da..70bc781f62d 100644 --- a/backend/plugins/jenkins/models/migrationscripts/archived/build.go +++ b/backend/plugins/jenkins/models/migrationscripts/archived/build.go @@ -35,7 +35,7 @@ type JenkinsBuild struct { Number int64 `gorm:"primaryKey"` Result string // Result Timestamp int64 // 
start time - StartTime time.Time // convered by timestamp + StartTime time.Time // converted by timestamp CommitSha string `gorm:"type:varchar(255)"` } diff --git a/backend/plugins/jira/models/migrationscripts/20220716_add_init_tables.go b/backend/plugins/jira/models/migrationscripts/20220716_add_init_tables.go index d27726f76c3..6f161976c0b 100644 --- a/backend/plugins/jira/models/migrationscripts/20220716_add_init_tables.go +++ b/backend/plugins/jira/models/migrationscripts/20220716_add_init_tables.go @@ -45,7 +45,7 @@ type jiraConnection20220716Before struct { StoryPointField string `gorm:"type:varchar(50);" json:"storyPointField"` RemotelinkCommitShaPattern string `gorm:"type:varchar(255);comment='golang regexp, the first group will be recognized as commit sha, ref https://github.com/google/re2/wiki/Syntax'" json:"remotelinkCommitShaPattern"` Proxy string `json:"proxy"` - RateLimit int `comment:"api request rate limt per hour" json:"rateLimit"` + RateLimit int `comment:"api request rate limit per hour" json:"rateLimit"` } type addInitTables20220716 struct{} diff --git a/backend/plugins/jira/models/migrationscripts/archived/source.go b/backend/plugins/jira/models/migrationscripts/archived/source.go index 2ceed1613b3..951eba5b598 100644 --- a/backend/plugins/jira/models/migrationscripts/archived/source.go +++ b/backend/plugins/jira/models/migrationscripts/archived/source.go @@ -30,7 +30,7 @@ type JiraSource struct { StoryPointField string `gorm:"type:varchar(50);" json:"storyPointField"` RemotelinkCommitShaPattern string `gorm:"type:varchar(255);comment='golang regexp, the first group will be recognized as commit sha, ref https://github.com/google/re2/wiki/Syntax'" json:"remotelinkCommitShaPattern"` Proxy string `json:"proxy"` - RateLimit int `comment:"api request rate limt per second"` + RateLimit int `comment:"api request rate limit per second"` } type JiraIssueTypeMapping struct { diff --git a/backend/plugins/jira/tasks/issue_extractor.go 
b/backend/plugins/jira/tasks/issue_extractor.go index d25423b918f..94620711399 100644 --- a/backend/plugins/jira/tasks/issue_extractor.go +++ b/backend/plugins/jira/tasks/issue_extractor.go @@ -163,7 +163,7 @@ func extractIssues(data *JiraTaskData, mappings *typeMappings, apiIssue *apiv2mo if value, ok := mappings.StandardStatusMappings[issue.Type][issue.StatusKey]; ok { issue.StdStatus = value.StandardStatus } - // issue commments + // issue comments results = append(results, issue) for _, comment := range comments { results = append(results, comment) diff --git a/backend/plugins/linker/impl/impl.go b/backend/plugins/linker/impl/impl.go index 917b22df9aa..e2efb809d9d 100644 --- a/backend/plugins/linker/impl/impl.go +++ b/backend/plugins/linker/impl/impl.go @@ -42,7 +42,7 @@ var _ interface { type Linker struct{} func (p Linker) Description() string { - return "link some cross table datas together" + return "link some cross table data together" } // RequiredDataEntities hasn't been used so far diff --git a/backend/plugins/refdiff/tasks/refdiff_task_data.go b/backend/plugins/refdiff/tasks/refdiff_task_data.go index 782bb921419..c4bc074a234 100644 --- a/backend/plugins/refdiff/tasks/refdiff_task_data.go +++ b/backend/plugins/refdiff/tasks/refdiff_task_data.go @@ -121,7 +121,7 @@ func (rs RefsReverseSemver) Swap(i, j int) { func CalculateTagPattern(db dal.Dal, tagsPattern string, tagsLimit int, tagsOrder string) (Refs, errors.Error) { rs := Refs{} - // caculate Pattern part + // calculate Pattern part if tagsPattern == "" || tagsLimit <= 1 { return rs, nil } @@ -176,7 +176,7 @@ func CalculateCommitPairs(db dal.Dal, repoId string, pairs []models.RefPair, rs commitPairs = append(commitPairs, [4]string{rs[i-1].CommitSha, rs[i].CommitSha, rs[i-1].Name, rs[i].Name}) } - // caculate pairs part + // calculate pairs part // convert ref pairs into commit pairs ref2sha := func(refName string) (string, error) { ref := &code.Ref{} @@ -186,7 +186,7 @@ func CalculateCommitPairs(db 
dal.Dal, repoId string, pairs []models.RefPair, rs ref.Id = fmt.Sprintf("%s:%s", repoId, refName) err := db.First(ref) if err != nil && !db.IsErrorNotFound(err) { - return "", errors.NotFound.Wrap(err, fmt.Sprintf("faild to load Ref info for repoId:%s, refName:%s", repoId, refName)) + return "", errors.NotFound.Wrap(err, fmt.Sprintf("failed to load Ref info for repoId:%s, refName:%s", repoId, refName)) } return ref.CommitSha, nil } diff --git a/backend/plugins/tapd/api/blueprint_v200.go b/backend/plugins/tapd/api/blueprint_v200.go index 676ed30cf00..abb8695a72b 100644 --- a/backend/plugins/tapd/api/blueprint_v200.go +++ b/backend/plugins/tapd/api/blueprint_v200.go @@ -96,7 +96,7 @@ func makeScopesV200( // get workspace and scope config from db tapdWorkspace, scopeConfig := scopeDetail.Scope, scopeDetail.ScopeConfig - // add wrokspace to scopes + // add workspace to scopes if utils.StringsContains(scopeConfig.Entities, plugin.DOMAIN_TYPE_TICKET) { id := idgen.Generate(connection.ID, tapdWorkspace.Id) board := ticket.NewBoard(id, tapdWorkspace.Name) diff --git a/backend/plugins/tapd/models/migrationscripts/archived/tapd_connection.go b/backend/plugins/tapd/models/migrationscripts/archived/tapd_connection.go index b07d89d6a9f..9af60b102c2 100644 --- a/backend/plugins/tapd/models/migrationscripts/archived/tapd_connection.go +++ b/backend/plugins/tapd/models/migrationscripts/archived/tapd_connection.go @@ -39,7 +39,7 @@ type RestConnection struct { BaseConnection `mapstructure:",squash"` Endpoint string `mapstructure:"endpoint" validate:"required" json:"endpoint"` Proxy string `mapstructure:"proxy" json:"proxy"` - RateLimitPerHour int `comment:"api request rate limt per hour" json:"rateLimit"` + RateLimitPerHour int `comment:"api request rate limit per hour" json:"rateLimit"` } type TapdConnection struct { diff --git a/backend/plugins/zentao/models/migrationscripts/archived/connection.go b/backend/plugins/zentao/models/migrationscripts/archived/connection.go index 
dab806d988b..7bee6db349b 100644 --- a/backend/plugins/zentao/models/migrationscripts/archived/connection.go +++ b/backend/plugins/zentao/models/migrationscripts/archived/connection.go @@ -46,7 +46,7 @@ type RestConnection struct { BaseConnection `mapstructure:",squash"` Endpoint string `mapstructure:"endpoint" validate:"required" json:"endpoint"` Proxy string `mapstructure:"proxy" json:"proxy"` - RateLimitPerHour int `comment:"api request rate limt per hour" json:"rateLimit"` + RateLimitPerHour int `comment:"api request rate limit per hour" json:"rateLimit"` } type BaseConnection struct { diff --git a/backend/plugins/zentao/tasks/execution_summary_dev_extractor.go b/backend/plugins/zentao/tasks/execution_summary_dev_extractor.go index 7d3fd24a38e..b77ab4f2638 100644 --- a/backend/plugins/zentao/tasks/execution_summary_dev_extractor.go +++ b/backend/plugins/zentao/tasks/execution_summary_dev_extractor.go @@ -33,7 +33,7 @@ var ExtractExecutionSummaryDevMeta = plugin.SubTaskMeta{ Name: "extractExecutionSummaryDev", EntryPoint: ExtractExecutionSummaryDev, EnabledByDefault: true, - Description: "extract Zentao execution summary from build-in page api", + Description: "extract Zentao execution summary from built-in page api", DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, } diff --git a/backend/plugins/zentao/tasks/task_collector.go b/backend/plugins/zentao/tasks/task_collector.go index ea8a3976a6e..ac0d9d7e6d9 100644 --- a/backend/plugins/zentao/tasks/task_collector.go +++ b/backend/plugins/zentao/tasks/task_collector.go @@ -88,7 +88,7 @@ func CollectTask(taskCtx plugin.SubTaskContext) errors.Error { // extract task's children childTasks, err := extractChildrenWithDFS(task) if err != nil { - return nil, errors.Default.New(fmt.Sprintf("extract task: %v chidren err: %v", task, err)) + return nil, errors.Default.New(fmt.Sprintf("extract task: %v children err: %v", task, err)) } for _, task := range childTasks { allTaskRecords[task.Id] = task diff --git 
a/backend/python/pydevlake/pydevlake/api.py b/backend/python/pydevlake/pydevlake/api.py index a50b71d9424..cc47601cdfd 100644 --- a/backend/python/pydevlake/pydevlake/api.py +++ b/backend/python/pydevlake/pydevlake/api.py @@ -221,7 +221,7 @@ def get_next_page_id(self, response) -> Optional[int | str]: """ Extracts or compute the id of the next page from the response, e.g. incrementing the value of `page` of a JSON body. - This id will be suplied to the next request via `set_next_page_param`. + This id will be supplied to the next request via `set_next_page_param`. Returning None indicates that the response is the last page. """ pass diff --git a/backend/server/api/README.md b/backend/server/api/README.md index 887a85b90ff..04273b38526 100644 --- a/backend/server/api/README.md +++ b/backend/server/api/README.md @@ -18,7 +18,7 @@ limitations under the License. ### Summary -Users can set pipepline plan by config-ui to create schedule jobs. +Users can set pipeline plan by config-ui to create schedule jobs. And config-ui will send blueprint request with cronConfig in crontab format. 
### Cron Job diff --git a/backend/server/services/blueprint.go b/backend/server/services/blueprint.go index 91bab693420..2fb05c3b718 100644 --- a/backend/server/services/blueprint.go +++ b/backend/server/services/blueprint.go @@ -258,7 +258,7 @@ func DeleteBlueprint(id uint64) errors.Error { var blueprintReloadLock sync.Mutex var bpCronIdMap map[uint64]cron.EntryID -// ReloadBlueprints reloades cronjobs based on blueprints +// ReloadBlueprints reloads cronjobs based on blueprints func ReloadBlueprints() (err errors.Error) { enable := true isManual := false diff --git a/backend/server/services/pipeline.go b/backend/server/services/pipeline.go index f6c770f9c6f..080028079ac 100644 --- a/backend/server/services/pipeline.go +++ b/backend/server/services/pipeline.go @@ -439,7 +439,7 @@ func CancelPipeline(pipelineId uint64) errors.Error { pipeline.Status = models.TASK_CANCELLED err = db.Update(pipeline) if err != nil { - return errors.Default.Wrap(err, "faile to update pipeline") + return errors.Default.Wrap(err, "failed to update pipeline") } // now, with RunPipelineInQueue being block and target pipeline got updated // we should update the related tasks as well @@ -449,7 +449,7 @@ func CancelPipeline(pipelineId uint64) errors.Error { dal.Where("pipeline_id = ?", pipelineId), ) if err != nil { - return errors.Default.Wrap(err, "faile to update pipeline tasks") + return errors.Default.Wrap(err, "failed to update pipeline tasks") } // the target pipeline is pending, no running, no need to perform the actual cancel operation return nil @@ -563,7 +563,7 @@ func RerunPipeline(pipelineId uint64, task *models.Task) (tasks []*models.Task, rerunTasks = append(rerunTasks, rerunTask) } - // mark pipline rerun + // mark pipeline rerun err = tx.UpdateColumn(&models.Pipeline{}, "status", models.TASK_RERUN, dal.Where("id = ?", pipelineId), diff --git a/backend/server/services/pipeline_runner.go b/backend/server/services/pipeline_runner.go index b16dc2dc389..0f215a061ff 100644 --- 
a/backend/server/services/pipeline_runner.go +++ b/backend/server/services/pipeline_runner.go @@ -106,8 +106,8 @@ func runPipeline(pipelineId uint64) errors.Error { return NotifyExternal(pipelineId) } -// ComputePipelineStatus determines pipleline status by its latest(rerun included) tasks statuses -// 1. TASK_COMPLETED: all tasks were executed sucessfully +// ComputePipelineStatus determines pipeline status by its latest(rerun included) tasks statuses +// 1. TASK_COMPLETED: all tasks were executed successfully // 2. TASK_FAILED: SkipOnFail=false with failed task(s) // 3. TASK_PARTIAL: SkipOnFail=true with failed task(s) func ComputePipelineStatus(pipeline *models.Pipeline, isCancelled bool) (string, errors.Error) { diff --git a/backend/server/services/project.go b/backend/server/services/project.go index c030a9af53d..a1b92b03ae4 100644 --- a/backend/server/services/project.go +++ b/backend/server/services/project.go @@ -106,7 +106,7 @@ func CreateProject(projectInput *models.ApiInputProject) (*models.ApiOutputProje return nil, err } - // create transaction to updte multiple tables + // create transaction to update multiple tables var err errors.Error tx := db.Begin() defer func() { diff --git a/backend/test/helper/client.go b/backend/test/helper/client.go index 0af014b0513..8c5c9d22aed 100644 --- a/backend/test/helper/client.go +++ b/backend/test/helper/client.go @@ -345,7 +345,7 @@ func runWithTimeout(timeout time.Duration, f func() (bool, errors.Error)) errors select { case <-timer: if !resp.completed { - return errors.Default.New(fmt.Sprintf("timed out calling function after %d miliseconds", timeout.Milliseconds())) + return errors.Default.New(fmt.Sprintf("timed out calling function after %d milliseconds", timeout.Milliseconds())) } return nil case resp = <-resChan: diff --git a/config-ui/src/plugins/register/github/transformation.tsx b/config-ui/src/plugins/register/github/transformation.tsx index ef9d4d7473f..1758f7e6835 100644 --- 
a/config-ui/src/plugins/register/github/transformation.tsx +++ b/config-ui/src/plugins/register/github/transformation.tsx @@ -206,7 +206,7 @@ const renderCollapseItems = ({ label={ <> Issue Severity - + } > diff --git a/config-ui/src/routes/blueprint/detail/components/sync-policy/index.tsx b/config-ui/src/routes/blueprint/detail/components/sync-policy/index.tsx index 9efd012f16f..9f7b24c7340 100644 --- a/config-ui/src/routes/blueprint/detail/components/sync-policy/index.tsx +++ b/config-ui/src/routes/blueprint/detail/components/sync-policy/index.tsx @@ -76,7 +76,7 @@ export const SyncPolicy = ({ const cron = useMemo(() => getCron(isManual, cronConfig), [isManual, cronConfig]); - const [mintue, hour, day, month, week] = useMemo(() => cronConfig.split(' '), [cronConfig]); + const [minute, hour, day, month, week] = useMemo(() => cronConfig.split(' '), [cronConfig]); const handleChangeFrequency = (e: RadioChangeEvent) => { const value = e.target.value; @@ -152,32 +152,32 @@ export const SyncPolicy = ({ onChangeCronConfig([e.target.value, hour, day, month, week].join(' '))} /> onChangeCronConfig([mintue, e.target.value, day, month, week].join(' '))} + onChange={(e) => onChangeCronConfig([minute, e.target.value, day, month, week].join(' '))} /> onChangeCronConfig([mintue, hour, e.target.value, month, week].join(' '))} + onChange={(e) => onChangeCronConfig([minute, hour, e.target.value, month, week].join(' '))} /> onChangeCronConfig([mintue, hour, day, e.target.value, week].join(' '))} + onChange={(e) => onChangeCronConfig([minute, hour, day, e.target.value, week].join(' '))} /> onChangeCronConfig([mintue, hour, day, month, e.target.value].join(' '))} + onChange={(e) => onChangeCronConfig([minute, hour, day, month, e.target.value].join(' '))} /> diff --git a/config-ui/src/routes/onboard/components/card.tsx b/config-ui/src/routes/onboard/components/card.tsx index 3c59ee082d2..d2250573da3 100644 --- a/config-ui/src/routes/onboard/components/card.tsx +++ 
b/config-ui/src/routes/onboard/components/card.tsx @@ -32,7 +32,7 @@ interface Props { } export const OnboardCard = ({ style }: Props) => { - const [oeprating, setOperating] = useState(false); + const [operating, setOperating] = useState(false); const [version, setVersion] = useState(0); const navigate = useNavigate(); @@ -91,7 +91,7 @@ export const OnboardCard = ({ style }: Props) => { title: 'Permanently close this entry?', content: 'You will not be able to get back to the onboarding session again.', okButtonProps: { - loading: oeprating, + loading: operating, }, okText: 'Confirm', onOk: async () => { diff --git a/config-ui/src/routes/onboard/index.tsx b/config-ui/src/routes/onboard/index.tsx index 061be20331f..d29ed40fbcc 100644 --- a/config-ui/src/routes/onboard/index.tsx +++ b/config-ui/src/routes/onboard/index.tsx @@ -129,7 +129,7 @@ export const Onboard = ({ logo, title }: Props) => { {[1, 2, 3].includes(step) && ( {steps.map((it) => ( - + {it.step} {it.title} diff --git a/config-ui/src/routes/onboard/styled.ts b/config-ui/src/routes/onboard/styled.ts index b8d95a9e798..3e083694c9b 100644 --- a/config-ui/src/routes/onboard/styled.ts +++ b/config-ui/src/routes/onboard/styled.ts @@ -49,7 +49,7 @@ export const Step = styled.ul` margin-bottom: 50px; `; -export const StepItem = styled.li<{ $actived: boolean; $activedColor: string }>` +export const StepItem = styled.li<{ $activated: boolean; $activatedColor: string }>` display: flex; align-items: center; position: relative; @@ -65,19 +65,19 @@ export const StepItem = styled.li<{ $actived: boolean; $activedColor: string }>` border: 1px solid rgba(0, 0, 0, 0.25); border-radius: 50%; - ${({ $actived, $activedColor }) => - $actived + ${({ $activated, $activatedColor }) => + $activated ? ` color: #fff; - background-color: ${$activedColor}; + background-color: ${$activatedColor}; border: none; ` : ''} } span:last-child { - ${({ $actived }) => - $actived + ${({ $activated }) => + $activated ? 
` font-size: 24px; font-weight: 600;` diff --git a/grafana/_archive/DeliveryQuality(RequireJiraAndGitlabData).json b/grafana/_archive/DeliveryQuality(RequireJiraAndGitlabData).json index 92e2fef5ee0..4f384919be4 100644 --- a/grafana/_archive/DeliveryQuality(RequireJiraAndGitlabData).json +++ b/grafana/_archive/DeliveryQuality(RequireJiraAndGitlabData).json @@ -621,7 +621,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n timestamp(DATE_ADD(date(gitlab_created_at), INTERVAL -$interval(date(gitlab_created_at))+1 DAY)) as time,\n avg(review_rounds) as \"Pull Request Reveiw Round\"\nFROM\n gitlab_merge_requests gmr\n LEFT JOIN jira_board_gitlab_projects jbgp ON jbgp.gitlab_project_id = gmr.project_id\nWHERE\n state = 'merged'\n and review_rounds > 0\n and jbgp.jira_board_id = $board_id\n and $__timeFilter(gitlab_created_at)\nGROUP BY 1\nORDER BY 1", + "rawSql": "SELECT\n timestamp(DATE_ADD(date(gitlab_created_at), INTERVAL -$interval(date(gitlab_created_at))+1 DAY)) as time,\n avg(review_rounds) as \"Pull Request Review Round\"\nFROM\n gitlab_merge_requests gmr\n LEFT JOIN jira_board_gitlab_projects jbgp ON jbgp.gitlab_project_id = gmr.project_id\nWHERE\n state = 'merged'\n and review_rounds > 0\n and jbgp.jira_board_id = $board_id\n and $__timeFilter(gitlab_created_at)\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ diff --git a/grafana/_archive/Gitlab.json b/grafana/_archive/Gitlab.json index 1baf386b8c6..806613efbf0 100644 --- a/grafana/_archive/Gitlab.json +++ b/grafana/_archive/Gitlab.json @@ -56,7 +56,7 @@ }, "id": 48, "options": { - "content": "
\n
\n \"No.1\"\n

MR Troughput and Pass Rate

\n
\n
", + "content": "
\n
\n \"No.1\"\n

MR Throughput and Pass Rate

\n
\n
", "mode": "html" }, "pluginVersion": "8.0.6", @@ -857,7 +857,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n timestamp(DATE_ADD(date(gitlab_created_at), INTERVAL -$interval(date(gitlab_created_at))+1 DAY)) as time,\n avg(review_rounds) as \"Pull Request Reveiw Round\"\nFROM\n gitlab_merge_requests gmr\nWHERE\n state = 'merged'\n and review_rounds > 0\n and gmr.project_id = $repo_id\n and $__timeFilter(gitlab_created_at)\nGROUP BY 1\nORDER BY 1", + "rawSql": "SELECT\n timestamp(DATE_ADD(date(gitlab_created_at), INTERVAL -$interval(date(gitlab_created_at))+1 DAY)) as time,\n avg(review_rounds) as \"Pull Request Review Round\"\nFROM\n gitlab_merge_requests gmr\nWHERE\n state = 'merged'\n and review_rounds > 0\n and gmr.project_id = $repo_id\n and $__timeFilter(gitlab_created_at)\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/DORADebug.json b/grafana/dashboards/DORADebug.json index 935b035c1da..1fc487e930e 100644 --- a/grafana/dashboards/DORADebug.json +++ b/grafana/dashboards/DORADebug.json @@ -2641,7 +2641,7 @@ ] } ], - "title": "Step 5 - check the median change lead time for each month in Figure 4 (Compare the change_lead_time with the max ranks in GREEN before the first occurence of ORANGE in each month)", + "title": "Step 5 - check the median change lead time for each month in Figure 4 (Compare the change_lead_time with the max ranks in GREEN before the first occurrence of ORANGE in each month)", "type": "table" }, { @@ -3464,7 +3464,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _deployments as(\n select\n distinct d.cicd_deployment_id as deployment_id,\n d.result,\n d.environment,\n d.finished_date,\n d.cicd_scope_id,\n pm.project_name\n from\n cicd_deployment_commits d\n join project_mapping pm on d.cicd_scope_id = pm.row_id\n and pm.`table` = 'cicd_scopes'\n where\n -- only result needs to specified, not envioronment\n d.result = 
'SUCCESS' -- choose your project_name\n and pm.project_name in ($project)\n),\n_incidents as(\n select\n distinct i.id as issue_id,\n i.created_date,\n pm.project_name\n from\n incidents i\n join project_mapping pm on i.scope_id = pm.row_id\n and i.`table` = pm.`table`\n where\n -- choose your project_name\n pm.project_name in ($project)\n)\nselect\n deployment_id as id,\n 'DEPLOYMENT' as type,\n finished_date as time\nfrom\n _deployments\nunion\nselect\n issue_id as id,\n 'INCIDENT' as type,\n created_date as time\nfrom\n _incidents\norder by\n time", + "rawSql": "with _deployments as(\n select\n distinct d.cicd_deployment_id as deployment_id,\n d.result,\n d.environment,\n d.finished_date,\n d.cicd_scope_id,\n pm.project_name\n from\n cicd_deployment_commits d\n join project_mapping pm on d.cicd_scope_id = pm.row_id\n and pm.`table` = 'cicd_scopes'\n where\n -- only result needs to specified, not environment\n d.result = 'SUCCESS' -- choose your project_name\n and pm.project_name in ($project)\n),\n_incidents as(\n select\n distinct i.id as issue_id,\n i.created_date,\n pm.project_name\n from\n incidents i\n join project_mapping pm on i.scope_id = pm.row_id\n and i.`table` = pm.`table`\n where\n -- choose your project_name\n pm.project_name in ($project)\n)\nselect\n deployment_id as id,\n 'DEPLOYMENT' as type,\n finished_date as time\nfrom\n _deployments\nunion\nselect\n issue_id as id,\n 'INCIDENT' as type,\n created_date as time\nfrom\n _incidents\norder by\n time", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/DORADetails-ChangeFailureRate.json b/grafana/dashboards/DORADetails-ChangeFailureRate.json index 8048b282b1f..87e3b9779a0 100644 --- a/grafana/dashboards/DORADetails-ChangeFailureRate.json +++ b/grafana/dashboards/DORADetails-ChangeFailureRate.json @@ -627,7 +627,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _deployments as(\n select\n distinct d.cicd_deployment_id as deployment_id,\n 
d.result,\n d.environment,\n d.finished_date,\n d.cicd_scope_id,\n pm.project_name\n from\n cicd_deployment_commits d\n join project_mapping pm on d.cicd_scope_id = pm.row_id\n and pm.`table` = 'cicd_scopes'\n where\n -- only result needs to specified, not envioronment\n d.result = 'SUCCESS' -- choose your project_name\n and pm.project_name in ($project)\n and $__timeFilter(d.finished_date)\n),\n_incidents as(\n select\n distinct i.id as issue_id,\n i.created_date,\n pm.project_name\n from\n incidents i\n join project_mapping pm on i.scope_id = pm.row_id\n and i.`table` = pm.`table`\n where\n -- choose your project_name\n pm.project_name in ($project)\n and $__timeFilter(i.created_date)\n)\nselect\n finished_date as 'Time (Ascending)',\n deployment_id as 'Entity ID',\n 'DEPLOYMENT' as 'Entity Type (Deployment/Incident)'\nfrom\n _deployments\nunion\nselect\n created_date as 'Time (Ascending)',\n issue_id as 'Entity ID',\n 'INCIDENT' as 'Entity Type (Deployment/Incident)'\nfrom\n _incidents\norder by\n 1", + "rawSql": "with _deployments as(\n select\n distinct d.cicd_deployment_id as deployment_id,\n d.result,\n d.environment,\n d.finished_date,\n d.cicd_scope_id,\n pm.project_name\n from\n cicd_deployment_commits d\n join project_mapping pm on d.cicd_scope_id = pm.row_id\n and pm.`table` = 'cicd_scopes'\n where\n -- only result needs to specified, not environment\n d.result = 'SUCCESS' -- choose your project_name\n and pm.project_name in ($project)\n and $__timeFilter(d.finished_date)\n),\n_incidents as(\n select\n distinct i.id as issue_id,\n i.created_date,\n pm.project_name\n from\n incidents i\n join project_mapping pm on i.scope_id = pm.row_id\n and i.`table` = pm.`table`\n where\n -- choose your project_name\n pm.project_name in ($project)\n and $__timeFilter(i.created_date)\n)\nselect\n finished_date as 'Time (Ascending)',\n deployment_id as 'Entity ID',\n 'DEPLOYMENT' as 'Entity Type (Deployment/Incident)'\nfrom\n _deployments\nunion\nselect\n created_date 
as 'Time (Ascending)',\n issue_id as 'Entity ID',\n 'INCIDENT' as 'Entity Type (Deployment/Incident)'\nfrom\n _incidents\norder by\n 1", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/DemoHowFastDoWeRespondToCustomerRequirements.json b/grafana/dashboards/DemoHowFastDoWeRespondToCustomerRequirements.json index 5a4b1ccfadb..c15b08cafbe 100644 --- a/grafana/dashboards/DemoHowFastDoWeRespondToCustomerRequirements.json +++ b/grafana/dashboards/DemoHowFastDoWeRespondToCustomerRequirements.json @@ -169,7 +169,7 @@ }, "id": 101, "options": { - "content": "
\n \"Merico\"\n

MARI Guide - Requirement Lead Time

\n
\n\nSection | Description\n:----------------- | :-------------\nMetric Definition | Total duration of requirements from proposal to delivery. It can be divided by flow status in the practice domain or project management system to count the time share of each phase and help locate the links that drag out the requirement delivery cycle.\nMetric Value | The Requirement Lead Time reflects the rapid responsiveness of the R&D team.
In theory, the faster you can deliver value to customers, the better, but other aspects such as whether the delivered value meets customer expectations, requirement throughput, and delivery quality must be considered together. Fast delivery does not necessarily equate to good R&D practices.\n\n***\n#### *M (Measure)*\n1. Count the average or 80th percentile requiremnt lead time for different times.\n2. Counts the average or 80th percentile requiremnt lead time for different projects.
\n3. Count the length of time that requirements stay in different practice domains (requirements analysis, design, development, testing, release) or in different states.\n\n##### *A (Analyze)*\n1. Compare the requirement delivery speed of different projects to find the fastest and slowest delivering projects.\n2. Analyze the trend of the average requirement lead time within each cycle, make a vertical comparison, and locate the key points such as maximum value, minimum value, continuous up cycle, and continuous down cycle.\n3. Analyze the trend of the delivery cycle of 80% of the requirements within each cycle, make a longitudinal comparison, and locate the key points such as maximum value, minimum value, continuous up cycle, and continuous down cycle.

\nWhy choose the 80% quantile instead of using the average?
\nThe point of statistics is to make predictions with real and valid data to support better decisions, while the mean and median cannot have the role of supporting predictions.
\nTypically, the mean and 80% quantile statistics will appear twice as far apart, and the 80% and 99% quantile tend to be approximately twice as related.
\nTherefore, the 80% quantile is a good balance point for prediction.\n
\n4. Analysis compares the length of time requirement stays in different practice domains or different states to identify the most time-consuming links and find the key bottlenecks that affect overall delivery speed.\n5. Requirement lead time is correlated with requirement throughput to identify whether the requirement delivery trend is healthy or not.\n - Healthy trend: requirement lead time is shortened and requirement throughput is increased.\n - Unhealthy trend: longer requirement lead time and lower requirement throughput.\n\n\n##### *R (Review)*\nBased on the analysis results, focus on a few key points and use Ishikawa diagram (fishbone diagram) or Root Cause Analysis (RCA) to conduct root cause analysis, research and review. For example, if the requirement delivery cycle becomes longer in several consecutive statistical cycles, it is necessary to further investigate the length of stay of requirements in different phases and find the longest phase for root cause analysis.\n\n1. The requirements phase takes too long: unclear requirements, frequent changes, overly granular requirements, requirements priorities not clearly defined, insufficient resources or experience of requirements analysts or product managers?\n2. The design phase takes too long: unclear requirement documents, insufficient resources or experience of R&D leaders or architects?\n3. The development phase takes too long: unclear design documents, uneven task distribution, high stream load (parallel tasks), too much technical debt, too many bugs, insufficient resources or experience of developers?\n4. The testing phase takes too long: unclear requirements documentation, poor code quality, few automated tests, insufficient resources or experience of testers?\n5. 
The release phase takes too long: too long build or deployment time, insufficient resources or experience of operation and maintenance staff?\n\n##### *I (Improve)*\nBased on the review results, focus on the key root causes, and give targeted improvement measures in terms of norms, processes, tools and behaviors, etc., with clear improvement targets, improvement measures, verification cycles and responsible persons.\n\nThe following are the improvement ideas for reference:\n\n1. Communicate with customers or business parties to clarify requirements, reasonably disassemble requirements and define priorities, and invite business parties, R&D leaders and testing leaders to review requirements.\n2. Invite product managers, R&D personnel, and test leaders to conduct design reviews.\n3. Reduce requirement or task granularity, distribute tasks evenly, reduce flow load, increase unit testing, and solve technical debt and code problems in time to reduce the number of bugs and rework.\n4. Test left, develop self-test, code review, increase automated testing, and improve continuous integration capabilities.\n5. Automate deployment, shorten build time, and improve continuous delivery.\n6. Reasonable resource allocation and necessary training for each job holder.\n7. The improvement results should also be quantifiable to facilitate continuous metrics and tracking of improvement effects.", + "content": "
\n \"Merico\"\n

MARI Guide - Requirement Lead Time

\n
\n\nSection | Description\n:----------------- | :-------------\nMetric Definition | Total duration of requirements from proposal to delivery. It can be divided by flow status in the practice domain or project management system to count the time share of each phase and help locate the links that drag out the requirement delivery cycle.\nMetric Value | The Requirement Lead Time reflects the rapid responsiveness of the R&D team.
In theory, the faster you can deliver value to customers, the better, but other aspects such as whether the delivered value meets customer expectations, requirement throughput, and delivery quality must be considered together. Fast delivery does not necessarily equate to good R&D practices.\n\n***\n#### *M (Measure)*\n1. Count the average or 80th percentile requirement lead time for different times.\n2. Counts the average or 80th percentile requirement lead time for different projects.
\n3. Count the length of time that requirements stay in different practice domains (requirements analysis, design, development, testing, release) or in different states.\n\n##### *A (Analyze)*\n1. Compare the requirement delivery speed of different projects to find the fastest and slowest delivering projects.\n2. Analyze the trend of the average requirement lead time within each cycle, make a vertical comparison, and locate the key points such as maximum value, minimum value, continuous up cycle, and continuous down cycle.\n3. Analyze the trend of the delivery cycle of 80% of the requirements within each cycle, make a longitudinal comparison, and locate the key points such as maximum value, minimum value, continuous up cycle, and continuous down cycle.

\nWhy choose the 80% quantile instead of using the average?
\nThe point of statistics is to make predictions with real and valid data to support better decisions, while the mean and median cannot have the role of supporting predictions.
\nTypically, the mean and 80% quantile statistics will appear twice as far apart, and the 80% and 99% quantile tend to be approximately twice as related.
\nTherefore, the 80% quantile is a good balance point for prediction.\n
\n4. Analysis compares the length of time requirement stays in different practice domains or different states to identify the most time-consuming links and find the key bottlenecks that affect overall delivery speed.\n5. Requirement lead time is correlated with requirement throughput to identify whether the requirement delivery trend is healthy or not.\n - Healthy trend: requirement lead time is shortened and requirement throughput is increased.\n - Unhealthy trend: longer requirement lead time and lower requirement throughput.\n\n\n##### *R (Review)*\nBased on the analysis results, focus on a few key points and use Ishikawa diagram (fishbone diagram) or Root Cause Analysis (RCA) to conduct root cause analysis, research and review. For example, if the requirement delivery cycle becomes longer in several consecutive statistical cycles, it is necessary to further investigate the length of stay of requirements in different phases and find the longest phase for root cause analysis.\n\n1. The requirements phase takes too long: unclear requirements, frequent changes, overly granular requirements, requirements priorities not clearly defined, insufficient resources or experience of requirements analysts or product managers?\n2. The design phase takes too long: unclear requirement documents, insufficient resources or experience of R&D leaders or architects?\n3. The development phase takes too long: unclear design documents, uneven task distribution, high stream load (parallel tasks), too much technical debt, too many bugs, insufficient resources or experience of developers?\n4. The testing phase takes too long: unclear requirements documentation, poor code quality, few automated tests, insufficient resources or experience of testers?\n5. 
The release phase takes too long: too long build or deployment time, insufficient resources or experience of operation and maintenance staff?\n\n##### *I (Improve)*\nBased on the review results, focus on the key root causes, and give targeted improvement measures in terms of norms, processes, tools and behaviors, etc., with clear improvement targets, improvement measures, verification cycles and responsible persons.\n\nThe following are the improvement ideas for reference:\n\n1. Communicate with customers or business parties to clarify requirements, reasonably disassemble requirements and define priorities, and invite business parties, R&D leaders and testing leaders to review requirements.\n2. Invite product managers, R&D personnel, and test leaders to conduct design reviews.\n3. Reduce requirement or task granularity, distribute tasks evenly, reduce flow load, increase unit testing, and solve technical debt and code problems in time to reduce the number of bugs and rework.\n4. Test left, develop self-test, code review, increase automated testing, and improve continuous integration capabilities.\n5. Automate deployment, shorten build time, and improve continuous delivery.\n6. Reasonable resource allocation and necessary training for each job holder.\n7. 
The improvement results should also be quantifiable to facilitate continuous metrics and tracking of improvement effects.", "mode": "markdown" }, "pluginVersion": "8.0.6", From 815f02c0d3accf620b9f6568ff55215e2b7f0a9c Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sat, 21 Mar 2026 10:08:42 +0800 Subject: [PATCH 25/39] feat(q-dev): enrich logging fields, separate dashboards, add E2E tests (#8786) * feat(q-dev): enrich logging fields, separate dashboards by data source, add E2E tests - Add new fields to chat_log: CodeReferenceCount, WebLinkCount, HasFollowupPrompts (from codeReferenceEvents, supplementaryWebLinksEvent, followupPrompts in JSON) - Add new fields to completion_log: LeftContextLength, RightContextLength (from leftContext/rightContext in JSON) - Update s3_logging_extractor to parse and populate new fields - Add migration script 20260319_add_logging_fields - Create qdev_feature_metrics dashboard for legacy by_user_analytic data - Reorganize qdev_executive dashboard with Row dividers labeling data sources and cross-dashboard navigation links - Enrich qdev_logging dashboard with new panels: Chat Trigger Type Distribution, Response Enrichment Breakdown, Completion Context Size Trends, Response Enrichment Trends - Fix SQL compatibility with only_full_group_by mode in executive dashboard (Weekly Active Users Trend, New vs Returning Users) - Fix Steering Adoption stat panel returning string instead of numeric value - Add Playwright E2E test covering full pipeline flow and dashboard verification * fix: add Apache license headers to e2e files, fix gofmt alignment --- backend/plugins/q_dev/models/chat_log.go | 3 + .../plugins/q_dev/models/completion_log.go | 22 +- .../20260319_add_logging_fields.go | 45 + .../migrationscripts/archived/chat_log.go | 3 + .../archived/completion_log.go | 22 +- .../q_dev/models/migrationscripts/register.go | 1 + .../q_dev/tasks/s3_logging_extractor.go | 32 +- e2e/package.json | 16 + e2e/playwright.config.ts | 39 + 
e2e/qdev-full-flow.spec.ts | 248 +++++ grafana/dashboards/qdev_executive.json | 438 +++----- grafana/dashboards/qdev_feature_metrics.json | 948 ++++++++++++++++++ grafana/dashboards/qdev_logging.json | 338 ++++++- 13 files changed, 1801 insertions(+), 354 deletions(-) create mode 100644 backend/plugins/q_dev/models/migrationscripts/20260319_add_logging_fields.go create mode 100644 e2e/package.json create mode 100644 e2e/playwright.config.ts create mode 100644 e2e/qdev-full-flow.spec.ts create mode 100644 grafana/dashboards/qdev_feature_metrics.json diff --git a/backend/plugins/q_dev/models/chat_log.go b/backend/plugins/q_dev/models/chat_log.go index 06679c515b6..6b39bffa4de 100644 --- a/backend/plugins/q_dev/models/chat_log.go +++ b/backend/plugins/q_dev/models/chat_log.go @@ -44,6 +44,9 @@ type QDevChatLog struct { ActiveFileExtension string `gorm:"type:varchar(50)" json:"activeFileExtension"` HasSteering bool `json:"hasSteering"` IsSpecMode bool `json:"isSpecMode"` + CodeReferenceCount int `json:"codeReferenceCount"` + WebLinkCount int `json:"webLinkCount"` + HasFollowupPrompts bool `json:"hasFollowupPrompts"` } func (QDevChatLog) TableName() string { diff --git a/backend/plugins/q_dev/models/completion_log.go b/backend/plugins/q_dev/models/completion_log.go index 0d0e0404ce8..00a1b471f13 100644 --- a/backend/plugins/q_dev/models/completion_log.go +++ b/backend/plugins/q_dev/models/completion_log.go @@ -26,16 +26,18 @@ import ( // QDevCompletionLog stores parsed data from GenerateCompletions logging events type QDevCompletionLog struct { common.NoPKModel - ConnectionId uint64 `gorm:"primaryKey"` - ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` - RequestId string `gorm:"primaryKey;type:varchar(255)" json:"requestId"` - UserId string `gorm:"index;type:varchar(255)" json:"userId"` - DisplayName string `gorm:"type:varchar(255)" json:"displayName"` - Timestamp time.Time `gorm:"index" json:"timestamp"` - FileName string `gorm:"type:varchar(512)" 
json:"fileName"` - FileExtension string `gorm:"type:varchar(50)" json:"fileExtension"` - HasCustomization bool `json:"hasCustomization"` - CompletionsCount int `json:"completionsCount"` + ConnectionId uint64 `gorm:"primaryKey"` + ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` + RequestId string `gorm:"primaryKey;type:varchar(255)" json:"requestId"` + UserId string `gorm:"index;type:varchar(255)" json:"userId"` + DisplayName string `gorm:"type:varchar(255)" json:"displayName"` + Timestamp time.Time `gorm:"index" json:"timestamp"` + FileName string `gorm:"type:varchar(512)" json:"fileName"` + FileExtension string `gorm:"type:varchar(50)" json:"fileExtension"` + HasCustomization bool `json:"hasCustomization"` + CompletionsCount int `json:"completionsCount"` + LeftContextLength int `json:"leftContextLength"` + RightContextLength int `json:"rightContextLength"` } func (QDevCompletionLog) TableName() string { diff --git a/backend/plugins/q_dev/models/migrationscripts/20260319_add_logging_fields.go b/backend/plugins/q_dev/models/migrationscripts/20260319_add_logging_fields.go new file mode 100644 index 00000000000..f98c3d1066f --- /dev/null +++ b/backend/plugins/q_dev/models/migrationscripts/20260319_add_logging_fields.go @@ -0,0 +1,45 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/helpers/migrationhelper" + "github.com/apache/incubator-devlake/plugins/q_dev/models/migrationscripts/archived" +) + +var _ = (*addLoggingFields)(nil) + +type addLoggingFields struct{} + +func (*addLoggingFields) Up(basicRes context.BasicRes) errors.Error { + return migrationhelper.AutoMigrateTables( + basicRes, + &archived.QDevChatLog{}, + &archived.QDevCompletionLog{}, + ) +} + +func (*addLoggingFields) Version() uint64 { + return 20260319000001 +} + +func (*addLoggingFields) Name() string { + return "Add code_reference_count, web_link_count, has_followup_prompts to chat_log; left/right_context_length to completion_log" +} diff --git a/backend/plugins/q_dev/models/migrationscripts/archived/chat_log.go b/backend/plugins/q_dev/models/migrationscripts/archived/chat_log.go index 8278f52ff1e..ee7d10a1e87 100644 --- a/backend/plugins/q_dev/models/migrationscripts/archived/chat_log.go +++ b/backend/plugins/q_dev/models/migrationscripts/archived/chat_log.go @@ -43,6 +43,9 @@ type QDevChatLog struct { ActiveFileExtension string `gorm:"type:varchar(50)" json:"activeFileExtension"` HasSteering bool `json:"hasSteering"` IsSpecMode bool `json:"isSpecMode"` + CodeReferenceCount int `json:"codeReferenceCount"` + WebLinkCount int `json:"webLinkCount"` + HasFollowupPrompts bool `json:"hasFollowupPrompts"` } func (QDevChatLog) TableName() string { diff --git a/backend/plugins/q_dev/models/migrationscripts/archived/completion_log.go b/backend/plugins/q_dev/models/migrationscripts/archived/completion_log.go index 035c13ef2e0..4acff956978 100644 --- a/backend/plugins/q_dev/models/migrationscripts/archived/completion_log.go +++ b/backend/plugins/q_dev/models/migrationscripts/archived/completion_log.go 
@@ -25,16 +25,18 @@ import ( type QDevCompletionLog struct { archived.NoPKModel - ConnectionId uint64 `gorm:"primaryKey"` - ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` - RequestId string `gorm:"primaryKey;type:varchar(255)" json:"requestId"` - UserId string `gorm:"index;type:varchar(255)" json:"userId"` - DisplayName string `gorm:"type:varchar(255)" json:"displayName"` - Timestamp time.Time `gorm:"index" json:"timestamp"` - FileName string `gorm:"type:varchar(512)" json:"fileName"` - FileExtension string `gorm:"type:varchar(50)" json:"fileExtension"` - HasCustomization bool `json:"hasCustomization"` - CompletionsCount int `json:"completionsCount"` + ConnectionId uint64 `gorm:"primaryKey"` + ScopeId string `gorm:"primaryKey;type:varchar(255)" json:"scopeId"` + RequestId string `gorm:"primaryKey;type:varchar(255)" json:"requestId"` + UserId string `gorm:"index;type:varchar(255)" json:"userId"` + DisplayName string `gorm:"type:varchar(255)" json:"displayName"` + Timestamp time.Time `gorm:"index" json:"timestamp"` + FileName string `gorm:"type:varchar(512)" json:"fileName"` + FileExtension string `gorm:"type:varchar(50)" json:"fileExtension"` + HasCustomization bool `json:"hasCustomization"` + CompletionsCount int `json:"completionsCount"` + LeftContextLength int `json:"leftContextLength"` + RightContextLength int `json:"rightContextLength"` } func (QDevCompletionLog) TableName() string { diff --git a/backend/plugins/q_dev/models/migrationscripts/register.go b/backend/plugins/q_dev/models/migrationscripts/register.go index 5480d5eaf29..8b5de0bcc16 100644 --- a/backend/plugins/q_dev/models/migrationscripts/register.go +++ b/backend/plugins/q_dev/models/migrationscripts/register.go @@ -36,5 +36,6 @@ func All() []plugin.MigrationScript { new(fixDedupUserTables), new(resetS3FileMetaProcessed), new(addLoggingTables), + new(addLoggingFields), } } diff --git a/backend/plugins/q_dev/tasks/s3_logging_extractor.go 
b/backend/plugins/q_dev/tasks/s3_logging_extractor.go index df55a663b13..3fa771789a1 100644 --- a/backend/plugins/q_dev/tasks/s3_logging_extractor.go +++ b/backend/plugins/q_dev/tasks/s3_logging_extractor.go @@ -280,10 +280,13 @@ type chatLogRequest struct { type chatLogResponse struct { RequestID string `json:"requestId"` AssistantResponse string `json:"assistantResponse"` + FollowupPrompts string `json:"followupPrompts"` MessageMetadata struct { ConversationID *string `json:"conversationId"` UtteranceID *string `json:"utteranceId"` } `json:"messageMetadata"` + CodeReferenceEvents []json.RawMessage `json:"codeReferenceEvents"` + SupplementaryWebLinksEvent []json.RawMessage `json:"supplementaryWebLinksEvent"` } type completionLogRecord struct { @@ -296,6 +299,8 @@ type completionLogRequest struct { Timestamp string `json:"timeStamp"` FileName string `json:"fileName"` CustomizationArn *string `json:"customizationArn"` + LeftContext string `json:"leftContext"` + RightContext string `json:"rightContext"` } type completionLogResponse struct { @@ -347,6 +352,11 @@ func parseChatRecord(raw json.RawMessage, fileMeta *models.QDevS3FileMeta, ident chatLog.UtteranceId = *record.Response.MessageMetadata.UtteranceID } + // New fields from docs: codeReferenceEvents, supplementaryWebLinksEvent, followupPrompts + chatLog.CodeReferenceCount = len(record.Response.CodeReferenceEvents) + chatLog.WebLinkCount = len(record.Response.SupplementaryWebLinksEvent) + chatLog.HasFollowupPrompts = record.Response.FollowupPrompts != "" + return chatLog, nil } @@ -406,16 +416,18 @@ func parseCompletionRecord(raw json.RawMessage, fileMeta *models.QDevS3FileMeta, userId := normalizeUserId(record.Request.UserID) return &models.QDevCompletionLog{ - ConnectionId: fileMeta.ConnectionId, - ScopeId: fileMeta.ScopeId, - RequestId: record.Response.RequestID, - UserId: userId, - DisplayName: cachedResolveDisplayName(userId, identityClient, cache), - Timestamp: ts, - FileName: record.Request.FileName, - 
FileExtension: filepath.Ext(record.Request.FileName), - HasCustomization: record.Request.CustomizationArn != nil && *record.Request.CustomizationArn != "", - CompletionsCount: len(record.Response.Completions), + ConnectionId: fileMeta.ConnectionId, + ScopeId: fileMeta.ScopeId, + RequestId: record.Response.RequestID, + UserId: userId, + DisplayName: cachedResolveDisplayName(userId, identityClient, cache), + Timestamp: ts, + FileName: record.Request.FileName, + FileExtension: filepath.Ext(record.Request.FileName), + HasCustomization: record.Request.CustomizationArn != nil && *record.Request.CustomizationArn != "", + CompletionsCount: len(record.Response.Completions), + LeftContextLength: len(record.Request.LeftContext), + RightContextLength: len(record.Request.RightContext), }, nil } diff --git a/e2e/package.json b/e2e/package.json new file mode 100644 index 00000000000..af179c7b6e7 --- /dev/null +++ b/e2e/package.json @@ -0,0 +1,16 @@ +{ + "name": "e2e", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "type": "commonjs", + "dependencies": { + "@playwright/test": "^1.58.2" + } +} diff --git a/e2e/playwright.config.ts b/e2e/playwright.config.ts new file mode 100644 index 00000000000..d4006283687 --- /dev/null +++ b/e2e/playwright.config.ts @@ -0,0 +1,39 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +import { defineConfig } from '@playwright/test'; + +export default defineConfig({ + testDir: '.', + testMatch: '*.spec.ts', + timeout: 180000, + expect: { + timeout: 10000, + }, + use: { + baseURL: 'http://localhost:4000', + screenshot: 'on', + trace: 'on-first-retry', + }, + reporter: [['html', { open: 'never' }], ['list']], + projects: [ + { + name: 'chromium', + use: { browserName: 'chromium', viewport: { width: 1440, height: 900 } }, + }, + ], +}); diff --git a/e2e/qdev-full-flow.spec.ts b/e2e/qdev-full-flow.spec.ts new file mode 100644 index 00000000000..0498df67baa --- /dev/null +++ b/e2e/qdev-full-flow.spec.ts @@ -0,0 +1,248 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +import { test, expect, request, Page } from '@playwright/test'; +import * as path from 'path'; +import * as fs from 'fs'; + +const API = 'http://localhost:8080'; +const UI = 'http://localhost:4000'; +const GRAFANA = 'http://localhost:3002'; +const SCREENSHOT_DIR = path.join(__dirname, 'screenshots'); + +// Use existing connection with valid credentials +const EXISTING_CONNECTION_ID = 5; + +const state: { + connectionId: number; + scopeId: string; + blueprintId: number; + pipelineId: number; +} = { connectionId: EXISTING_CONNECTION_ID, scopeId: '', blueprintId: 0, pipelineId: 0 }; + +fs.mkdirSync(SCREENSHOT_DIR, { recursive: true }); + +async function grafanaLogin(page: Page) { + await page.goto(`${GRAFANA}/grafana/login`); + await page.waitForLoadState('networkidle'); + if (page.url().includes('/login')) { + await page.locator('input[name="user"]').fill('admin'); + await page.locator('input[name="password"]').fill('admin'); + await page.locator('button[type="submit"]').click(); + await page.waitForTimeout(2000); + // Handle "change password" prompt if shown + const skipBtn = page.locator('a:has-text("Skip")'); + if (await skipBtn.isVisible({ timeout: 2000 }).catch(() => false)) { + await skipBtn.click(); + } + await page.waitForTimeout(1000); + } +} + +async function openGrafanaDashboard(page: Page, uid: string, screenshotPath: string) { + await grafanaLogin(page); + await page.goto(`${GRAFANA}/grafana/d/${uid}?orgId=1&from=now-90d&to=now`); + + // Wait for first panel data to load + try { + await page.waitForResponse( + (resp) => resp.url().includes('/api/ds/query') && resp.status() === 200, + { timeout: 30000 } + ); + } catch { /* some dashboards may not fire queries immediately */ } + + // Wait for rendering to settle + await page.waitForTimeout(5000); + + // Take viewport screenshot (top section) + await page.screenshot({ path: screenshotPath.replace('.png', '-top.png') }); + + // Scroll down and take more sections + const scrollHeight = await 
page.evaluate(() => document.body.scrollHeight); + let section = 1; + for (let y = 900; y < scrollHeight; y += 900) { + await page.evaluate((scrollY) => window.scrollTo(0, scrollY), y); + await page.waitForTimeout(3000); + section++; + await page.screenshot({ path: screenshotPath.replace('.png', `-section${section}.png`) }); + } + + // Also take full page screenshot + await page.evaluate(() => window.scrollTo(0, 0)); + await page.waitForTimeout(2000); + await page.screenshot({ path: screenshotPath, fullPage: true }); +} + +test.describe.serial('Q-Dev Plugin Full Flow', () => { + + test('Step 1: Verify Existing Connection via API', async () => { + const api = await request.newContext({ baseURL: API }); + + const resp = await api.get(`/plugins/q_dev/connections/${state.connectionId}`); + expect(resp.ok()).toBeTruthy(); + const conn = await resp.json(); + console.log(`Using connection: id=${conn.id}, name=${conn.name}, bucket=${conn.bucket}`); + + const testResp = await api.post(`/plugins/q_dev/connections/${state.connectionId}/test`); + const testBody = await testResp.json(); + console.log('Test connection:', testBody.success ? 
'OK' : testBody.message); + expect(testResp.ok()).toBeTruthy(); + }); + + test('Step 2: View Config-UI Home', async ({ page }) => { + await page.goto(UI); + await page.waitForLoadState('networkidle'); + await page.waitForTimeout(1000); + await page.screenshot({ path: path.join(SCREENSHOT_DIR, '01-config-ui-home.png'), fullPage: true }); + console.log('Screenshot: Config-UI home'); + }); + + test('Step 3: Create Scope (S3 Slice) via API', async () => { + const api = await request.newContext({ baseURL: API }); + + const resp = await api.put(`/plugins/q_dev/connections/${state.connectionId}/scopes`, { + data: { + data: [ + { + accountId: '034362076319', + basePath: '', + year: 2026, + month: 3, + }, + ], + }, + }); + + const body = await resp.json(); + console.log('Scope created:', resp.status()); + expect(resp.ok()).toBeTruthy(); + state.scopeId = body[0]?.id; + expect(state.scopeId).toBeTruthy(); + console.log(`Scope id: ${state.scopeId}`); + }); + + test('Step 4: Create Blueprint via API', async () => { + const api = await request.newContext({ baseURL: API }); + + const resp = await api.post('/blueprints', { + data: { + name: `e2e-blueprint-${Date.now()}`, + mode: 'NORMAL', + enable: true, + cronConfig: '0 0 * * *', + isManual: true, + connections: [ + { + pluginName: 'q_dev', + connectionId: state.connectionId, + scopes: [{ scopeId: state.scopeId }], + }, + ], + }, + }); + + const body = await resp.json(); + expect(resp.ok()).toBeTruthy(); + state.blueprintId = body.id; + console.log(`Blueprint created: id=${state.blueprintId}`); + }); + + test('Step 5: Trigger Pipeline via API', async () => { + const api = await request.newContext({ baseURL: API }); + + const resp = await api.post(`/blueprints/${state.blueprintId}/trigger`, { data: {} }); + const body = await resp.json(); + expect(resp.ok()).toBeTruthy(); + state.pipelineId = body.id; + console.log(`Pipeline triggered: id=${state.pipelineId}`); + }); + + test('Step 6: Wait for Pipeline to Complete', async () => { 
+ const api = await request.newContext({ baseURL: API }); + const maxWait = 120000; + const start = Date.now(); + let status = ''; + + while (Date.now() - start < maxWait) { + const resp = await api.get(`/pipelines/${state.pipelineId}`); + const pipeline = await resp.json(); + status = pipeline.status; + console.log(`Pipeline status: ${status} (${Math.round((Date.now() - start) / 1000)}s)`); + if (['TASK_COMPLETED', 'TASK_FAILED', 'TASK_PARTIAL'].includes(status)) break; + await new Promise((r) => setTimeout(r, 3000)); + } + + // Print task details + const tasksResp = await api.get(`/pipelines/${state.pipelineId}/tasks`); + if (tasksResp.ok()) { + const { tasks } = await tasksResp.json(); + for (const t of tasks || []) { + console.log(` Task ${t.id}: ${t.status}${t.failedSubTask ? ` (failed: ${t.failedSubTask})` : ''}`); + if (t.message) console.log(` Error: ${t.message.substring(0, 300)}`); + } + } + + expect(status).toBe('TASK_COMPLETED'); + }); + + test('Step 7: Verify Data via MySQL', async () => { + const api = await request.newContext({ baseURL: API }); + + // Use pipeline tasks to confirm data was processed + const tasksResp = await api.get(`/pipelines/${state.pipelineId}/tasks`); + const { tasks } = await tasksResp.json(); + expect(tasks[0].status).toBe('TASK_COMPLETED'); + console.log(`Pipeline completed in ${tasks[0].spentSeconds}s`); + }); + + test('Step 8: Grafana - Kiro Usage Dashboard (new format)', async ({ page }) => { + await openGrafanaDashboard(page, 'qdev_user_report', path.join(SCREENSHOT_DIR, '02-dashboard-user-report.png')); + console.log('Screenshot: Kiro Usage Dashboard'); + }); + + test('Step 9: Grafana - Kiro Legacy Feature Metrics', async ({ page }) => { + await openGrafanaDashboard(page, 'qdev_feature_metrics', path.join(SCREENSHOT_DIR, '03-dashboard-feature-metrics.png')); + console.log('Screenshot: Kiro Legacy Feature Metrics'); + }); + + test('Step 10: Grafana - Kiro AI Activity Insights (logging)', async ({ page }) => { + await 
openGrafanaDashboard(page, 'qdev_logging', path.join(SCREENSHOT_DIR, '04-dashboard-logging.png')); + console.log('Screenshot: Kiro AI Activity Insights'); + }); + + test('Step 11: Grafana - Kiro Executive Dashboard', async ({ page }) => { + await openGrafanaDashboard(page, 'qdev_executive', path.join(SCREENSHOT_DIR, '05-dashboard-executive.png')); + console.log('Screenshot: Kiro Executive Dashboard'); + }); + + test('Step 12: View Pipeline in Config-UI', async ({ page }) => { + // Navigate to the API proxy route for pipelines + await page.goto(`${UI}/api/pipelines?pageSize=5`); + await page.waitForLoadState('networkidle'); + await page.screenshot({ path: path.join(SCREENSHOT_DIR, '06-config-ui-pipelines.png'), fullPage: true }); + console.log('Screenshot: Pipelines API response'); + }); + + test('Step 13: Cleanup', async () => { + const api = await request.newContext({ baseURL: API }); + if (state.blueprintId) { + await api.delete(`/blueprints/${state.blueprintId}`); + console.log(`Deleted blueprint ${state.blueprintId}`); + } + console.log('Cleanup complete'); + }); +}); diff --git a/grafana/dashboards/qdev_executive.json b/grafana/dashboards/qdev_executive.json index c6e2524d70b..df36a6c9e0e 100644 --- a/grafana/dashboards/qdev_executive.json +++ b/grafana/dashboards/qdev_executive.json @@ -16,11 +16,61 @@ "fiscalYearStartMonth": 0, "graphTooltip": 0, "id": null, - "links": [], + "links": [ + { + "asDropdown": false, + "icon": "external link", + "includeVars": true, + "keepTime": true, + "tags": [], + "targetBlank": true, + "title": "Usage (New)", + "tooltip": "Kiro Usage Dashboard - Credits & Messages (new format)", + "type": "link", + "url": "/d/qdev_user_report" + }, + { + "asDropdown": false, + "icon": "external link", + "includeVars": true, + "keepTime": true, + "tags": [], + "targetBlank": true, + "title": "Feature Metrics (Legacy)", + "tooltip": "Kiro Legacy Feature Metrics (old format)", + "type": "link", + "url": "/d/qdev_feature_metrics" + }, + { + 
"asDropdown": false, + "icon": "external link", + "includeVars": true, + "keepTime": true, + "tags": [], + "targetBlank": true, + "title": "Prompt Logging", + "tooltip": "Kiro AI Activity Insights - Prompt Logging", + "type": "link", + "url": "/d/qdev_logging" + } + ], "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 100, + "panels": [], + "title": "KPI Overview (cross-source)", + "type": "row" + }, { "datasource": "mysql", - "description": "Distinct users with chat activity in the last 7 days", + "description": "Distinct users with chat activity in the last 7 days (from prompt logging)", "fieldConfig": { "defaults": { "color": { @@ -42,7 +92,7 @@ "h": 6, "w": 6, "x": 0, - "y": 0 + "y": 1 }, "id": 1, "options": { @@ -74,12 +124,12 @@ "refId": "A" } ], - "title": "Weekly Active Users", + "title": "Weekly Active Users (logging)", "type": "stat" }, { "datasource": "mysql", - "description": "Average credits spent per accepted line of code", + "description": "Average credits spent per accepted line of code (new report + legacy metrics)", "fieldConfig": { "defaults": { "color": { @@ -101,7 +151,7 @@ "h": 6, "w": 6, "x": 6, - "y": 0 + "y": 1 }, "id": 2, "options": { @@ -133,12 +183,12 @@ "refId": "A" } ], - "title": "Credits Efficiency", + "title": "Credits Efficiency (new + legacy)", "type": "stat" }, { "datasource": "mysql", - "description": "Percentage of inline suggestions accepted", + "description": "Percentage of inline suggestions accepted (from legacy feature metrics)", "fieldConfig": { "defaults": { "color": { @@ -160,7 +210,7 @@ "h": 6, "w": 6, "x": 12, - "y": 0 + "y": 1 }, "id": 3, "options": { @@ -192,12 +242,12 @@ "refId": "A" } ], - "title": "Inline Acceptance Rate", + "title": "Inline Acceptance Rate (legacy)", "type": "stat" }, { "datasource": "mysql", - "description": "Percentage of users using steering rules", + "description": "Percentage of users who used steering rules (from prompt logging)", 
"fieldConfig": { "defaults": { "color": { @@ -211,7 +261,8 @@ "color": "green" } ] - } + }, + "unit": "percent" }, "overrides": [] }, @@ -219,7 +270,7 @@ "h": 6, "w": 6, "x": 18, - "y": 0 + "y": 1 }, "id": 4, "options": { @@ -247,16 +298,29 @@ "editorMode": "code", "format": "table", "rawQuery": true, - "rawSql": "SELECT CONCAT(ROUND(COUNT(DISTINCT CASE WHEN has_steering = 1 THEN user_id END) / NULLIF(COUNT(DISTINCT user_id), 0) * 100, 0), '%') as 'Users with Steering'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)", + "rawSql": "SELECT ROUND(COUNT(DISTINCT CASE WHEN has_steering = 1 THEN user_id END) / NULLIF(COUNT(DISTINCT user_id), 0) * 100, 0) as 'Steering %'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)", "refId": "A" } ], - "title": "Steering Adoption", + "title": "Steering Adoption (logging)", "type": "stat" }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 7 + }, + "id": 101, + "panels": [], + "title": "User Engagement (logging data: _tool_q_dev_chat_log)", + "type": "row" + }, { "datasource": "mysql", - "description": "Weekly active user count over time", + "description": "Weekly active user count over time (from prompt logging)", "fieldConfig": { "defaults": { "color": { @@ -312,7 +376,7 @@ "h": 8, "w": 12, "x": 0, - "y": 6 + "y": 8 }, "id": 5, "options": { @@ -339,7 +403,7 @@ "editorMode": "code", "format": "time_series", "rawQuery": true, - "rawSql": "SELECT\n STR_TO_DATE(CONCAT(YEARWEEK(timestamp, 1), ' Monday'), '%X%V %W') as time,\n COUNT(DISTINCT user_id) as 'Active Users'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY YEARWEEK(timestamp, 1)\nORDER BY time", + "rawSql": "SELECT\n STR_TO_DATE(CONCAT(yw, ' Monday'), '%X%V %W') as time,\n COUNT(DISTINCT user_id) as 'Active Users'\nFROM (\n SELECT user_id, YEARWEEK(timestamp, 1) as yw\n FROM lake._tool_q_dev_chat_log\n WHERE $__timeFilter(timestamp)\n) t\nGROUP BY yw\nORDER BY time", "refId": "A" } ], @@ -348,7 
+412,7 @@ }, { "datasource": "mysql", - "description": "New vs returning users by week", + "description": "New vs returning users by week (from prompt logging)", "fieldConfig": { "defaults": { "color": { @@ -404,7 +468,7 @@ "h": 8, "w": 12, "x": 12, - "y": 6 + "y": 8 }, "id": 6, "options": { @@ -431,7 +495,7 @@ "editorMode": "code", "format": "time_series", "rawQuery": true, - "rawSql": "SELECT\n week as time,\n SUM(CASE WHEN is_new = 1 THEN 1 ELSE 0 END) as 'New Users',\n SUM(CASE WHEN is_new = 0 THEN 1 ELSE 0 END) as 'Returning Users'\nFROM (\n SELECT\n u.user_id,\n STR_TO_DATE(CONCAT(YEARWEEK(u.timestamp, 1), ' Monday'), '%X%V %W') as week,\n CASE WHEN STR_TO_DATE(CONCAT(YEARWEEK(u.timestamp, 1), ' Monday'), '%X%V %W') = STR_TO_DATE(CONCAT(YEARWEEK(f.first_seen, 1), ' Monday'), '%X%V %W') THEN 1 ELSE 0 END as is_new\n FROM lake._tool_q_dev_chat_log u\n JOIN (SELECT user_id, MIN(timestamp) as first_seen FROM lake._tool_q_dev_chat_log GROUP BY user_id) f ON u.user_id = f.user_id\n WHERE $__timeFilter(u.timestamp)\n GROUP BY u.user_id, YEARWEEK(u.timestamp, 1), f.first_seen\n) weekly\nGROUP BY week\nORDER BY week", + "rawSql": "SELECT\n STR_TO_DATE(CONCAT(yw, ' Monday'), '%X%V %W') as time,\n SUM(CASE WHEN yw = first_yw THEN 1 ELSE 0 END) as 'New Users',\n SUM(CASE WHEN yw != first_yw THEN 1 ELSE 0 END) as 'Returning Users'\nFROM (\n SELECT DISTINCT u.user_id, YEARWEEK(u.timestamp, 1) as yw, f.first_yw\n FROM lake._tool_q_dev_chat_log u\n JOIN (SELECT user_id, YEARWEEK(MIN(timestamp), 1) as first_yw FROM lake._tool_q_dev_chat_log GROUP BY user_id) f\n ON u.user_id = f.user_id\n WHERE $__timeFilter(u.timestamp)\n) weekly\nGROUP BY yw\nORDER BY time", "refId": "A" } ], @@ -439,104 +503,21 @@ "type": "timeseries" }, { - "datasource": "mysql", - "description": "Number of users who used each feature", - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": 
"text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "barWidthFactor": 0.8, - "drawStyle": "bars", - "fillOpacity": 100, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green" - } - ] - }, - "unit": "short" - }, - "overrides": [] - }, + "collapsed": false, "gridPos": { - "h": 8, - "w": 12, + "h": 1, + "w": 24, "x": 0, - "y": 14 - }, - "id": 7, - "options": { - "barRadius": 0.1, - "barWidth": 0.8, - "fullHighlight": false, - "groupWidth": 0.7, - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": false - }, - "orientation": "horizontal", - "showValue": "auto", - "stacking": "none", - "tooltip": { - "hideZeros": false, - "mode": "single", - "sort": "none" - }, - "xTickLabelRotation": 0 + "y": 16 }, - "pluginVersion": "11.6.2", - "targets": [ - { - "datasource": "mysql", - "editorMode": "code", - "format": "table", - "rawQuery": true, - "rawSql": "SELECT\n 'Chat' as Feature, COUNT(DISTINCT CASE WHEN chat_messages_sent > 0 THEN user_id END) as Users FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Inline Suggestions', COUNT(DISTINCT CASE WHEN inline_suggestions_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Code Fix', COUNT(DISTINCT CASE WHEN code_fix_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Code Review', COUNT(DISTINCT CASE WHEN code_review_succeeded_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data 
WHERE $__timeFilter(date)\nUNION ALL SELECT 'Doc Generation', COUNT(DISTINCT CASE WHEN doc_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Test Generation', COUNT(DISTINCT CASE WHEN test_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Dev (Agentic)', COUNT(DISTINCT CASE WHEN dev_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Steering', COUNT(DISTINCT CASE WHEN has_steering = 1 THEN user_id END) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)\nUNION ALL SELECT 'Spec Mode', COUNT(DISTINCT CASE WHEN is_spec_mode = 1 THEN user_id END) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)", - "refId": "A" - } - ], - "title": "Feature Adoption Funnel", - "type": "barchart" + "id": 102, + "panels": [], + "title": "Credits & Subscription (new format: _tool_q_dev_user_report)", + "type": "row" }, { "datasource": "mysql", - "description": "Cumulative credits this month vs projected total", + "description": "Cumulative credits this month vs projected total (from new user_report)", "fieldConfig": { "defaults": { "color": { @@ -591,8 +572,8 @@ "gridPos": { "h": 8, "w": 12, - "x": 12, - "y": 14 + "x": 0, + "y": 17 }, "id": 8, "options": { @@ -628,191 +609,7 @@ }, { "datasource": "mysql", - "description": "Acceptance rates for inline suggestions, code fix, and inline chat over time", - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "barWidthFactor": 0.6, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": 
"smooth", - "lineWidth": 2, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green" - } - ] - }, - "unit": "percentunit" - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 22 - }, - "id": 9, - "options": { - "legend": { - "calcs": [ - "mean", - "max", - "sum" - ], - "displayMode": "table", - "placement": "right", - "showLegend": true - }, - "tooltip": { - "hideZeros": false, - "mode": "multi", - "sort": "none" - } - }, - "pluginVersion": "11.6.2", - "targets": [ - { - "datasource": "mysql", - "editorMode": "code", - "format": "time_series", - "rawQuery": true, - "rawSql": "SELECT\n date as time,\n SUM(inline_acceptance_count) / NULLIF(SUM(inline_suggestions_count), 0) as 'Inline Suggestions',\n SUM(code_fix_acceptance_event_count) / NULLIF(SUM(code_fix_generation_event_count), 0) as 'Code Fix',\n SUM(inline_chat_acceptance_event_count) / NULLIF(SUM(inline_chat_total_event_count), 0) as 'Inline Chat'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", - "refId": "A" - } - ], - "title": "Acceptance Rate Trends", - "type": "timeseries" - }, - { - "datasource": "mysql", - "description": "Code review findings and test generation metrics over time", - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "barWidthFactor": 0.6, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "smooth", - "lineWidth": 2, - "pointSize": 5, - 
"scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": true, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green" - } - ] - }, - "unit": "short" - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 12, - "y": 22 - }, - "id": 10, - "options": { - "legend": { - "calcs": [ - "mean", - "max", - "sum" - ], - "displayMode": "table", - "placement": "right", - "showLegend": true - }, - "tooltip": { - "hideZeros": false, - "mode": "multi", - "sort": "none" - } - }, - "pluginVersion": "11.6.2", - "targets": [ - { - "datasource": "mysql", - "editorMode": "code", - "format": "time_series", - "rawQuery": true, - "rawSql": "SELECT\n date as time,\n SUM(code_review_findings_count) as 'Review Findings',\n SUM(test_generation_event_count) as 'Test Gen Events',\n SUM(test_generation_accepted_tests) as 'Tests Accepted'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", - "refId": "A" - } - ], - "title": "Code Review Findings & Test Generation", - "type": "timeseries" - }, - { - "datasource": "mysql", - "description": "Per-user productivity and efficiency metrics", + "description": "Power tier users with no activity in the last 14 days (from new user_report)", "fieldConfig": { "defaults": { "color": { @@ -839,12 +636,12 @@ "overrides": [] }, "gridPos": { - "h": 10, - "w": 24, - "x": 0, - "y": 30 + "h": 8, + "w": 12, + "x": 12, + "y": 17 }, - "id": 11, + "id": 12, "options": { "cellHeight": "sm", "footer": { @@ -865,16 +662,29 @@ "editorMode": "code", "format": "table", "rawQuery": true, - "rawSql": "SELECT\n COALESCE(MAX(d.display_name), d.user_id) as 'User',\n COALESCE(MAX(r.subscription_tier), '') as 'Tier',\n ROUND(SUM(r.credits_used), 1) as 'Credits Used',\n SUM(d.chat_ai_code_lines + d.inline_ai_code_lines + d.code_fix_accepted_lines + 
d.dev_accepted_lines) as 'Total Accepted Lines',\n CASE WHEN SUM(d.chat_ai_code_lines + d.inline_ai_code_lines + d.code_fix_accepted_lines + d.dev_accepted_lines) > 0\n THEN ROUND(SUM(r.credits_used) / SUM(d.chat_ai_code_lines + d.inline_ai_code_lines + d.code_fix_accepted_lines + d.dev_accepted_lines), 2)\n ELSE NULL END as 'Credits/Line',\n CONCAT(ROUND(SUM(d.inline_acceptance_count) / NULLIF(SUM(d.inline_suggestions_count), 0) * 100, 1), '%') as 'Accept Rate',\n SUM(d.code_review_findings_count) as 'Review Findings',\n SUM(d.test_generation_event_count) as 'Test Gen Events',\n SUM(d.dev_accepted_lines) as 'Agentic Lines',\n MIN(d.date) as 'First Active',\n MAX(d.date) as 'Last Active'\nFROM lake._tool_q_dev_user_data d\nLEFT JOIN (\n SELECT user_id, date, SUM(credits_used) as credits_used, MAX(subscription_tier) as subscription_tier\n FROM lake._tool_q_dev_user_report\n WHERE $__timeFilter(date)\n GROUP BY user_id, date\n) r ON d.user_id = r.user_id AND d.date = r.date\nWHERE $__timeFilter(d.date)\nGROUP BY d.user_id\nORDER BY SUM(r.credits_used) DESC", + "rawSql": "SELECT\n COALESCE(MAX(display_name), user_id) as 'User',\n MAX(subscription_tier) as 'Tier',\n ROUND(SUM(credits_used), 1) as 'Total Credits Used',\n MAX(date) as 'Last Activity'\nFROM lake._tool_q_dev_user_report\nWHERE $__timeFilter(date)\n AND subscription_tier = 'POWER'\nGROUP BY user_id\nHAVING MAX(date) < DATE_SUB(NOW(), INTERVAL 14 DAY)\nORDER BY MAX(date)", "refId": "A" } ], - "title": "User Productivity & Efficiency", + "title": "Idle Power Users (No Activity in 14 Days)", "type": "table" }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 25 + }, + "id": 103, + "panels": [], + "title": "Cross-Source: User Productivity (new report + legacy metrics)", + "type": "row" + }, { "datasource": "mysql", - "description": "Power tier users with no activity in the last 14 days", + "description": "Per-user productivity combining credits (new format) with feature metrics 
(legacy). Only shows users present in both data sources.", "fieldConfig": { "defaults": { "color": { @@ -901,12 +711,12 @@ "overrides": [] }, "gridPos": { - "h": 8, + "h": 10, "w": 24, "x": 0, - "y": 40 + "y": 26 }, - "id": 12, + "id": 11, "options": { "cellHeight": "sm", "footer": { @@ -927,11 +737,11 @@ "editorMode": "code", "format": "table", "rawQuery": true, - "rawSql": "SELECT\n COALESCE(MAX(display_name), user_id) as 'User',\n MAX(subscription_tier) as 'Tier',\n ROUND(SUM(credits_used), 1) as 'Total Credits Used',\n MAX(date) as 'Last Activity'\nFROM lake._tool_q_dev_user_report\nWHERE $__timeFilter(date)\n AND subscription_tier = 'POWER'\nGROUP BY user_id\nHAVING MAX(date) < DATE_SUB(NOW(), INTERVAL 14 DAY)\nORDER BY MAX(date)", + "rawSql": "SELECT\n COALESCE(MAX(d.display_name), d.user_id) as 'User',\n COALESCE(MAX(r.subscription_tier), '') as 'Tier',\n ROUND(SUM(r.credits_used), 1) as 'Credits Used',\n SUM(d.chat_ai_code_lines + d.inline_ai_code_lines + d.code_fix_accepted_lines + d.dev_accepted_lines) as 'Total Accepted Lines',\n CASE WHEN SUM(d.chat_ai_code_lines + d.inline_ai_code_lines + d.code_fix_accepted_lines + d.dev_accepted_lines) > 0\n THEN ROUND(SUM(r.credits_used) / SUM(d.chat_ai_code_lines + d.inline_ai_code_lines + d.code_fix_accepted_lines + d.dev_accepted_lines), 2)\n ELSE NULL END as 'Credits/Line',\n CONCAT(ROUND(SUM(d.inline_acceptance_count) / NULLIF(SUM(d.inline_suggestions_count), 0) * 100, 1), '%') as 'Accept Rate',\n SUM(d.code_review_findings_count) as 'Review Findings',\n SUM(d.test_generation_event_count) as 'Test Gen Events',\n SUM(d.dev_accepted_lines) as 'Agentic Lines',\n MIN(d.date) as 'First Active',\n MAX(d.date) as 'Last Active'\nFROM lake._tool_q_dev_user_data d\nLEFT JOIN (\n SELECT user_id, date, SUM(credits_used) as credits_used, MAX(subscription_tier) as subscription_tier\n FROM lake._tool_q_dev_user_report\n WHERE $__timeFilter(date)\n GROUP BY user_id, date\n) r ON d.user_id = r.user_id AND d.date = r.date\nWHERE 
$__timeFilter(d.date)\nGROUP BY d.user_id\nORDER BY SUM(r.credits_used) DESC", "refId": "A" } ], - "title": "Idle Power Users (No Activity in 14 Days)", + "title": "User Productivity & Efficiency", "type": "table" } ], @@ -955,4 +765,4 @@ "title": "Kiro Executive Dashboard", "uid": "qdev_executive", "version": 1 -} \ No newline at end of file +} diff --git a/grafana/dashboards/qdev_feature_metrics.json b/grafana/dashboards/qdev_feature_metrics.json new file mode 100644 index 00000000000..597217bcf0e --- /dev/null +++ b/grafana/dashboards/qdev_feature_metrics.json @@ -0,0 +1,948 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "datasource": "mysql", + "description": "High-level summary of legacy feature-level activity metrics (from by_user_analytic CSV reports)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 1, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "sum" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n COUNT(DISTINCT user_id) as 'Active Users',\n SUM(inline_suggestions_count) as 'Inline Suggestions',\n SUM(inline_acceptance_count) as 'Inline 
Accepted',\n SUM(chat_messages_sent) as 'Chat Messages',\n SUM(chat_ai_code_lines) as 'Chat AI Lines',\n SUM(code_review_findings_count) as 'Review Findings',\n SUM(test_generation_event_count) as 'Test Gen Events',\n SUM(dev_accepted_lines) as 'Agentic Lines'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)", + "refId": "A" + } + ], + "title": "Legacy Feature Metrics Overview", + "type": "stat" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 6 + }, + "id": 20, + "panels": [], + "title": "Inline Suggestions", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Daily inline suggestion and acceptance counts", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 7 + }, + "id": 2, + "options": { + "legend": { + "calcs": [ + "mean", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n 
date as time,\n SUM(inline_suggestions_count) as 'Suggestions',\n SUM(inline_acceptance_count) as 'Accepted',\n SUM(inline_ai_code_lines) as 'AI Code Lines'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A" + } + ], + "title": "Inline Suggestions & Acceptance", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Acceptance rates for inline suggestions, code fix, and inline chat over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 7 + }, + "id": 3, + "options": { + "legend": { + "calcs": [ + "mean", + "max" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(inline_acceptance_count) / NULLIF(SUM(inline_suggestions_count), 0) as 'Inline Suggestions',\n SUM(code_fix_acceptance_event_count) / NULLIF(SUM(code_fix_generation_event_count), 0) 
as 'Code Fix',\n SUM(inline_chat_acceptance_event_count) / NULLIF(SUM(inline_chat_total_event_count), 0) as 'Inline Chat'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A" + } + ], + "title": "Acceptance Rate Trends", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 15 + }, + "id": 21, + "panels": [], + "title": "Chat & Agentic (Dev)", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Daily chat messages sent and AI-generated code lines from chat", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 16 + }, + "id": 4, + "options": { + "legend": { + "calcs": [ + "mean", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(chat_messages_sent) as 'Messages Sent',\n SUM(chat_messages_interacted) as 
'Messages Interacted',\n SUM(chat_ai_code_lines) as 'AI Code Lines'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A" + } + ], + "title": "Chat Activity", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Agentic (Dev) code generation and acceptance metrics", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 16 + }, + "id": 5, + "options": { + "legend": { + "calcs": [ + "mean", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(dev_generation_event_count) as 'Generation Events',\n SUM(dev_generated_lines) as 'Generated Lines',\n SUM(dev_accepted_lines) as 'Accepted Lines',\n SUM(dev_acceptance_event_count) as 'Acceptance Events'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A" + } + 
], + "title": "Agentic (Dev) Activity", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 24 + }, + "id": 22, + "panels": [], + "title": "Code Review, Test Gen & Transformations", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Code review findings and test generation metrics over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 25 + }, + "id": 6, + "options": { + "legend": { + "calcs": [ + "mean", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(code_review_findings_count) as 'Review Findings',\n SUM(code_review_succeeded_event_count) as 'Reviews Succeeded',\n SUM(code_review_failed_event_count) as 'Reviews Failed'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A" + } + ], + 
"title": "Code Review Activity", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Test generation events and acceptance over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 25 + }, + "id": 7, + "options": { + "legend": { + "calcs": [ + "mean", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(test_generation_event_count) as 'Test Gen Events',\n SUM(test_generation_generated_tests) as 'Tests Generated',\n SUM(test_generation_accepted_tests) as 'Tests Accepted',\n SUM(test_generation_generated_lines) as 'Lines Generated',\n SUM(test_generation_accepted_lines) as 'Lines Accepted'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A" + } + ], + "title": "Test Generation Activity", + "type": "timeseries" + }, + { + "datasource": "mysql", 
+ "description": "Doc generation and code transformation events", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 33 + }, + "id": 8, + "options": { + "legend": { + "calcs": [ + "mean", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n date as time,\n SUM(doc_generation_event_count) as 'Doc Gen Events',\n SUM(doc_generation_accepted_line_additions) as 'Doc Lines Accepted',\n SUM(transformation_event_count) as 'Transformation Events',\n SUM(transformation_lines_generated) as 'Transform Lines Generated'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER BY date", + "refId": "A" + } + ], + "title": "Doc Generation & Transformations", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Number of users who used each feature in the selected period", + "fieldConfig": { + "defaults": { + 
"color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.8, + "drawStyle": "bars", + "fillOpacity": 100, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 33 + }, + "id": 9, + "options": { + "barRadius": 0.1, + "barWidth": 0.8, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "orientation": "horizontal", + "showValue": "auto", + "stacking": "none", + "tooltip": { + "hideZeros": false, + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": 0 + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT 'Chat' as Feature, COUNT(DISTINCT CASE WHEN chat_messages_sent > 0 THEN user_id END) as Users FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Inline Suggestions', COUNT(DISTINCT CASE WHEN inline_suggestions_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Code Fix', COUNT(DISTINCT CASE WHEN code_fix_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Code Review', COUNT(DISTINCT CASE WHEN 
code_review_succeeded_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Doc Generation', COUNT(DISTINCT CASE WHEN doc_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Test Generation', COUNT(DISTINCT CASE WHEN test_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Dev (Agentic)', COUNT(DISTINCT CASE WHEN dev_generation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)\nUNION ALL SELECT 'Transformation', COUNT(DISTINCT CASE WHEN transformation_event_count > 0 THEN user_id END) FROM lake._tool_q_dev_user_data WHERE $__timeFilter(date)", + "refId": "A" + } + ], + "title": "Feature Adoption (Users per Feature)", + "type": "barchart" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 41 + }, + "id": 23, + "panels": [], + "title": "Per-User Detail", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Per-user breakdown of legacy feature-level metrics", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "filterable": true, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 42 + }, + "id": 10, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true, + "sortBy": [] + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n COALESCE(MAX(display_name), user_id) as 'User',\n SUM(inline_suggestions_count) as 'Suggestions',\n 
SUM(inline_acceptance_count) as 'Accepted',\n CONCAT(ROUND(SUM(inline_acceptance_count) / NULLIF(SUM(inline_suggestions_count), 0) * 100, 1), '%') as 'Accept %',\n SUM(chat_messages_sent) as 'Chat Msgs',\n SUM(chat_ai_code_lines) as 'Chat Lines',\n SUM(dev_accepted_lines) as 'Agentic Lines',\n SUM(code_review_findings_count) as 'Review Findings',\n SUM(test_generation_accepted_tests) as 'Tests Accepted',\n SUM(doc_generation_event_count) as 'Doc Gen',\n SUM(transformation_event_count) as 'Transforms',\n MIN(date) as 'First Active',\n MAX(date) as 'Last Active'\nFROM lake._tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY user_id\nORDER BY SUM(inline_suggestions_count) DESC", + "refId": "A" + } + ], + "title": "Per-User Feature Metrics", + "type": "table" + } + ], + "preload": false, + "refresh": "5m", + "schemaVersion": 41, + "tags": [ + "q_dev", + "legacy", + "kiro" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-30d", + "to": "now" + }, + "timepicker": {}, + "timezone": "utc", + "title": "Kiro Legacy Feature Metrics", + "uid": "qdev_feature_metrics", + "version": 1 +} diff --git a/grafana/dashboards/qdev_logging.json b/grafana/dashboards/qdev_logging.json index 462adcd986e..6a47b03bf2b 100644 --- a/grafana/dashboards/qdev_logging.json +++ b/grafana/dashboards/qdev_logging.json @@ -70,7 +70,7 @@ "editorMode": "code", "format": "table", "rawQuery": true, - "rawSql": "SELECT\n (SELECT COUNT(*) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)) as 'Chat Events',\n (SELECT COUNT(DISTINCT user_id) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)) as 'Chat Users',\n (SELECT COUNT(DISTINCT conversation_id) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp) AND conversation_id != '') as 'Conversations',\n (SELECT COUNT(*) FROM lake._tool_q_dev_completion_log WHERE $__timeFilter(timestamp)) as 'Completion Events',\n (SELECT COUNT(DISTINCT user_id) FROM lake._tool_q_dev_completion_log WHERE 
$__timeFilter(timestamp)) as 'Completion Users'", + "rawSql": "SELECT\n (SELECT COUNT(*) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)) as 'Chat Events',\n (SELECT COUNT(DISTINCT user_id) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)) as 'Chat Users',\n (SELECT COUNT(DISTINCT conversation_id) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp) AND conversation_id != '') as 'Conversations',\n (SELECT COUNT(*) FROM lake._tool_q_dev_completion_log WHERE $__timeFilter(timestamp)) as 'Completion Events',\n (SELECT COUNT(DISTINCT user_id) FROM lake._tool_q_dev_completion_log WHERE $__timeFilter(timestamp)) as 'Completion Users',\n (SELECT SUM(code_reference_count) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)) as 'Code References',\n (SELECT SUM(web_link_count) FROM lake._tool_q_dev_chat_log WHERE $__timeFilter(timestamp)) as 'Web Links Cited'", "refId": "A" } ], @@ -168,7 +168,7 @@ }, { "datasource": "mysql", - "description": "Distribution of model usage across chat events", + "description": "Distribution of chat trigger types: MANUAL (chat window) vs INLINE_CHAT", "fieldConfig": { "defaults": { "color": { @@ -188,10 +188,78 @@ }, "gridPos": { "h": 8, - "w": 12, + "w": 8, "x": 0, "y": 14 }, + "id": 11, + "options": { + "displayLabels": [ + "name", + "percent" + ], + "legend": { + "displayMode": "table", + "placement": "right", + "showLegend": true, + "values": [ + "value", + "percent" + ] + }, + "pieType": "donut", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n CASE\n WHEN chat_trigger_type = '' OR chat_trigger_type IS NULL THEN '(unknown)'\n ELSE chat_trigger_type\n END as 'Trigger Type',\n COUNT(*) as 'Events'\nFROM lake._tool_q_dev_chat_log\nWHERE 
$__timeFilter(timestamp)\nGROUP BY chat_trigger_type\nORDER BY COUNT(*) DESC", + "refId": "A" + } + ], + "title": "Chat Trigger Type Distribution", + "type": "piechart" + }, + { + "datasource": "mysql", + "description": "Distribution of model usage across chat events", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 8, + "y": 14 + }, "id": 3, "options": { "displayLabels": [ @@ -256,8 +324,8 @@ }, "gridPos": { "h": 8, - "w": 12, - "x": 12, + "w": 8, + "x": 16, "y": 14 }, "id": 4, @@ -541,7 +609,7 @@ "editorMode": "code", "format": "table", "rawQuery": true, - "rawSql": "SELECT\n COALESCE(u.display_name, u.user_id) as 'User',\n u.user_id as 'User ID',\n u.chat_events as 'Chat Events',\n u.conversations as 'Conversations',\n ROUND(u.chat_events / NULLIF(u.conversations, 0), 1) as 'Avg Turns',\n COALESCE(c.completion_events, 0) as 'Completion Events',\n COALESCE(c.files_count, 0) as 'Distinct Files',\n ROUND(u.avg_prompt_len) as 'Avg Prompt Len',\n ROUND(u.avg_response_len) as 'Avg Response Len',\n u.steering_count as 'Steering Uses',\n u.spec_count as 'Spec Mode Uses',\n u.models_used as 'Models Used',\n u.first_seen as 'First Seen',\n GREATEST(u.last_seen, COALESCE(c.last_seen, u.last_seen)) as 'Last Seen'\nFROM (\n SELECT\n user_id,\n MAX(display_name) as display_name,\n COUNT(*) as chat_events,\n COUNT(DISTINCT CASE WHEN conversation_id != '' THEN conversation_id END) as conversations,\n AVG(prompt_length) as avg_prompt_len,\n AVG(response_length) as avg_response_len,\n GROUP_CONCAT(DISTINCT CASE WHEN model_id != '' AND model_id IS NOT NULL THEN model_id END ORDER BY model_id SEPARATOR ', ') as models_used,\n SUM(CASE WHEN has_steering = 1 THEN 1 ELSE 0 END) as steering_count,\n SUM(CASE WHEN is_spec_mode = 1 THEN 1 ELSE 0 END) as spec_count,\n MIN(timestamp) 
as first_seen,\n MAX(timestamp) as last_seen\n FROM lake._tool_q_dev_chat_log\n WHERE $__timeFilter(timestamp)\n GROUP BY user_id\n) u\nLEFT JOIN (\n SELECT\n user_id,\n COUNT(*) as completion_events,\n COUNT(DISTINCT file_name) as files_count,\n MAX(timestamp) as last_seen\n FROM lake._tool_q_dev_completion_log\n WHERE $__timeFilter(timestamp)\n GROUP BY user_id\n) c ON u.user_id = c.user_id\nORDER BY u.user_id", + "rawSql": "SELECT\n COALESCE(u.display_name, u.user_id) as 'User',\n u.user_id as 'User ID',\n u.chat_events as 'Chat Events',\n u.conversations as 'Conversations',\n ROUND(u.chat_events / NULLIF(u.conversations, 0), 1) as 'Avg Turns',\n COALESCE(c.completion_events, 0) as 'Completion Events',\n COALESCE(c.files_count, 0) as 'Distinct Files',\n ROUND(u.avg_prompt_len) as 'Avg Prompt Len',\n ROUND(u.avg_response_len) as 'Avg Response Len',\n u.steering_count as 'Steering Uses',\n u.spec_count as 'Spec Mode Uses',\n u.code_ref_count as 'Code Refs',\n u.web_link_count as 'Web Links',\n u.models_used as 'Models Used',\n u.first_seen as 'First Seen',\n GREATEST(u.last_seen, COALESCE(c.last_seen, u.last_seen)) as 'Last Seen'\nFROM (\n SELECT\n user_id,\n MAX(display_name) as display_name,\n COUNT(*) as chat_events,\n COUNT(DISTINCT CASE WHEN conversation_id != '' THEN conversation_id END) as conversations,\n AVG(prompt_length) as avg_prompt_len,\n AVG(response_length) as avg_response_len,\n GROUP_CONCAT(DISTINCT CASE WHEN model_id != '' AND model_id IS NOT NULL THEN model_id END ORDER BY model_id SEPARATOR ', ') as models_used,\n SUM(CASE WHEN has_steering = 1 THEN 1 ELSE 0 END) as steering_count,\n SUM(CASE WHEN is_spec_mode = 1 THEN 1 ELSE 0 END) as spec_count,\n SUM(code_reference_count) as code_ref_count,\n SUM(web_link_count) as web_link_count,\n MIN(timestamp) as first_seen,\n MAX(timestamp) as last_seen\n FROM lake._tool_q_dev_chat_log\n WHERE $__timeFilter(timestamp)\n GROUP BY user_id\n) u\nLEFT JOIN (\n SELECT\n user_id,\n COUNT(*) as 
completion_events,\n COUNT(DISTINCT file_name) as files_count,\n MAX(timestamp) as last_seen\n FROM lake._tool_q_dev_completion_log\n WHERE $__timeFilter(timestamp)\n GROUP BY user_id\n) c ON u.user_id = c.user_id\nORDER BY u.user_id", "refId": "A" } ], @@ -570,7 +638,7 @@ }, "gridPos": { "h": 8, - "w": 12, + "w": 8, "x": 0, "y": 48 }, @@ -638,8 +706,8 @@ }, "gridPos": { "h": 8, - "w": 12, - "x": 12, + "w": 8, + "x": 8, "y": 48 }, "id": 9, @@ -684,6 +752,74 @@ "title": "Active File Types in Chat", "type": "piechart" }, + { + "datasource": "mysql", + "description": "How often Kiro responses include code references and web links", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 16, + "y": 48 + }, + "id": 12, + "options": { + "displayLabels": [ + "name", + "percent" + ], + "legend": { + "displayMode": "table", + "placement": "right", + "showLegend": true, + "values": [ + "value", + "percent" + ] + }, + "pieType": "donut", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n SUM(CASE WHEN code_reference_count > 0 THEN 1 ELSE 0 END) as 'With Code References',\n SUM(CASE WHEN web_link_count > 0 THEN 1 ELSE 0 END) as 'With Web Links',\n SUM(CASE WHEN has_followup_prompts = 1 THEN 1 ELSE 0 END) as 'With Followup Prompts',\n SUM(CASE WHEN code_reference_count = 0 AND web_link_count = 0 AND has_followup_prompts = 0 THEN 1 ELSE 0 END) as 'Plain Response'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)", + "refId": "A" + } + ], + "title": "Response Enrichment Breakdown", + "type": "piechart" + }, { 
"datasource": "mysql", "description": "Average and maximum prompt/response lengths over time", @@ -775,6 +911,188 @@ ], "title": "Prompt & Response Length Trends", "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Average code context size provided to inline completions over time", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Characters", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 64 + }, + "id": 13, + "options": { + "legend": { + "calcs": [ + "mean", + "max" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE(timestamp) as time,\n ROUND(AVG(left_context_length)) as 'Avg Left Context',\n ROUND(AVG(right_context_length)) as 'Avg Right Context',\n ROUND(AVG(left_context_length + right_context_length)) as 'Avg Total Context'\nFROM lake._tool_q_dev_completion_log\nWHERE $__timeFilter(timestamp)\nGROUP BY DATE(timestamp)\nORDER BY DATE(timestamp)", + "refId": "A" + } 
+ ], + "title": "Completion Context Size Trends", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Daily trend of code references and web links in chat responses", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 72 + }, + "id": 14, + "options": { + "legend": { + "calcs": [ + "mean", + "sum" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.6.2", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE(timestamp) as time,\n SUM(code_reference_count) as 'Code References',\n SUM(web_link_count) as 'Web Links',\n SUM(CASE WHEN has_followup_prompts = 1 THEN 1 ELSE 0 END) as 'Followup Prompts'\nFROM lake._tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY DATE(timestamp)\nORDER BY DATE(timestamp)", + "refId": "A" + } + ], + "title": "Response Enrichment Trends", + "type": "timeseries" } ], "preload": false, @@ -797,4 +1115,4 @@ "title": "Kiro AI Activity Insights", "uid": 
"qdev_logging", "version": 1 -} \ No newline at end of file +} From d9e6bc8e596c853cb65cbcba2848693dde8bf783 Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sat, 21 Mar 2026 10:09:31 +0800 Subject: [PATCH 26/39] fix: add SQL identifier validation to prevent SQL injection via table/column names (#8769) Add ValidateTableName and ValidateColumnName functions in core/dal to ensure table and column names used in dynamic SQL are safe identifiers. Applied to scope_service_helper, scope_generic_helper, and customized_fields_extractor. --- backend/core/dal/identifier.go | 50 +++++++++++++++++++ .../pluginhelper/api/scope_generic_helper.go | 3 ++ .../helpers/srvhelper/scope_service_helper.go | 3 ++ .../tasks/customized_fields_extractor.go | 18 +++++-- 4 files changed, 71 insertions(+), 3 deletions(-) create mode 100644 backend/core/dal/identifier.go diff --git a/backend/core/dal/identifier.go b/backend/core/dal/identifier.go new file mode 100644 index 00000000000..710d94e6fdb --- /dev/null +++ b/backend/core/dal/identifier.go @@ -0,0 +1,50 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package dal + +import ( + "fmt" + "regexp" + + "github.com/apache/incubator-devlake/core/errors" +) + +// validIdentifierRegex matches valid SQL identifiers: alphanumeric, underscores, and dots (for schema.table) +var validIdentifierRegex = regexp.MustCompile(`^[a-zA-Z_][a-zA-Z0-9_.]*$`) + +// ValidateTableName checks that a table name is a safe SQL identifier to prevent SQL injection. +func ValidateTableName(name string) errors.Error { + if name == "" { + return errors.Default.New("table name must not be empty") + } + if !validIdentifierRegex.MatchString(name) { + return errors.Default.New(fmt.Sprintf("invalid table name: %q", name)) + } + return nil +} + +// ValidateColumnName checks that a column name is a safe SQL identifier to prevent SQL injection. +func ValidateColumnName(name string) errors.Error { + if name == "" { + return errors.Default.New("column name must not be empty") + } + if !validIdentifierRegex.MatchString(name) { + return errors.Default.New(fmt.Sprintf("invalid column name: %q", name)) + } + return nil +} diff --git a/backend/helpers/pluginhelper/api/scope_generic_helper.go b/backend/helpers/pluginhelper/api/scope_generic_helper.go index a782b4e091e..895f3427a4d 100644 --- a/backend/helpers/pluginhelper/api/scope_generic_helper.go +++ b/backend/helpers/pluginhelper/api/scope_generic_helper.go @@ -565,6 +565,9 @@ func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) transactionalDelete(t } tx := gs.db.Begin() for _, table := range tables { + if err := dal.ValidateTableName(table); err != nil { + return errors.Default.Wrap(err, fmt.Sprintf("unsafe table name %q when deleting scope data", table)) + } where, params := generateWhereClause(table) gs.log.Info("deleting data from table %s with WHERE \"%s\" and params: \"%v\"", table, where, params) sql := fmt.Sprintf("DELETE FROM %s WHERE %s", table, where) diff --git a/backend/helpers/srvhelper/scope_service_helper.go b/backend/helpers/srvhelper/scope_service_helper.go index 
544536f01c2..e5d4671c53d 100644 --- a/backend/helpers/srvhelper/scope_service_helper.go +++ b/backend/helpers/srvhelper/scope_service_helper.go @@ -255,6 +255,9 @@ func (scopeSrv *ScopeSrvHelper[C, S, SC]) deleteScopeData(scope plugin.ToolLayer } tables := errors.Must1(scopeSrv.getAffectedTables()) for _, table := range tables { + if err := dal.ValidateTableName(table); err != nil { + panic(errors.Default.Wrap(err, fmt.Sprintf("unsafe table name %q when deleting scope data", table))) + } where, params := generateWhereClause(table) scopeSrv.log.Info("deleting data from table %s with WHERE \"%s\" and params: \"%v\"", table, where, params) sql := fmt.Sprintf("DELETE FROM %s WHERE %s", table, where) diff --git a/backend/plugins/customize/tasks/customized_fields_extractor.go b/backend/plugins/customize/tasks/customized_fields_extractor.go index b9f7c10081b..7bd1d932d7b 100644 --- a/backend/plugins/customize/tasks/customized_fields_extractor.go +++ b/backend/plugins/customize/tasks/customized_fields_extractor.go @@ -149,7 +149,10 @@ func extractCustomizedFields(ctx context.Context, d dal.Dal, table, rawTable, ra // remove columns that are not primary key delete(row, "_raw_data_id") delete(row, "data") - query, params := mkUpdate(table, updates, row) + query, params, err := mkUpdate(table, updates, row) + if err != nil { + return err + } err = d.Exec(query, params...) 
if err != nil { return errors.Default.Wrap(err, "Exec SQL error") @@ -169,18 +172,27 @@ func fillInUpdates(result gjson.Result, field string, updates map[string]interfa } // mkUpdate generates SQL statement and parameters for updating a record -func mkUpdate(table string, updates map[string]interface{}, pk map[string]interface{}) (string, []interface{}) { +func mkUpdate(table string, updates map[string]interface{}, pk map[string]interface{}) (string, []interface{}, error) { + if err := dal.ValidateTableName(table); err != nil { + return "", nil, err + } var params []interface{} stat := fmt.Sprintf("UPDATE %s SET ", table) var uu []string for field, value := range updates { + if err := dal.ValidateColumnName(field); err != nil { + return "", nil, err + } uu = append(uu, fmt.Sprintf("%s = ?", field)) params = append(params, value) } var ww []string for field, value := range pk { + if err := dal.ValidateColumnName(field); err != nil { + return "", nil, err + } ww = append(ww, fmt.Sprintf("%s = ?", field)) params = append(params, value) } - return stat + strings.Join(uu, ", ") + " WHERE " + strings.Join(ww, " AND "), params + return stat + strings.Join(uu, ", ") + " WHERE " + strings.Join(ww, " AND "), params, nil } From e0714d559be11832b0220739d90f687e71170bdf Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sun, 22 Mar 2026 22:17:28 +0800 Subject: [PATCH 27/39] feat(q-dev): add Kiro Credits + DORA Correlation dashboard (#8792) Add a new Grafana dashboard that correlates Kiro AI usage (credits, messages, active users) with DORA metrics at weekly aggregate level. Panels include: - Pearson's r correlation between weekly credits and PR cycle time - High AI Usage vs Low AI Usage cycle time comparison - Weekly credits vs deployment frequency trend - Weekly credits vs change failure rate trend Data is joined by week_start between _tool_q_dev_user_report and project_pr_metrics / cicd_deployment_commits. 
--- grafana/dashboards/KiroCreditsDORA.json | 426 ++++++++++++++++++++++++ 1 file changed, 426 insertions(+) create mode 100644 grafana/dashboards/KiroCreditsDORA.json diff --git a/grafana/dashboards/KiroCreditsDORA.json b/grafana/dashboards/KiroCreditsDORA.json new file mode 100644 index 00000000000..d8195d8bee5 --- /dev/null +++ b/grafana/dashboards/KiroCreditsDORA.json @@ -0,0 +1,426 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 1, + "id": null, + "links": [ + { + "asDropdown": false, + "icon": "external link", + "includeVars": true, + "keepTime": true, + "tags": [], + "targetBlank": true, + "title": "DORA Dashboard", + "type": "link", + "url": "/d/qNo8_0M4z/dora" + }, + { + "asDropdown": false, + "icon": "external link", + "includeVars": true, + "keepTime": true, + "tags": [], + "targetBlank": true, + "title": "Kiro Usage Dashboard", + "type": "link", + "url": "/d/qdev_user_report" + } + ], + "panels": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { "h": 3, "w": 24, "x": 0, "y": 0 }, + "id": 1, + "options": { + "code": { "language": "plaintext", "showLineNumbers": false, "showMiniMap": false }, + "content": "## Kiro Credits + DORA Correlation\nThis dashboard correlates **Kiro AI usage (credits and messages)** with **DORA** performance indicators at a weekly aggregate level.\n\n- **Pearson's r** measures linear correlation: negative r suggests higher AI usage may correlate with shorter cycle times.\n- Data is aggregated by **week** and joined on `week_start`.", + "mode": "markdown" + }, + "title": "Dashboard Introduction", + "type": "text" + }, + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 3 }, + "id": 2, + "panels": [], + "title": "Overview", + 
"type": "row" + }, + { + "datasource": "mysql", + "description": "Pearson correlation coefficient between weekly Kiro credits and PR cycle time", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [{ "options": { "match": "null", "result": { "text": "Need more data" } }, "type": "special" }], + "thresholds": { + "mode": "absolute", + "steps": [ + { "color": "green", "value": null }, + { "color": "yellow", "value": -0.3 }, + { "color": "orange", "value": 0.3 } + ] + } + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 8, "x": 0, "y": 4 }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": false }, + "showPercentChange": false, + "textMode": "auto" + }, + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "WITH _kiro_weekly AS (\n SELECT\n DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS week_start,\n SUM(credits_used) AS weekly_credits\n FROM _tool_q_dev_user_report\n WHERE $__timeFilter(date)\n GROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\n),\n_pr_weekly AS (\n SELECT\n DATE_SUB(DATE(pr_merged_date), INTERVAL WEEKDAY(DATE(pr_merged_date)) DAY) AS week_start,\n AVG(pr_cycle_time) / 60.0 AS avg_cycle_hours\n FROM project_pr_metrics\n WHERE pr_merged_date IS NOT NULL\n AND pr_cycle_time IS NOT NULL\n AND $__timeFilter(pr_merged_date)\n GROUP BY DATE_SUB(DATE(pr_merged_date), INTERVAL WEEKDAY(DATE(pr_merged_date)) DAY)\n),\n_joined AS (\n SELECT k.weekly_credits AS x, p.avg_cycle_hours AS y\n FROM _kiro_weekly k\n INNER JOIN _pr_weekly p ON k.week_start = p.week_start\n),\n_stats AS (\n SELECT COUNT(*) AS n, AVG(x) AS mx, AVG(y) AS my, STDDEV_POP(x) AS sx, STDDEV_POP(y) AS sy\n FROM _joined\n)\nSELECT CASE\n WHEN s.n < 4 THEN NULL\n WHEN s.sx = 0 OR s.sy = 0 THEN 0\n ELSE ROUND((SELECT SUM((j.x 
- s.mx) * (j.y - s.my)) FROM _joined j) / (s.n * s.sx * s.sy), 2)\nEND AS 'r'\nFROM _stats s", + "refId": "A" + } + ], + "title": "Credits vs Cycle Time (r)", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Total Kiro credits consumed in period", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "blue", "value": null }] } + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 4, "x": 8, "y": 4 }, + "id": 4, + "options": { + "colorMode": "value", + "graphMode": "area", + "reduceOptions": { "calcs": ["sum"], "fields": "", "values": false }, + "textMode": "auto" + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT SUM(credits_used) AS 'Credits Used'\nFROM _tool_q_dev_user_report\nWHERE $__timeFilter(date)", + "refId": "A" + } + ], + "title": "Total Credits", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Median PR cycle time in hours", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] }, + "unit": "h" + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 4, "x": 12, "y": 4 }, + "id": 5, + "options": { + "colorMode": "value", + "graphMode": "area", + "reduceOptions": { "calcs": ["sum"], "fields": "", "values": false }, + "textMode": "auto" + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "WITH _ranked AS (\n SELECT pr_cycle_time / 60.0 AS ct,\n PERCENT_RANK() OVER (ORDER BY pr_cycle_time) AS prank\n FROM project_pr_metrics\n WHERE pr_merged_date IS NOT NULL AND pr_cycle_time IS NOT NULL\n AND $__timeFilter(pr_merged_date)\n)\nSELECT ROUND(MAX(ct), 1) AS 'Median Cycle Time'\nFROM _ranked WHERE prank <= 0.5", + "refId": "A" + } + ], + "title": "Median Cycle Time", + "type": "stat" + }, + { + 
"datasource": "mysql", + "description": "Cycle time comparison: weeks with above-median vs below-median AI credits usage", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] }, + "unit": "h" + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 8, "x": 16, "y": 4 }, + "id": 6, + "options": { + "barRadius": 0.1, + "barWidth": 0.6, + "orientation": "horizontal", + "showValue": "auto", + "stacking": "none", + "tooltip": { "mode": "single" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "WITH _kiro_weekly AS (\n SELECT DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS week_start,\n SUM(credits_used) AS weekly_credits\n FROM _tool_q_dev_user_report WHERE $__timeFilter(date)\n GROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\n),\n_pr_weekly AS (\n SELECT DATE_SUB(DATE(pr_merged_date), INTERVAL WEEKDAY(DATE(pr_merged_date)) DAY) AS week_start,\n AVG(pr_cycle_time) / 60.0 AS avg_ct\n FROM project_pr_metrics\n WHERE pr_merged_date IS NOT NULL AND pr_cycle_time IS NOT NULL AND $__timeFilter(pr_merged_date)\n GROUP BY DATE_SUB(DATE(pr_merged_date), INTERVAL WEEKDAY(DATE(pr_merged_date)) DAY)\n),\n_joined AS (\n SELECT k.weekly_credits, p.avg_ct FROM _kiro_weekly k INNER JOIN _pr_weekly p ON k.week_start = p.week_start\n),\n_med AS (\n SELECT weekly_credits AS med FROM _joined ORDER BY weekly_credits LIMIT 1 OFFSET (SELECT FLOOR(COUNT(*)/2) FROM _joined)\n)\nSELECT\n CASE WHEN j.weekly_credits >= m.med THEN 'High AI Usage' ELSE 'Low AI Usage' END AS 'Tier',\n ROUND(AVG(j.avg_ct), 1) AS 'Avg Cycle Time'\nFROM _joined j, _med m\nGROUP BY CASE WHEN j.weekly_credits >= m.med THEN 'High AI Usage' ELSE 'Low AI Usage' END", + "refId": "A" + } + ], + "title": "Cycle Time: High vs Low AI Usage", + "type": "bargauge" + }, + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, 
"x": 0, "y": 10 }, + "id": 10, + "panels": [], + "title": "Weekly Trends", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Weekly Kiro credits consumed", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 11 }, + "id": 11, + "options": { + "legend": { "calcs": ["mean", "sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS time,\n SUM(credits_used) AS 'Credits Used',\n COUNT(DISTINCT user_id) AS 'Active Users'\nFROM _tool_q_dev_user_report\nWHERE $__timeFilter(date)\nGROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\nORDER BY time", + "refId": "A" + } + ], + "title": "Weekly Kiro Credits & Active Users", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Weekly average PR cycle time in hours", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "h" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 11 }, + "id": 12, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": 
"right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE_SUB(DATE(pr_merged_date), INTERVAL WEEKDAY(DATE(pr_merged_date)) DAY) AS time,\n ROUND(AVG(pr_cycle_time) / 60.0, 1) AS 'Avg Cycle Time (hrs)',\n COUNT(*) AS 'PRs Merged'\nFROM project_pr_metrics\nWHERE pr_merged_date IS NOT NULL AND pr_cycle_time IS NOT NULL\n AND $__timeFilter(pr_merged_date)\nGROUP BY DATE_SUB(DATE(pr_merged_date), INTERVAL WEEKDAY(DATE(pr_merged_date)) DAY)\nORDER BY time", + "refId": "A" + } + ], + "title": "Weekly PR Cycle Time & Volume", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 19 }, + "id": 20, + "panels": [], + "title": "Deployment Frequency", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Pearson correlation between weekly credits and deployment count", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [{ "options": { "match": "null", "result": { "text": "Need more data" } }, "type": "special" }], + "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] } + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 6, "x": 0, "y": 20 }, + "id": 21, + "options": { + "colorMode": "value", + "graphMode": "none", + "reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": false }, + "textMode": "auto" + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "WITH _kiro_weekly AS (\n SELECT DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS week_start,\n SUM(credits_used) AS weekly_credits\n FROM _tool_q_dev_user_report WHERE $__timeFilter(date)\n GROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\n),\n_deploy_weekly AS (\n SELECT DATE_SUB(DATE(cdc.finished_date), INTERVAL WEEKDAY(DATE(cdc.finished_date)) DAY) AS week_start,\n COUNT(DISTINCT 
cdc.cicd_deployment_id) AS deploys\n FROM cicd_deployment_commits cdc\n JOIN project_mapping pm ON cdc.cicd_scope_id = pm.row_id AND pm.`table` = 'cicd_scopes'\n WHERE cdc.result = 'SUCCESS' AND cdc.environment = 'PRODUCTION'\n AND $__timeFilter(cdc.finished_date)\n GROUP BY DATE_SUB(DATE(cdc.finished_date), INTERVAL WEEKDAY(DATE(cdc.finished_date)) DAY)\n),\n_joined AS (\n SELECT k.weekly_credits AS x, d.deploys AS y\n FROM _kiro_weekly k INNER JOIN _deploy_weekly d ON k.week_start = d.week_start\n),\n_stats AS (\n SELECT COUNT(*) AS n, AVG(x) AS mx, AVG(y) AS my, STDDEV_POP(x) AS sx, STDDEV_POP(y) AS sy FROM _joined\n)\nSELECT CASE\n WHEN s.n < 4 THEN NULL\n WHEN s.sx = 0 OR s.sy = 0 THEN 0\n ELSE ROUND((SELECT SUM((j.x - s.mx) * (j.y - s.my)) FROM _joined j) / (s.n * s.sx * s.sy), 2)\nEND AS 'r'\nFROM _stats s", + "refId": "A" + } + ], + "title": "Credits vs Deploy Freq (r)", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Weekly credits overlaid with deployment count", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 18, "x": 6, "y": 20 }, + "id": 22, + "options": { + "legend": { "calcs": ["mean", "sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT time, SUM(credits) AS 'Credits Used', SUM(deploys) AS 'Deployments'\nFROM (\n SELECT DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS time,\n SUM(credits_used) AS credits, 0 AS deploys\n FROM _tool_q_dev_user_report 
WHERE $__timeFilter(date)\n GROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\n UNION ALL\n SELECT DATE_SUB(DATE(cdc.finished_date), INTERVAL WEEKDAY(DATE(cdc.finished_date)) DAY) AS time,\n 0 AS credits, COUNT(DISTINCT cdc.cicd_deployment_id) AS deploys\n FROM cicd_deployment_commits cdc\n JOIN project_mapping pm ON cdc.cicd_scope_id = pm.row_id AND pm.`table` = 'cicd_scopes'\n WHERE cdc.result = 'SUCCESS' AND cdc.environment = 'PRODUCTION'\n AND $__timeFilter(cdc.finished_date)\n GROUP BY DATE_SUB(DATE(cdc.finished_date), INTERVAL WEEKDAY(DATE(cdc.finished_date)) DAY)\n) combined\nGROUP BY time ORDER BY time", + "refId": "A" + } + ], + "title": "Weekly Credits vs Deployments", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 28 }, + "id": 30, + "panels": [], + "title": "Change Failure Rate", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Change failure rate: % of deployments that caused incidents", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 24, "x": 0, "y": 29 }, + "id": 31, + "options": { + "legend": { "calcs": ["mean", "sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT time, SUM(credits) AS 'Credits Used', SUM(cfr) AS 'Change Failure Rate'\nFROM (\n SELECT DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS time,\n SUM(credits_used) AS credits, 0 AS cfr\n FROM _tool_q_dev_user_report WHERE 
$__timeFilter(date)\n GROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\n UNION ALL\n SELECT DATE_SUB(DATE(d.deployment_finished_date), INTERVAL WEEKDAY(DATE(d.deployment_finished_date)) DAY) AS time,\n 0 AS credits,\n SUM(CASE WHEN i.id IS NOT NULL THEN 1 ELSE 0 END) / COUNT(DISTINCT d.deployment_id) AS cfr\n FROM (\n SELECT cdc.cicd_deployment_id AS deployment_id, MAX(cdc.finished_date) AS deployment_finished_date\n FROM cicd_deployment_commits cdc\n JOIN project_mapping pm ON cdc.cicd_scope_id = pm.row_id AND pm.`table` = 'cicd_scopes'\n WHERE cdc.result = 'SUCCESS' AND cdc.environment = 'PRODUCTION'\n AND $__timeFilter(cdc.finished_date)\n GROUP BY cdc.cicd_deployment_id\n ) d\n LEFT JOIN project_incident_deployment_relationships pidr ON d.deployment_id = pidr.deployment_id\n LEFT JOIN incidents i ON pidr.id = i.id\n GROUP BY DATE_SUB(DATE(d.deployment_finished_date), INTERVAL WEEKDAY(DATE(d.deployment_finished_date)) DAY)\n) combined\nGROUP BY time ORDER BY time", + "refId": "A" + } + ], + "title": "Weekly Credits vs Change Failure Rate", + "type": "timeseries" + } + ], + "preload": false, + "refresh": "5m", + "schemaVersion": 41, + "tags": ["q_dev", "dora", "kiro", "correlation"], + "templating": { + "list": [ + { + "current": { "selected": true, "text": ["All"], "value": ["$__all"] }, + "datasource": "mysql", + "definition": "SELECT DISTINCT name FROM projects", + "hide": 0, + "includeAll": true, + "label": "Project", + "multi": true, + "name": "project", + "options": [], + "query": "SELECT DISTINCT name FROM projects", + "refresh": 1, + "type": "query" + } + ] + }, + "time": { "from": "now-90d", "to": "now" }, + "timepicker": {}, + "timezone": "utc", + "title": "Kiro Credits + DORA Correlation", + "uid": "kiro_credits_dora", + "version": 1 +} From 4f4c7b5dd92e7c1e2b3dca1f0721547826cd2a6c Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sun, 22 Mar 2026 22:17:48 +0800 Subject: [PATCH 28/39] feat(q-dev): add AI Cost-Efficiency dashboard (#8793) 
Add a Grafana dashboard showing AI tool cost-efficiency metrics: - Credits per merged PR (overall + weekly trend) - Credits per production deployment (overall + weekly trend) - Credits per issue resolved (overall + weekly trend) - Weekly AI activity volume (credits, messages, conversations) Joins _tool_q_dev_user_report with pull_requests, cicd_deployment_commits, and issues by weekly aggregation. --- grafana/dashboards/AICostEfficiency.json | 317 +++++++++++++++++++++++ 1 file changed, 317 insertions(+) create mode 100644 grafana/dashboards/AICostEfficiency.json diff --git a/grafana/dashboards/AICostEfficiency.json b/grafana/dashboards/AICostEfficiency.json new file mode 100644 index 00000000000..d6e8bf4a998 --- /dev/null +++ b/grafana/dashboards/AICostEfficiency.json @@ -0,0 +1,317 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 1, + "id": null, + "links": [ + { + "asDropdown": false, + "icon": "external link", + "includeVars": true, + "keepTime": true, + "tags": [], + "targetBlank": true, + "title": "Kiro Usage Dashboard", + "type": "link", + "url": "/d/qdev_user_report" + } + ], + "panels": [ + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 0 }, + "id": 1, + "panels": [], + "title": "Summary", + "type": "row" + }, + { + "datasource": "mysql", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "blue", "value": null }] } + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 6, "x": 0, "y": 1 }, + "id": 2, + "options": { + "colorMode": "value", + "graphMode": "area", + "reduceOptions": { "calcs": ["sum"], "fields": "", "values": false }, + "textMode": "auto" + }, + "targets": [ + { + "datasource": 
"mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(SUM(credits_used)) AS 'Total Credits'\nFROM _tool_q_dev_user_report WHERE $__timeFilter(date)", + "refId": "A" + } + ], + "title": "Total Credits Used", + "type": "stat" + }, + { + "datasource": "mysql", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] } + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 6, "x": 6, "y": 1 }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "area", + "reduceOptions": { "calcs": ["sum"], "fields": "", "values": false }, + "textMode": "auto" + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(SUM(r.credits_used) / NULLIF(COUNT(DISTINCT pr.id), 0), 1) AS 'Credits / PR'\nFROM _tool_q_dev_user_report r\nCROSS JOIN (\n SELECT DISTINCT id FROM pull_requests\n WHERE merged_date IS NOT NULL AND $__timeFilter(merged_date)\n) pr\nWHERE $__timeFilter(r.date)", + "refId": "A" + } + ], + "title": "Credits per PR (Overall)", + "type": "stat" + }, + { + "datasource": "mysql", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] } + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 6, "x": 12, "y": 1 }, + "id": 4, + "options": { + "colorMode": "value", + "graphMode": "area", + "reduceOptions": { "calcs": ["sum"], "fields": "", "values": false }, + "textMode": "auto" + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(SUM(r.credits_used) / NULLIF(COUNT(DISTINCT cdc.cicd_deployment_id), 0), 1) AS 'Credits / Deploy'\nFROM _tool_q_dev_user_report r\nCROSS JOIN (\n SELECT DISTINCT cicd_deployment_id\n FROM cicd_deployment_commits\n WHERE result = 'SUCCESS' AND environment = 
'PRODUCTION'\n AND $__timeFilter(finished_date)\n) cdc\nWHERE $__timeFilter(r.date)", + "refId": "A" + } + ], + "title": "Credits per Deployment (Overall)", + "type": "stat" + }, + { + "datasource": "mysql", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] } + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 6, "x": 18, "y": 1 }, + "id": 5, + "options": { + "colorMode": "value", + "graphMode": "area", + "reduceOptions": { "calcs": ["sum"], "fields": "", "values": false }, + "textMode": "auto" + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(SUM(r.credits_used) / NULLIF(COUNT(DISTINCT i.id), 0), 1) AS 'Credits / Issue'\nFROM _tool_q_dev_user_report r\nCROSS JOIN (\n SELECT DISTINCT id FROM issues\n WHERE resolution_date IS NOT NULL AND type != 'INCIDENT'\n AND $__timeFilter(resolution_date)\n) i\nWHERE $__timeFilter(r.date)", + "refId": "A" + } + ], + "title": "Credits per Issue Resolved", + "type": "stat" + }, + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 7 }, + "id": 10, + "panels": [], + "title": "Weekly Trends", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Weekly cost per merged PR", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 8 }, + "id": 11, + "options": { + "legend": { "calcs": ["mean", "min"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } 
+ }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "WITH _credits AS (\n SELECT DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS week_start,\n SUM(credits_used) AS credits\n FROM _tool_q_dev_user_report WHERE $__timeFilter(date)\n GROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\n),\n_prs AS (\n SELECT DATE_SUB(DATE(merged_date), INTERVAL WEEKDAY(DATE(merged_date)) DAY) AS week_start,\n COUNT(*) AS prs\n FROM pull_requests\n WHERE merged_date IS NOT NULL AND $__timeFilter(merged_date)\n GROUP BY DATE_SUB(DATE(merged_date), INTERVAL WEEKDAY(DATE(merged_date)) DAY)\n)\nSELECT c.week_start AS time,\n ROUND(c.credits / NULLIF(p.prs, 0), 1) AS 'Credits per PR'\nFROM _credits c\nLEFT JOIN _prs p ON c.week_start = p.week_start\nORDER BY time", + "refId": "A" + } + ], + "title": "Credits per Merged PR (Weekly)", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Weekly cost per production deployment", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 8 }, + "id": 12, + "options": { + "legend": { "calcs": ["mean", "min"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "WITH _credits AS (\n SELECT DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS week_start,\n SUM(credits_used) AS credits\n FROM _tool_q_dev_user_report WHERE $__timeFilter(date)\n GROUP BY DATE_SUB(DATE(date), INTERVAL 
WEEKDAY(DATE(date)) DAY)\n),\n_deploys AS (\n SELECT DATE_SUB(DATE(finished_date), INTERVAL WEEKDAY(DATE(finished_date)) DAY) AS week_start,\n COUNT(DISTINCT cicd_deployment_id) AS deploys\n FROM cicd_deployment_commits\n WHERE result = 'SUCCESS' AND environment = 'PRODUCTION'\n AND $__timeFilter(finished_date)\n GROUP BY DATE_SUB(DATE(finished_date), INTERVAL WEEKDAY(DATE(finished_date)) DAY)\n)\nSELECT c.week_start AS time,\n ROUND(c.credits / NULLIF(d.deploys, 0), 1) AS 'Credits per Deploy'\nFROM _credits c\nLEFT JOIN _deploys d ON c.week_start = d.week_start\nORDER BY time", + "refId": "A" + } + ], + "title": "Credits per Deployment (Weekly)", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Weekly cost per resolved issue", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 16 }, + "id": 13, + "options": { + "legend": { "calcs": ["mean", "min"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "WITH _credits AS (\n SELECT DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS week_start,\n SUM(credits_used) AS credits\n FROM _tool_q_dev_user_report WHERE $__timeFilter(date)\n GROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\n),\n_issues AS (\n SELECT DATE_SUB(DATE(resolution_date), INTERVAL WEEKDAY(DATE(resolution_date)) DAY) AS week_start,\n COUNT(*) AS resolved\n FROM issues\n WHERE resolution_date IS NOT NULL AND type != 'INCIDENT'\n AND 
$__timeFilter(resolution_date)\n GROUP BY DATE_SUB(DATE(resolution_date), INTERVAL WEEKDAY(DATE(resolution_date)) DAY)\n)\nSELECT c.week_start AS time,\n ROUND(c.credits / NULLIF(i.resolved, 0), 1) AS 'Credits per Issue'\nFROM _credits c\nLEFT JOIN _issues i ON c.week_start = i.week_start\nORDER BY time", + "refId": "A" + } + ], + "title": "Credits per Issue Resolved (Weekly)", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Is cost efficiency improving over time?", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 16 }, + "id": 14, + "options": { + "legend": { "calcs": ["mean", "sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS time,\n SUM(credits_used) AS 'Credits',\n SUM(total_messages) AS 'Messages',\n SUM(chat_conversations) AS 'Conversations'\nFROM _tool_q_dev_user_report\nWHERE $__timeFilter(date)\nGROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\nORDER BY time", + "refId": "A" + } + ], + "title": "Weekly AI Activity Volume", + "type": "timeseries" + } + ], + "preload": false, + "refresh": "5m", + "schemaVersion": 41, + "tags": ["q_dev", "kiro", "cost", "efficiency"], + "templating": { "list": [] }, + "time": { "from": "now-90d", "to": "now" }, + "timepicker": {}, + "timezone": "utc", + "title": "AI Cost-Efficiency", + "uid": "ai_cost_efficiency", + "version": 1 +} 
From aa1ef95e59e3d1a5114f6d22bab1467bf09dd9dd Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sun, 22 Mar 2026 22:18:05 +0800 Subject: [PATCH 29/39] feat(q-dev): add Multi-AI Tool Comparison dashboard (Copilot vs Kiro) (#8794) Add a Grafana dashboard comparing GitHub Copilot and Kiro side by side: - Weekly active users comparison - Code suggestions & acceptance events (per tool) - LOC accepted comparison (combined time series) - Acceptance rate comparison (bar gauge) Template variables for Copilot connection/scope selection. Data from _tool_copilot_enterprise_daily_metrics vs _tool_q_dev_user_report and _tool_q_dev_user_data. --- grafana/dashboards/MultiAIComparison.json | 299 ++++++++++++++++++++++ 1 file changed, 299 insertions(+) create mode 100644 grafana/dashboards/MultiAIComparison.json diff --git a/grafana/dashboards/MultiAIComparison.json b/grafana/dashboards/MultiAIComparison.json new file mode 100644 index 00000000000..0ca1a3367ab --- /dev/null +++ b/grafana/dashboards/MultiAIComparison.json @@ -0,0 +1,299 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 1, + "id": null, + "links": [ + { + "asDropdown": false, + "icon": "external link", + "includeVars": true, + "keepTime": true, + "tags": [], + "targetBlank": true, + "title": "Kiro Usage", + "type": "link", + "url": "/d/qdev_user_report" + }, + { + "asDropdown": false, + "icon": "external link", + "includeVars": true, + "keepTime": true, + "tags": [], + "targetBlank": true, + "title": "Copilot Adoption", + "type": "link", + "url": "/d/copilot_adoption" + } + ], + "panels": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { "h": 3, "w": 24, "x": 0, "y": 0 }, + "id": 1, + "options": { + "code": { "language": "plaintext", 
"showLineNumbers": false, "showMiniMap": false }, + "content": "## Multi-AI Tool Comparison: Copilot vs Kiro\nCompare GitHub Copilot and Kiro (Amazon Q Developer) adoption and usage side by side.\n\n**Note:** Copilot metrics come from `_tool_copilot_enterprise_daily_metrics` (enterprise-level aggregates). Kiro metrics come from `_tool_q_dev_user_report` and `_tool_q_dev_user_data`. Select the Copilot connection and scope using the dropdowns above.", + "mode": "markdown" + }, + "title": "About", + "type": "text" + }, + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 3 }, + "id": 2, + "panels": [], + "title": "Active Users", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Weekly active users for both tools", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 24, "x": 0, "y": 4 }, + "id": 3, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS time,\n COUNT(DISTINCT user_id) AS 'Kiro Active Users'\nFROM _tool_q_dev_user_report\nWHERE $__timeFilter(date)\nGROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\nORDER BY time", + "refId": "A" + }, + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE_SUB(DATE(day), INTERVAL WEEKDAY(DATE(day)) DAY) AS time,\n MAX(daily_active_users) AS 'Copilot Active 
Users'\nFROM _tool_copilot_enterprise_daily_metrics\nWHERE connection_id = ${copilot_connection_id}\n AND scope_id = '${copilot_scope_id}'\n AND $__timeFilter(day)\nGROUP BY DATE_SUB(DATE(day), INTERVAL WEEKDAY(DATE(day)) DAY)\nORDER BY time", + "refId": "B" + } + ], + "title": "Weekly Active Users Comparison", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 12 }, + "id": 10, + "panels": [], + "title": "Code Generation", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Weekly code generation and acceptance events", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 13 }, + "id": 11, + "options": { + "legend": { "calcs": ["mean", "sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS time,\n SUM(inline_suggestions_count) AS 'Kiro: Suggestions',\n SUM(inline_acceptance_count) AS 'Kiro: Accepted'\nFROM _tool_q_dev_user_data\nWHERE $__timeFilter(date)\nGROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\nORDER BY time", + "refId": "A" + } + ], + "title": "Kiro: Code Suggestions & Acceptance", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Weekly Copilot code generation and acceptance events", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, 
"axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 13 }, + "id": 12, + "options": { + "legend": { "calcs": ["mean", "sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE_SUB(DATE(day), INTERVAL WEEKDAY(DATE(day)) DAY) AS time,\n SUM(code_generation_activity_count) AS 'Copilot: Suggestions',\n SUM(code_acceptance_activity_count) AS 'Copilot: Accepted'\nFROM _tool_copilot_enterprise_daily_metrics\nWHERE connection_id = ${copilot_connection_id}\n AND scope_id = '${copilot_scope_id}'\n AND $__timeFilter(day)\nGROUP BY DATE_SUB(DATE(day), INTERVAL WEEKDAY(DATE(day)) DAY)\nORDER BY time", + "refId": "A" + } + ], + "title": "Copilot: Code Suggestions & Acceptance", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { "h": 1, "w": 24, "x": 0, "y": 21 }, + "id": 20, + "panels": [], + "title": "Lines of Code & Acceptance Rate", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Weekly AI-generated lines of code accepted", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisBorderShow": false, "axisLabel": "", "axisPlacement": "auto", + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "pointSize": 5, "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 22 }, + "id": 21, + "options": { + "legend": { "calcs": ["mean", "sum"], 
"displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT time, SUM(kiro_loc) AS 'Kiro LOC Accepted', SUM(copilot_loc) AS 'Copilot LOC Added'\nFROM (\n SELECT DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY) AS time,\n SUM(inline_ai_code_lines + chat_ai_code_lines) AS kiro_loc, 0 AS copilot_loc\n FROM _tool_q_dev_user_data WHERE $__timeFilter(date)\n GROUP BY DATE_SUB(DATE(date), INTERVAL WEEKDAY(DATE(date)) DAY)\n UNION ALL\n SELECT DATE_SUB(DATE(day), INTERVAL WEEKDAY(DATE(day)) DAY) AS time,\n 0 AS kiro_loc, SUM(loc_added_sum) AS copilot_loc\n FROM _tool_copilot_enterprise_daily_metrics\n WHERE connection_id = ${copilot_connection_id}\n AND scope_id = '${copilot_scope_id}'\n AND $__timeFilter(day)\n GROUP BY DATE_SUB(DATE(day), INTERVAL WEEKDAY(DATE(day)) DAY)\n) combined\nGROUP BY time ORDER BY time", + "refId": "A" + } + ], + "title": "LOC Accepted: Kiro vs Copilot", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Overall acceptance rates for both tools", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] }, + "unit": "percent" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 22 }, + "id": 22, + "options": { + "barRadius": 0.1, + "barWidth": 0.5, + "orientation": "horizontal", + "showValue": "auto", + "stacking": "none", + "tooltip": { "mode": "single" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT 'Kiro' AS Tool,\n ROUND(SUM(inline_acceptance_count) * 100.0 / NULLIF(SUM(inline_suggestions_count), 0), 1) AS 'Acceptance Rate'\nFROM _tool_q_dev_user_data\nWHERE $__timeFilter(date)\nUNION ALL\nSELECT 'Copilot' AS Tool,\n 
ROUND(SUM(code_acceptance_activity_count) * 100.0 / NULLIF(SUM(code_generation_activity_count), 0), 1)\nFROM _tool_copilot_enterprise_daily_metrics\nWHERE connection_id = ${copilot_connection_id}\n AND scope_id = '${copilot_scope_id}'\n AND $__timeFilter(day)", + "refId": "A" + } + ], + "title": "Acceptance Rate Comparison", + "type": "bargauge" + } + ], + "preload": false, + "refresh": "5m", + "schemaVersion": 41, + "tags": ["q_dev", "copilot", "kiro", "comparison"], + "templating": { + "list": [ + { + "current": {}, + "datasource": "mysql", + "definition": "SELECT DISTINCT connection_id FROM _tool_copilot_scopes ORDER BY connection_id DESC", + "hide": 0, + "label": "Copilot Connection", + "name": "copilot_connection_id", + "options": [], + "query": "SELECT DISTINCT connection_id FROM _tool_copilot_scopes ORDER BY connection_id DESC", + "refresh": 1, + "type": "query" + }, + { + "current": {}, + "datasource": "mysql", + "definition": "SELECT DISTINCT id FROM _tool_copilot_scopes WHERE connection_id = CAST('${copilot_connection_id}' AS UNSIGNED)", + "hide": 0, + "label": "Copilot Scope", + "name": "copilot_scope_id", + "options": [], + "query": "SELECT DISTINCT id FROM _tool_copilot_scopes WHERE connection_id = CAST('${copilot_connection_id}' AS UNSIGNED)", + "refresh": 1, + "type": "query" + } + ] + }, + "time": { "from": "now-90d", "to": "now" }, + "timepicker": {}, + "timezone": "utc", + "title": "Multi-AI Tool Comparison", + "uid": "multi_ai_comparison", + "version": 1 +} From 4f0e5ee498313b4b48577b8b961b5237fbbbee7e Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sun, 22 Mar 2026 22:36:02 +0800 Subject: [PATCH 30/39] feat(q-dev): add Kiro AI Model ROI dashboard (#8795) Add a Grafana dashboard analyzing per-model performance from chat logs: - Model Performance Summary table (requests, share%, avg prompt/response length, response/prompt ratio, steering/spec mode usage) - Daily Model Usage Distribution (stacked bar chart) - Avg Response Length by Model trend 
(output quality proxy) Data source: _tool_q_dev_chat_log grouped by model_id. --- grafana/dashboards/AIModelROI.json | 124 +++++++++++++++++++++++++++++ 1 file changed, 124 insertions(+) create mode 100644 grafana/dashboards/AIModelROI.json diff --git a/grafana/dashboards/AIModelROI.json b/grafana/dashboards/AIModelROI.json new file mode 100644 index 00000000000..4ee15620bb1 --- /dev/null +++ b/grafana/dashboards/AIModelROI.json @@ -0,0 +1,124 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 1, + "id": null, + "links": [], + "panels": [ + { + "datasource": "mysql", + "description": "Requests, avg prompt/response length, and usage share per model", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "custom": { "align": "auto", "cellOptions": { "type": "auto" }, "filterable": true }, + "thresholds": { "mode": "absolute", "steps": [{ "color": "green" }] } + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 24, "x": 0, "y": 0 }, + "id": 1, + "options": { "cellHeight": "sm", "showHeader": true, "sortBy": [] }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n CASE WHEN model_id = '' OR model_id IS NULL THEN '(unknown)' ELSE model_id END AS 'Model',\n COUNT(*) AS 'Requests',\n ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM _tool_q_dev_chat_log WHERE $__timeFilter(timestamp)), 1) AS 'Share %',\n ROUND(AVG(prompt_length)) AS 'Avg Prompt Len',\n ROUND(AVG(response_length)) AS 'Avg Response Len',\n ROUND(AVG(response_length) / NULLIF(AVG(prompt_length), 0), 2) AS 'Response/Prompt Ratio',\n SUM(CASE WHEN has_steering = 1 THEN 1 ELSE 0 END) AS 'Steering Uses',\n SUM(CASE WHEN is_spec_mode = 1 THEN 1 ELSE 0 END) AS 'Spec Mode Uses'\nFROM 
_tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY model_id\nORDER BY COUNT(*) DESC", + "refId": "A" + } + ], + "title": "Model Performance Summary", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Request volume by model over time", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "drawStyle": "bars", "fillOpacity": 80, "lineWidth": 1, + "stacking": { "mode": "normal" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 24, "x": 0, "y": 8 }, + "id": 2, + "options": { + "legend": { "calcs": ["sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT DATE(timestamp) AS time,\n CASE WHEN model_id = '' OR model_id IS NULL THEN '(unknown)' ELSE model_id END AS metric,\n COUNT(*) AS value\nFROM _tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY DATE(timestamp), model_id\nORDER BY time", + "refId": "A" + } + ], + "title": "Daily Model Usage Distribution", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Average response length per model over time — proxy for output quality", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 24, "x": 0, "y": 16 }, + "id": 3, + "options": { + "legend": { "calcs": ["mean"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": 
"SELECT DATE(timestamp) AS time,\n CASE WHEN model_id = '' OR model_id IS NULL THEN '(unknown)' ELSE model_id END AS metric,\n ROUND(AVG(response_length)) AS value\nFROM _tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY DATE(timestamp), model_id\nORDER BY time", + "refId": "A" + } + ], + "title": "Avg Response Length by Model (Daily)", + "type": "timeseries" + } + ], + "preload": false, + "refresh": "5m", + "schemaVersion": 41, + "tags": ["q_dev", "kiro", "model", "roi"], + "templating": { "list": [] }, + "time": { "from": "now-90d", "to": "now" }, + "timepicker": {}, + "timezone": "utc", + "title": "Kiro AI Model ROI", + "uid": "kiro_model_roi", + "version": 1 +} From 371cf682d15eaae347ac4ecc48fb431b9edbbdfd Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sun, 22 Mar 2026 22:36:39 +0800 Subject: [PATCH 31/39] feat(q-dev): add Steering & Spec Mode Adoption dashboard (#8798) Track Kiro steering rules and spec mode adoption: - User/request adoption rate stats - Weekly adoption rate trend - Steering impact on prompt/response length - Per-user feature adoption table --- .../dashboards/SteeringAdoptionTracker.json | 212 ++++++++++++++++++ 1 file changed, 212 insertions(+) create mode 100644 grafana/dashboards/SteeringAdoptionTracker.json diff --git a/grafana/dashboards/SteeringAdoptionTracker.json b/grafana/dashboards/SteeringAdoptionTracker.json new file mode 100644 index 00000000000..4a840893827 --- /dev/null +++ b/grafana/dashboards/SteeringAdoptionTracker.json @@ -0,0 +1,212 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 1, + "id": null, + "links": [], + "panels": [ + { + "datasource": "mysql", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { 
"mode": "absolute", "steps": [{ "color": "blue" }] }, + "unit": "percent" + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 8, "x": 0, "y": 0 }, + "id": 1, + "options": { + "colorMode": "value", "graphMode": "area", + "reduceOptions": { "calcs": ["sum"], "fields": "", "values": false }, + "textMode": "auto" + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(COUNT(DISTINCT CASE WHEN has_steering = 1 THEN user_id END) * 100.0 / NULLIF(COUNT(DISTINCT user_id), 0), 0) AS 'value'\nFROM _tool_q_dev_chat_log WHERE $__timeFilter(timestamp)", + "refId": "A" + } + ], + "title": "Users with Steering", + "type": "stat" + }, + { + "datasource": "mysql", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "purple" }] }, + "unit": "percent" + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 8, "x": 8, "y": 0 }, + "id": 2, + "options": { + "colorMode": "value", "graphMode": "area", + "reduceOptions": { "calcs": ["sum"], "fields": "", "values": false }, + "textMode": "auto" + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(COUNT(DISTINCT CASE WHEN is_spec_mode = 1 THEN user_id END) * 100.0 / NULLIF(COUNT(DISTINCT user_id), 0), 0) AS 'value'\nFROM _tool_q_dev_chat_log WHERE $__timeFilter(timestamp)", + "refId": "A" + } + ], + "title": "Users with Spec Mode", + "type": "stat" + }, + { + "datasource": "mysql", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "green" }] }, + "unit": "percent" + }, + "overrides": [] + }, + "gridPos": { "h": 6, "w": 8, "x": 16, "y": 0 }, + "id": 3, + "options": { + "colorMode": "value", "graphMode": "area", + "reduceOptions": { "calcs": ["sum"], "fields": "", "values": false }, + "textMode": "auto" + }, + "targets": [ + { 
+ "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT ROUND(SUM(CASE WHEN has_steering = 1 THEN 1 ELSE 0 END) * 100.0 / COUNT(*), 1) AS 'value'\nFROM _tool_q_dev_chat_log WHERE $__timeFilter(timestamp)", + "refId": "A" + } + ], + "title": "Requests with Steering", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Weekly trend of steering and spec mode adoption rate", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 24, "x": 0, "y": 6 }, + "id": 4, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE_SUB(DATE(timestamp), INTERVAL WEEKDAY(DATE(timestamp)) DAY) AS time,\n SUM(CASE WHEN has_steering = 1 THEN 1 ELSE 0 END) / COUNT(*) AS 'Steering Rate',\n SUM(CASE WHEN is_spec_mode = 1 THEN 1 ELSE 0 END) / COUNT(*) AS 'Spec Mode Rate'\nFROM _tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY DATE_SUB(DATE(timestamp), INTERVAL WEEKDAY(DATE(timestamp)) DAY)\nORDER BY time", + "refId": "A" + } + ], + "title": "Weekly Steering & Spec Mode Adoption Rate", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Compare prompt and response quality between steering and non-steering sessions", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "thresholds": { "mode": "absolute", "steps": [{ "color": "green" }] }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 14 }, + "id": 5, + 
"options": { + "barRadius": 0.1, "barWidth": 0.5, + "orientation": "horizontal", "showValue": "auto", "stacking": "none", + "tooltip": { "mode": "single" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT 'With Steering' AS 'Mode', ROUND(AVG(prompt_length)) AS 'Avg Prompt', ROUND(AVG(response_length)) AS 'Avg Response'\nFROM _tool_q_dev_chat_log WHERE has_steering = 1 AND $__timeFilter(timestamp)\nUNION ALL\nSELECT 'Without Steering', ROUND(AVG(prompt_length)), ROUND(AVG(response_length))\nFROM _tool_q_dev_chat_log WHERE has_steering = 0 AND $__timeFilter(timestamp)", + "refId": "A" + } + ], + "title": "Steering Impact: Prompt & Response Length", + "type": "bargauge" + }, + { + "datasource": "mysql", + "description": "Per-user steering and spec mode usage", + "fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "custom": { "align": "auto", "cellOptions": { "type": "auto" }, "filterable": true }, + "thresholds": { "mode": "absolute", "steps": [{ "color": "green" }] } + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 14 }, + "id": 6, + "options": { "cellHeight": "sm", "showHeader": true }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n COALESCE(MAX(display_name), user_id) AS 'User',\n COUNT(*) AS 'Total Chats',\n SUM(CASE WHEN has_steering = 1 THEN 1 ELSE 0 END) AS 'Steering',\n SUM(CASE WHEN is_spec_mode = 1 THEN 1 ELSE 0 END) AS 'Spec Mode',\n ROUND(SUM(CASE WHEN has_steering = 1 THEN 1 ELSE 0 END) * 100.0 / COUNT(*), 1) AS 'Steering %'\nFROM _tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY user_id\nORDER BY COUNT(*) DESC", + "refId": "A" + } + ], + "title": "Per-User Feature Adoption", + "type": "table" + } + ], + "preload": false, + "refresh": "5m", + "schemaVersion": 41, + "tags": ["q_dev", "kiro", "steering", "adoption"], + "templating": { "list": [] }, + "time": { "from": "now-90d", 
"to": "now" }, + "timepicker": {}, + "timezone": "utc", + "title": "Kiro Steering & Spec Mode Adoption", + "uid": "kiro_steering_adoption", + "version": 1 +} From bb8103dbed8469aed686bc9752a75832dc530faa Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sun, 22 Mar 2026 22:36:56 +0800 Subject: [PATCH 32/39] feat(q-dev): add Developer AI Productivity Hours dashboard (#8797) Analyze when developers are most productive with AI tools: - AI Activity by Hour of Day (chat + completions stacked bar) - Prompt & Response Length by Hour (complexity patterns) - Feature Usage by Hour (steering/spec mode/plain chat) - AI Activity by Day of Week --- .../DeveloperProductivityHours.json | 162 ++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 grafana/dashboards/DeveloperProductivityHours.json diff --git a/grafana/dashboards/DeveloperProductivityHours.json b/grafana/dashboards/DeveloperProductivityHours.json new file mode 100644 index 00000000000..3201acdc08b --- /dev/null +++ b/grafana/dashboards/DeveloperProductivityHours.json @@ -0,0 +1,162 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 1, + "id": null, + "links": [], + "panels": [ + { + "datasource": "mysql", + "description": "Chat and completion events by hour of day", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "drawStyle": "bars", "fillOpacity": 80, "lineWidth": 1, + "stacking": { "mode": "normal" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 24, "x": 0, "y": 0 }, + "id": 1, + "options": { + "legend": { "calcs": ["sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ 
+ { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT LPAD(CAST(hr AS CHAR), 2, '0') AS 'Hour',\n SUM(chat) AS 'Chat Events', SUM(comp) AS 'Completion Events'\nFROM (\n SELECT HOUR(timestamp) AS hr, COUNT(*) AS chat, 0 AS comp\n FROM _tool_q_dev_chat_log WHERE $__timeFilter(timestamp)\n GROUP BY HOUR(timestamp)\n UNION ALL\n SELECT HOUR(timestamp) AS hr, 0 AS chat, COUNT(*) AS comp\n FROM _tool_q_dev_completion_log WHERE $__timeFilter(timestamp)\n GROUP BY HOUR(timestamp)\n) t GROUP BY hr ORDER BY hr", + "refId": "A" + } + ], + "title": "AI Activity by Hour of Day (UTC)", + "type": "barchart" + }, + { + "datasource": "mysql", + "description": "Average prompt complexity and response richness by hour", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "showPoints": "auto", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 8 }, + "id": 2, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT LPAD(CAST(HOUR(timestamp) AS CHAR), 2, '0') AS 'Hour',\n ROUND(AVG(prompt_length)) AS 'Avg Prompt Length',\n ROUND(AVG(response_length)) AS 'Avg Response Length'\nFROM _tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY HOUR(timestamp)\nORDER BY HOUR(timestamp)", + "refId": "A" + } + ], + "title": "Prompt & Response Length by Hour", + "type": "barchart" + }, + { + "datasource": "mysql", + "description": "Steering and spec mode usage concentration by hour", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "drawStyle": 
"bars", "fillOpacity": 80, "lineWidth": 1, + "stacking": { "mode": "normal" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 8 }, + "id": 3, + "options": { + "legend": { "calcs": ["sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT LPAD(CAST(HOUR(timestamp) AS CHAR), 2, '0') AS 'Hour',\n SUM(CASE WHEN has_steering = 1 THEN 1 ELSE 0 END) AS 'Steering',\n SUM(CASE WHEN is_spec_mode = 1 THEN 1 ELSE 0 END) AS 'Spec Mode',\n SUM(CASE WHEN has_steering = 0 AND is_spec_mode = 0 THEN 1 ELSE 0 END) AS 'Plain Chat'\nFROM _tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY HOUR(timestamp)\nORDER BY HOUR(timestamp)", + "refId": "A" + } + ], + "title": "Feature Usage by Hour", + "type": "barchart" + }, + { + "datasource": "mysql", + "description": "Day-of-week activity pattern", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "drawStyle": "bars", "fillOpacity": 80, "lineWidth": 1, + "stacking": { "mode": "normal" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 24, "x": 0, "y": 16 }, + "id": 4, + "options": { + "legend": { "calcs": ["sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n CASE DAYOFWEEK(timestamp)\n WHEN 1 THEN 'Sun' WHEN 2 THEN 'Mon' WHEN 3 THEN 'Tue'\n WHEN 4 THEN 'Wed' WHEN 5 THEN 'Thu' WHEN 6 THEN 'Fri' WHEN 7 THEN 'Sat'\n END AS 'Day',\n SUM(chat) AS 'Chat Events', SUM(comp) AS 'Completions'\nFROM (\n SELECT timestamp, COUNT(*) AS chat, 0 AS comp FROM _tool_q_dev_chat_log WHERE $__timeFilter(timestamp) GROUP BY 
timestamp\n UNION ALL\n SELECT timestamp, 0, COUNT(*) FROM _tool_q_dev_completion_log WHERE $__timeFilter(timestamp) GROUP BY timestamp\n) t\nGROUP BY DAYOFWEEK(timestamp)\nORDER BY DAYOFWEEK(timestamp)", + "refId": "A" + } + ], + "title": "AI Activity by Day of Week", + "type": "barchart" + } + ], + "preload": false, + "refresh": "5m", + "schemaVersion": 41, + "tags": ["q_dev", "kiro", "productivity", "hours"], + "templating": { "list": [] }, + "time": { "from": "now-90d", "to": "now" }, + "timepicker": {}, + "timezone": "utc", + "title": "Developer AI Productivity Hours", + "uid": "kiro_productivity_hours", + "version": 1 +} From e15532113c57b925670ee5b0a12c48d54b48ac58 Mon Sep 17 00:00:00 2001 From: Warren Chen Date: Sun, 22 Mar 2026 22:37:08 +0800 Subject: [PATCH 33/39] feat(q-dev): add Language AI Heatmap dashboard (#8796) Analyze AI-assisted coding patterns by programming language: - Language Completion Profile table (requests, avg completions, context sizes, users per language) - Daily Completions by Language (stacked bar) - Active File Types During Chat (donut) - Avg Context Size by Language trend (top 5) --- grafana/dashboards/LanguageAIHeatmap.json | 156 ++++++++++++++++++++++ 1 file changed, 156 insertions(+) create mode 100644 grafana/dashboards/LanguageAIHeatmap.json diff --git a/grafana/dashboards/LanguageAIHeatmap.json b/grafana/dashboards/LanguageAIHeatmap.json new file mode 100644 index 00000000000..adb3958071e --- /dev/null +++ b/grafana/dashboards/LanguageAIHeatmap.json @@ -0,0 +1,156 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 1, + "id": null, + "links": [], + "panels": [ + { + "datasource": "mysql", + "description": "Completion requests, avg context size, and completion rate per language", + 
"fieldConfig": { + "defaults": { + "color": { "mode": "thresholds" }, + "custom": { "align": "auto", "cellOptions": { "type": "auto" }, "filterable": true }, + "thresholds": { "mode": "absolute", "steps": [{ "color": "green" }] } + }, + "overrides": [] + }, + "gridPos": { "h": 9, "w": 24, "x": 0, "y": 0 }, + "id": 1, + "options": { "cellHeight": "sm", "showHeader": true, "sortBy": [{ "desc": true, "displayName": "Requests" }] }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n CASE WHEN file_extension = '' THEN '(unknown)' ELSE file_extension END AS 'Language',\n COUNT(*) AS 'Requests',\n ROUND(AVG(completions_count), 2) AS 'Avg Completions',\n ROUND(AVG(left_context_length)) AS 'Avg Left Context',\n ROUND(AVG(right_context_length)) AS 'Avg Right Context',\n ROUND(AVG(left_context_length + right_context_length)) AS 'Avg Total Context',\n COUNT(DISTINCT user_id) AS 'Users'\nFROM _tool_q_dev_completion_log\nWHERE $__timeFilter(timestamp)\nGROUP BY file_extension\nORDER BY COUNT(*) DESC", + "refId": "A" + } + ], + "title": "Language Completion Profile", + "type": "table" + }, + { + "datasource": "mysql", + "description": "Daily completion requests by language", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "drawStyle": "bars", "fillOpacity": 80, "lineWidth": 1, + "stacking": { "mode": "normal" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 24, "x": 0, "y": 9 }, + "id": 2, + "options": { + "legend": { "calcs": ["sum"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT DATE(timestamp) AS time,\n CASE WHEN file_extension = '' THEN '(unknown)' ELSE file_extension END AS metric,\n COUNT(*) AS value\nFROM 
_tool_q_dev_completion_log\nWHERE $__timeFilter(timestamp)\nGROUP BY DATE(timestamp), file_extension\nORDER BY time", + "refId": "A" + } + ], + "title": "Daily Completions by Language", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Which file types are most active during chat sessions", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "mappings": [], + "thresholds": { "mode": "absolute", "steps": [{ "color": "green" }] } + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 17 }, + "id": 3, + "options": { + "displayLabels": ["name", "percent"], + "legend": { "displayMode": "table", "placement": "right", "showLegend": true, "values": ["value", "percent"] }, + "pieType": "donut", + "reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": true }, + "tooltip": { "mode": "single" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "table", + "rawQuery": true, + "rawSql": "SELECT\n CASE WHEN active_file_extension = '' OR active_file_extension IS NULL THEN '(no file)' ELSE active_file_extension END AS 'File Type',\n COUNT(*) AS 'Chat Events'\nFROM _tool_q_dev_chat_log\nWHERE $__timeFilter(timestamp)\nGROUP BY active_file_extension\nORDER BY COUNT(*) DESC\nLIMIT 10", + "refId": "A" + } + ], + "title": "Active File Types During Chat", + "type": "piechart" + }, + { + "datasource": "mysql", + "description": "Context size trends for top languages", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "drawStyle": "line", "fillOpacity": 10, "lineInterpolation": "smooth", "lineWidth": 2, + "showPoints": "never", "spanNulls": true, + "stacking": { "mode": "none" }, "thresholdsStyle": { "mode": "off" } + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 17 }, + "id": 4, + "options": { + "legend": { "calcs": ["mean"], "displayMode": "table", "placement": "right", "showLegend": true }, + "tooltip": { 
"mode": "multi" } + }, + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT DATE(timestamp) AS time,\n file_extension AS metric,\n ROUND(AVG(left_context_length + right_context_length)) AS value\nFROM _tool_q_dev_completion_log\nWHERE $__timeFilter(timestamp)\n AND file_extension IN (SELECT file_extension FROM _tool_q_dev_completion_log GROUP BY file_extension ORDER BY COUNT(*) DESC LIMIT 5)\nGROUP BY DATE(timestamp), file_extension\nORDER BY time", + "refId": "A" + } + ], + "title": "Avg Context Size by Language (Top 5)", + "type": "timeseries" + } + ], + "preload": false, + "refresh": "5m", + "schemaVersion": 41, + "tags": ["q_dev", "kiro", "language", "completions"], + "templating": { "list": [] }, + "time": { "from": "now-90d", "to": "now" }, + "timepicker": {}, + "timezone": "utc", + "title": "Kiro Language AI Heatmap", + "uid": "kiro_language_heatmap", + "version": 1 +} From 46a5fa0fb1926bd548c30dd95dbb81b58cb4e3f6 Mon Sep 17 00:00:00 2001 From: Chris Pavlicek Date: Mon, 23 Mar 2026 14:21:55 +0000 Subject: [PATCH 34/39] Fix/circleci column names (#8799) * fix(circleci): rename created_at to created_date in jobs/workflows Add migration to copy created_at -> created_date and update models/converters. 
* fix(circleci): update pipeline parsing * test(circleci): add incremental tests for collectors --- .../plugins/circleci/e2e/incremental_test.go | 154 ++++++++++++++++++ .../_raw_circleci_api_jobs_incremental.csv | 73 +++++++++ ...raw_circleci_api_workflows_incremental.csv | 67 ++++++++ .../snapshot_tables/_tool_circleci_jobs.csv | 20 +-- .../_tool_circleci_jobs_incremental.csv | 7 + .../_tool_circleci_pipelines.csv | 22 +-- .../_tool_circleci_workflows_incremental.csv | 7 + .../e2e/snapshot_tables/cicd_pipelines.csv | 20 +-- backend/plugins/circleci/e2e/workflow_test.go | 2 +- backend/plugins/circleci/models/job.go | 2 +- ...60322_rename_created_at_to_created_date.go | 71 ++++++++ .../models/migrationscripts/register.go | 1 + backend/plugins/circleci/models/pipeline.go | 4 +- backend/plugins/circleci/models/workflow.go | 2 +- .../plugins/circleci/tasks/job_converter.go | 4 +- .../circleci/tasks/workflow_converter.go | 6 +- .../circleci/tasks/workflow_extractor.go | 4 +- 17 files changed, 423 insertions(+), 43 deletions(-) create mode 100644 backend/plugins/circleci/e2e/incremental_test.go create mode 100644 backend/plugins/circleci/e2e/raw_tables/_raw_circleci_api_jobs_incremental.csv create mode 100644 backend/plugins/circleci/e2e/raw_tables/_raw_circleci_api_workflows_incremental.csv create mode 100644 backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_jobs_incremental.csv create mode 100644 backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_workflows_incremental.csv create mode 100644 backend/plugins/circleci/models/migrationscripts/20260322_rename_created_at_to_created_date.go diff --git a/backend/plugins/circleci/e2e/incremental_test.go b/backend/plugins/circleci/e2e/incremental_test.go new file mode 100644 index 00000000000..85de8a7ba33 --- /dev/null +++ b/backend/plugins/circleci/e2e/incremental_test.go @@ -0,0 +1,154 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. 
See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package e2e + +import ( + "sort" + "testing" + "time" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/helpers/e2ehelper" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/circleci/impl" + "github.com/apache/incubator-devlake/plugins/circleci/models" + "github.com/apache/incubator-devlake/plugins/circleci/tasks" + "github.com/stretchr/testify/assert" +) + +// TestCircleciWorkflowIncremental verifies that the workflow collector's incremental +// logic correctly filters pipelines by created_date. This is a regression test for +// the created_at -> created_date column rename. 
+func TestCircleciWorkflowIncremental(t *testing.T) { + var circleci impl.Circleci + + dataflowTester := e2ehelper.NewDataFlowTester(t, "circleci", circleci) + taskData := &tasks.CircleciTaskData{ + Options: &tasks.CircleciOptions{ + ConnectionId: 1, + ProjectSlug: "github/coldgust/coldgust.github.io", + }, + RegexEnricher: api.NewRegexEnricher(), + } + + // seed pipelines table via extraction + dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_circleci_api_pipelines.csv", "_raw_circleci_api_pipelines") + dataflowTester.FlushTabler(&models.CircleciPipeline{}) + dataflowTester.Subtask(tasks.ExtractPipelinesMeta, taskData) + + // Part 1: verify the SQL query used by BuildInputIterator works with created_date. + // Pipelines #4-10 have created_date > 17:45 — assert the exact IDs returned. + createdAfter := time.Date(2023, 3, 25, 17, 45, 0, 0, time.UTC) + var pipelines []models.CircleciPipeline + assert.Nil(t, dataflowTester.Dal.All(&pipelines, + dal.Where("connection_id = ? AND project_slug = ? AND created_date > ?", + 1, "github/coldgust/coldgust.github.io", createdAfter), + )) + pipelineIds := make([]string, len(pipelines)) + for i, p := range pipelines { + pipelineIds[i] = p.Id + } + sort.Strings(pipelineIds) + assert.Equal(t, []string{ + "23622ee4-e150-4920-9d66-81533fa765a4", // pipeline #5 + "2c45280f-7fb3-4025-b703-a547c4a94916", // pipeline #4 + "70f3eb15-3b94-4f80-b65e-f23f4b74c33a", // pipeline #6 + "7fcc1623-edcc-4a76-ad20-cd81aa83519f", // pipeline #9 + "866e967d-f826-4470-aed6-fc0c92e98703", // pipeline #7 + "afe0cabe-e7ee-4eb7-bf13-bb6170d139f0", // pipeline #8 + "d323f088-02fa-4ed5-9696-fc2f89a27150", // pipeline #10 + }, pipelineIds) + + // Part 2: verify extraction with only the incrementally-collected workflow raw data + // (workflows for pipelines #4-9 only). 
+ dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_circleci_api_workflows_incremental.csv", "_raw_circleci_api_workflows") + dataflowTester.FlushTabler(&models.CircleciWorkflow{}) + dataflowTester.Subtask(tasks.ExtractWorkflowsMeta, taskData) + dataflowTester.VerifyTableWithOptions( + models.CircleciWorkflow{}, + e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/_tool_circleci_workflows_incremental.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + IgnoreFields: []string{"started_at", "stopped_at"}, + }, + ) +} + +// TestCircleciJobIncremental verifies that the job collector's incremental logic +// correctly filters workflows by created_date. Regression test for the column rename. +func TestCircleciJobIncremental(t *testing.T) { + var circleci impl.Circleci + + dataflowTester := e2ehelper.NewDataFlowTester(t, "circleci", circleci) + taskData := &tasks.CircleciTaskData{ + Options: &tasks.CircleciOptions{ + ConnectionId: 1, + ProjectSlug: "github/coldgust/coldgust.github.io", + }, + RegexEnricher: api.NewRegexEnricher(), + Project: &models.CircleciProject{ + Id: "abcd", + }, + } + + // seed workflows table via extraction (all 10 workflows including b3b77371 with null created_date) + dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_circleci_api_workflows.csv", "_raw_circleci_api_workflows") + dataflowTester.FlushTabler(&models.CircleciWorkflow{}) + dataflowTester.Subtask(tasks.ExtractWorkflowsMeta, taskData) + + // Part 1: verify the SQL query used by BuildInputIterator works with created_date. + // Workflows for pipelines #4-9 have created_date > 17:45 — assert the exact IDs returned. + // Workflow b3b77371 (null created_date) is excluded by the > comparison. + createdAfter := time.Date(2023, 3, 25, 17, 45, 0, 0, time.UTC) + var workflows []models.CircleciWorkflow + assert.Nil(t, dataflowTester.Dal.All(&workflows, + dal.Where("connection_id = ? AND project_slug = ? 
AND created_date > ?", + 1, "github/coldgust/coldgust.github.io", createdAfter), + )) + workflowIds := make([]string, len(workflows)) + for i, w := range workflows { + workflowIds[i] = w.Id + } + sort.Strings(workflowIds) + assert.Equal(t, []string{ + "6731159f-5275-4bfa-ba70-39d343d63814", // pipeline #5 + "7370985a-9de3-4a47-acbc-e6a1fe8e5812", // pipeline #7 + "b9ab7bbe-2f30-4c59-b4e2-eb2005bffb14", // pipeline #6 + "c7df82a6-0d2b-4e19-a36a-3f3aa9fd3943", // pipeline #4 + "fc76deef-bcdd-4856-8e96-a8e2d1c5a85f", // pipeline #8 + "fd0bd4f5-264f-4e3c-a151-06153c018f78", // pipeline #9 + }, workflowIds) + + // Part 2: verify extraction with only the incrementally-collected job raw data + // (jobs for workflows from pipelines #4-9 only). + dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_circleci_api_projects.csv", "_raw_circleci_api_projects") + dataflowTester.FlushTabler(&models.CircleciProject{}) + dataflowTester.Subtask(tasks.ExtractProjectsMeta, taskData) + + dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_circleci_api_jobs_incremental.csv", "_raw_circleci_api_jobs") + dataflowTester.FlushTabler(&models.CircleciJob{}) + dataflowTester.Subtask(tasks.ExtractJobsMeta, taskData) + dataflowTester.VerifyTableWithOptions( + models.CircleciJob{}, + e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/_tool_circleci_jobs_incremental.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }, + ) +} diff --git a/backend/plugins/circleci/e2e/raw_tables/_raw_circleci_api_jobs_incremental.csv b/backend/plugins/circleci/e2e/raw_tables/_raw_circleci_api_jobs_incremental.csv new file mode 100644 index 00000000000..24e3fbdcb6a --- /dev/null +++ b/backend/plugins/circleci/e2e/raw_tables/_raw_circleci_api_jobs_incremental.csv @@ -0,0 +1,73 @@ +id,params,data,url,input,created_at +20,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""dependencies"" : [ ], + ""job_number"" : 5, + ""id"" : ""ab8c3282-0e74-4a41-834e-152a71280bed"", 
+ ""started_at"" : ""2023-03-25T17:52:20Z"", + ""created_at"" : ""2023-03-25T17:52:20Z"", + ""name"" : ""build"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""failed"", + ""type"" : ""build"", + ""stopped_at"" : ""2023-03-25T17:52:23Z"" + }",https://circleci.com/api/v2/workflow/6731159f-5275-4bfa-ba70-39d343d63814/job,"{""id"": ""6731159f-5275-4bfa-ba70-39d343d63814"", ""tag"": """", ""name"": """", ""status"": """", ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""errored_by"": """", ""started_by"": """", ""stopped_at"": null, ""canceled_by"": """", ""pipeline_id"": ""23622ee4-e150-4920-9d66-81533fa765a4"", ""ConnectionId"": 0, ""_raw_data_id"": 0, ""duration_sec"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""pipeline_number"": 0, ""_raw_data_params"": """", ""_raw_data_remark"": """"}",2023-03-28 15:39:58.775 +21,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""dependencies"" : [ ], + ""job_number"" : 7, + ""id"" : ""a00f80bc-f759-4900-97a5-2d121d80bde8"", + ""started_at"" : ""2023-03-25T17:56:27Z"", + ""created_at"" : ""2023-03-25T17:56:27Z"", + ""name"" : ""build"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""failed"", + ""type"" : ""build"", + ""stopped_at"" : ""2023-03-25T17:56:43Z"" + }",https://circleci.com/api/v2/workflow/7370985a-9de3-4a47-acbc-e6a1fe8e5812/job,"{""id"": ""7370985a-9de3-4a47-acbc-e6a1fe8e5812"", ""tag"": """", ""name"": """", ""status"": """", ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""errored_by"": """", ""started_by"": """", ""stopped_at"": null, ""canceled_by"": """", ""pipeline_id"": ""866e967d-f826-4470-aed6-fc0c92e98703"", ""ConnectionId"": 0, ""_raw_data_id"": 0, ""duration_sec"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""pipeline_number"": 0, ""_raw_data_params"": """", ""_raw_data_remark"": 
""""}",2023-03-28 15:39:58.953 +23,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""dependencies"" : [ ], + ""job_number"" : 9, + ""id"" : ""2ff3594e-9da1-4306-aefa-77b72a97971e"", + ""started_at"" : ""2023-03-25T18:13:25Z"", + ""created_at"" : ""2023-03-25T18:13:25Z"", + ""name"" : ""build"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""success"", + ""type"" : ""build"", + ""stopped_at"" : ""2023-03-25T18:13:38Z"" + }",https://circleci.com/api/v2/workflow/fd0bd4f5-264f-4e3c-a151-06153c018f78/job,"{""id"": ""fd0bd4f5-264f-4e3c-a151-06153c018f78"", ""tag"": """", ""name"": """", ""status"": """", ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""errored_by"": """", ""started_by"": """", ""stopped_at"": null, ""canceled_by"": """", ""pipeline_id"": ""7fcc1623-edcc-4a76-ad20-cd81aa83519f"", ""ConnectionId"": 0, ""_raw_data_id"": 0, ""duration_sec"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""pipeline_number"": 0, ""_raw_data_params"": """", ""_raw_data_remark"": """"}",2023-03-28 15:39:59.299 +24,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""dependencies"" : [ ], + ""job_number"" : 6, + ""id"" : ""76c1f2cc-27ea-47aa-8167-48d2633abdba"", + ""started_at"" : ""2023-03-25T17:54:11Z"", + ""created_at"" : ""2023-03-25T17:54:11Z"", + ""name"" : ""build"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""failed"", + ""type"" : ""build"", + ""stopped_at"" : ""2023-03-25T17:54:23Z"" + }",https://circleci.com/api/v2/workflow/b9ab7bbe-2f30-4c59-b4e2-eb2005bffb14/job,"{""id"": ""b9ab7bbe-2f30-4c59-b4e2-eb2005bffb14"", ""tag"": """", ""name"": """", ""status"": """", ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""errored_by"": """", ""started_by"": """", ""stopped_at"": null, ""canceled_by"": """", ""pipeline_id"": 
""70f3eb15-3b94-4f80-b65e-f23f4b74c33a"", ""ConnectionId"": 0, ""_raw_data_id"": 0, ""duration_sec"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""pipeline_number"": 0, ""_raw_data_params"": """", ""_raw_data_remark"": """"}",2023-03-28 15:39:59.482 +25,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""dependencies"" : [ ], + ""job_number"" : 4, + ""id"" : ""a4af3dd5-a3ae-48e8-b634-e2d63aafbb5b"", + ""started_at"" : ""2023-03-25T17:50:22Z"", + ""created_at"" : ""2023-03-25T17:50:22Z"", + ""name"" : ""build"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""failed"", + ""type"" : ""build"", + ""stopped_at"" : ""2023-03-25T17:50:25Z"" + }",https://circleci.com/api/v2/workflow/c7df82a6-0d2b-4e19-a36a-3f3aa9fd3943/job,"{""id"": ""c7df82a6-0d2b-4e19-a36a-3f3aa9fd3943"", ""tag"": """", ""name"": """", ""status"": """", ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""errored_by"": """", ""started_by"": """", ""stopped_at"": null, ""canceled_by"": """", ""pipeline_id"": ""2c45280f-7fb3-4025-b703-a547c4a94916"", ""ConnectionId"": 0, ""_raw_data_id"": 0, ""duration_sec"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""pipeline_number"": 0, ""_raw_data_params"": """", ""_raw_data_remark"": """"}",2023-03-28 15:39:59.663 +27,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""dependencies"" : [ ], + ""job_number"" : 8, + ""id"" : ""004e3e27-17d7-4ccb-9b21-a7f55bcf2b3e"", + ""started_at"" : ""2023-03-25T18:06:15Z"", + ""created_at"" : ""2023-03-25T18:06:15Z"", + ""name"" : ""build"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""failed"", + ""type"" : ""build"", + ""stopped_at"" : ""2023-03-25T18:06:28Z"" + }",https://circleci.com/api/v2/workflow/fc76deef-bcdd-4856-8e96-a8e2d1c5a85f/job,"{""id"": ""fc76deef-bcdd-4856-8e96-a8e2d1c5a85f"", ""tag"": """", ""name"": """", ""status"": 
"""", ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""errored_by"": """", ""started_by"": """", ""stopped_at"": null, ""canceled_by"": """", ""pipeline_id"": ""afe0cabe-e7ee-4eb7-bf13-bb6170d139f0"", ""ConnectionId"": 0, ""_raw_data_id"": 0, ""duration_sec"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""pipeline_number"": 0, ""_raw_data_params"": """", ""_raw_data_remark"": """"}",2023-03-28 15:40:00.022 diff --git a/backend/plugins/circleci/e2e/raw_tables/_raw_circleci_api_workflows_incremental.csv b/backend/plugins/circleci/e2e/raw_tables/_raw_circleci_api_workflows_incremental.csv new file mode 100644 index 00000000000..2a70cdd511d --- /dev/null +++ b/backend/plugins/circleci/e2e/raw_tables/_raw_circleci_api_workflows_incremental.csv @@ -0,0 +1,67 @@ +id,params,data,url,input,created_at +19,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""pipeline_id"" : ""23622ee4-e150-4920-9d66-81533fa765a4"", + ""id"" : ""6731159f-5275-4bfa-ba70-39d343d63814"", + ""name"" : ""workflow"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""failed"", + ""started_by"" : ""1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7"", + ""pipeline_number"" : 5, + ""created_at"" : ""2023-03-25T17:52:15Z"", + ""stopped_at"" : ""2023-03-25T17:52:23Z"" + }",https://circleci.com/api/v2/pipeline/23622ee4-e150-4920-9d66-81533fa765a4/workflow,"{""id"": ""23622ee4-e150-4920-9d66-81533fa765a4"", ""vcs"": {""tag"": """", ""branch"": """", ""commit"": {""body"": """", ""subject"": """"}, ""revision"": """", ""review_id"": """", ""review_url"": """", ""provider_name"": """", ""origin_repository_url"": """", ""target_repository_url"": """"}, ""state"": """", ""errors"": null, ""number"": 0, ""trigger"": {""type"": """", ""actor"": {""login"": """", ""avatar_url"": """"}, ""received_at"": """"}, ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": 
null, ""updated_at"": null, ""ConnectionId"": 0, ""_raw_data_id"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""_raw_data_params"": """", ""_raw_data_remark"": """", ""trigger_parameters"": null}",2023-03-28 15:39:56.782 +20,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""pipeline_id"" : ""2c45280f-7fb3-4025-b703-a547c4a94916"", + ""id"" : ""c7df82a6-0d2b-4e19-a36a-3f3aa9fd3943"", + ""name"" : ""workflow"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""failed"", + ""started_by"" : ""1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7"", + ""pipeline_number"" : 4, + ""created_at"" : ""2023-03-25T17:50:20Z"", + ""stopped_at"" : ""2023-03-25T17:50:25Z"" + }",https://circleci.com/api/v2/pipeline/2c45280f-7fb3-4025-b703-a547c4a94916/workflow,"{""id"": ""2c45280f-7fb3-4025-b703-a547c4a94916"", ""vcs"": {""tag"": """", ""branch"": """", ""commit"": {""body"": """", ""subject"": """"}, ""revision"": """", ""review_id"": """", ""review_url"": """", ""provider_name"": """", ""origin_repository_url"": """", ""target_repository_url"": """"}, ""state"": """", ""errors"": null, ""number"": 0, ""trigger"": {""type"": """", ""actor"": {""login"": """", ""avatar_url"": """"}, ""received_at"": """"}, ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""updated_at"": null, ""ConnectionId"": 0, ""_raw_data_id"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""_raw_data_params"": """", ""_raw_data_remark"": """", ""trigger_parameters"": null}",2023-03-28 15:39:56.849 +22,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""pipeline_id"" : ""7fcc1623-edcc-4a76-ad20-cd81aa83519f"", + ""id"" : ""fd0bd4f5-264f-4e3c-a151-06153c018f78"", + ""name"" : ""workflow"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""success"", + ""started_by"" : ""1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7"", + ""pipeline_number"" : 9, + 
""created_at"" : ""2023-03-25T18:13:21Z"", + ""stopped_at"" : ""2023-03-25T18:13:38Z"" + }",https://circleci.com/api/v2/pipeline/7fcc1623-edcc-4a76-ad20-cd81aa83519f/workflow,"{""id"": ""7fcc1623-edcc-4a76-ad20-cd81aa83519f"", ""vcs"": {""tag"": """", ""branch"": """", ""commit"": {""body"": """", ""subject"": """"}, ""revision"": """", ""review_id"": """", ""review_url"": """", ""provider_name"": """", ""origin_repository_url"": """", ""target_repository_url"": """"}, ""state"": """", ""errors"": null, ""number"": 0, ""trigger"": {""type"": """", ""actor"": {""login"": """", ""avatar_url"": """"}, ""received_at"": """"}, ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""updated_at"": null, ""ConnectionId"": 0, ""_raw_data_id"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""_raw_data_params"": """", ""_raw_data_remark"": """", ""trigger_parameters"": null}",2023-03-28 15:39:57.179 +24,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""pipeline_id"" : ""866e967d-f826-4470-aed6-fc0c92e98703"", + ""id"" : ""7370985a-9de3-4a47-acbc-e6a1fe8e5812"", + ""name"" : ""workflow"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""failed"", + ""started_by"" : ""1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7"", + ""pipeline_number"" : 7, + ""created_at"" : ""2023-03-25T17:56:24Z"", + ""stopped_at"" : ""2023-03-25T17:56:43Z"" + }",https://circleci.com/api/v2/pipeline/866e967d-f826-4470-aed6-fc0c92e98703/workflow,"{""id"": ""866e967d-f826-4470-aed6-fc0c92e98703"", ""vcs"": {""tag"": """", ""branch"": """", ""commit"": {""body"": """", ""subject"": """"}, ""revision"": """", ""review_id"": """", ""review_url"": """", ""provider_name"": """", ""origin_repository_url"": """", ""target_repository_url"": """"}, ""state"": """", ""errors"": null, ""number"": 0, ""trigger"": {""type"": """", ""actor"": {""login"": """", ""avatar_url"": """"}, ""received_at"": """"}, 
""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""updated_at"": null, ""ConnectionId"": 0, ""_raw_data_id"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""_raw_data_params"": """", ""_raw_data_remark"": """", ""trigger_parameters"": null}",2023-03-28 15:39:57.524 +26,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""pipeline_id"" : ""70f3eb15-3b94-4f80-b65e-f23f4b74c33a"", + ""id"" : ""b9ab7bbe-2f30-4c59-b4e2-eb2005bffb14"", + ""name"" : ""workflow"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : ""failed"", + ""started_by"" : ""1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7"", + ""pipeline_number"" : 6, + ""created_at"" : ""2023-03-25T17:54:09Z"", + ""stopped_at"" : ""2023-03-25T17:54:23Z"" + }",https://circleci.com/api/v2/pipeline/70f3eb15-3b94-4f80-b65e-f23f4b74c33a/workflow,"{""id"": ""70f3eb15-3b94-4f80-b65e-f23f4b74c33a"", ""vcs"": {""tag"": """", ""branch"": """", ""commit"": {""body"": """", ""subject"": """"}, ""revision"": """", ""review_id"": """", ""review_url"": """", ""provider_name"": """", ""origin_repository_url"": """", ""target_repository_url"": """"}, ""state"": """", ""errors"": null, ""number"": 0, ""trigger"": {""type"": """", ""actor"": {""login"": """", ""avatar_url"": """"}, ""received_at"": """"}, ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""updated_at"": null, ""ConnectionId"": 0, ""_raw_data_id"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""_raw_data_params"": """", ""_raw_data_remark"": """", ""trigger_parameters"": null}",2023-03-28 15:39:58.137 +27,"{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}","{ + ""pipeline_id"" : ""afe0cabe-e7ee-4eb7-bf13-bb6170d139f0"", + ""id"" : ""fc76deef-bcdd-4856-8e96-a8e2d1c5a85f"", + ""name"" : ""workflow"", + ""project_slug"" : ""gh/coldgust/coldgust.github.io"", + ""status"" : 
""failed"", + ""started_by"" : ""1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7"", + ""pipeline_number"" : 8, + ""created_at"" : ""2023-03-25T18:06:13Z"", + ""stopped_at"" : ""2023-03-25T18:06:28Z"" + }",https://circleci.com/api/v2/pipeline/afe0cabe-e7ee-4eb7-bf13-bb6170d139f0/workflow,"{""id"": ""afe0cabe-e7ee-4eb7-bf13-bb6170d139f0"", ""vcs"": {""tag"": """", ""branch"": """", ""commit"": {""body"": """", ""subject"": """"}, ""revision"": """", ""review_id"": """", ""review_url"": """", ""provider_name"": """", ""origin_repository_url"": """", ""target_repository_url"": """"}, ""state"": """", ""errors"": null, ""number"": 0, ""trigger"": {""type"": """", ""actor"": {""login"": """", ""avatar_url"": """"}, ""received_at"": """"}, ""createdAt"": ""0001-01-01T00:00:00Z"", ""updatedAt"": ""0001-01-01T00:00:00Z"", ""created_at"": null, ""updated_at"": null, ""ConnectionId"": 0, ""_raw_data_id"": 0, ""project_slug"": """", ""_raw_data_table"": """", ""_raw_data_params"": """", ""_raw_data_remark"": """", ""trigger_parameters"": null}",2023-03-28 15:39:58.245 diff --git a/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_jobs.csv b/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_jobs.csv index 82f2b4c45fc..517060bd3e0 100644 --- a/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_jobs.csv +++ b/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_jobs.csv @@ -1,10 +1,10 @@ -connection_id,workflow_id,id,project_slug,canceled_by,dependencies,job_number,queued_at,started_at,name,approved_by,status,type,approval_request_id,stopped_at,duration_sec,pipeline_id,duration -1,6731159f-5275-4bfa-ba70-39d343d63814,ab8c3282-0e74-4a41-834e-152a71280bed,github/coldgust/coldgust.github.io,,[],5,,2023-03-25T17:52:20.000+00:00,build,,failed,build,,2023-03-25T17:52:23.000+00:00,3,23622ee4-e150-4920-9d66-81533fa765a4,0 
-1,7370985a-9de3-4a47-acbc-e6a1fe8e5812,a00f80bc-f759-4900-97a5-2d121d80bde8,github/coldgust/coldgust.github.io,,[],7,,2023-03-25T17:56:27.000+00:00,build,,failed,build,,2023-03-25T17:56:43.000+00:00,16,866e967d-f826-4470-aed6-fc0c92e98703,0 -1,89054eb2-8e85-4f5c-9a93-66d753a0e970,c46092f9-6f82-4a52-8d8b-bd70d365dfc2,github/coldgust/coldgust.github.io,,[],3,,2023-03-25T17:39:25.000+00:00,say-hello,,success,build,,2023-03-25T17:39:28.000+00:00,3,625ca634-68fe-4515-91f0-7ba8af51dc99,0 -1,8971a56b-5547-4824-94dd-07bb467524c5,7b96e45b-b10e-47a0-95d0-96580b88bdda,github/coldgust/coldgust.github.io,,[],2,,2023-03-25T17:12:20.000+00:00,say-hello,,success,build,,2023-03-25T17:12:23.000+00:00,3,87aad008-1ad5-486a-8174-fdeed846561a,0 -1,8fe60291-68f7-40e2-acec-d99bff4da713,afde48dd-7319-4973-b3c8-e00308ff7667,github/coldgust/coldgust.github.io,,[],1,,2023-03-25T17:12:20.000+00:00,say-hello,,success,build,,2023-03-25T17:12:22.000+00:00,2,afef32b3-5ffe-48d2-8d9e-46dcedd82554,0 -1,b9ab7bbe-2f30-4c59-b4e2-eb2005bffb14,76c1f2cc-27ea-47aa-8167-48d2633abdba,github/coldgust/coldgust.github.io,,[],6,,2023-03-25T17:54:11.000+00:00,build,,failed,build,,2023-03-25T17:54:23.000+00:00,12,70f3eb15-3b94-4f80-b65e-f23f4b74c33a,0 -1,c7df82a6-0d2b-4e19-a36a-3f3aa9fd3943,a4af3dd5-a3ae-48e8-b634-e2d63aafbb5b,github/coldgust/coldgust.github.io,,[],4,,2023-03-25T17:50:22.000+00:00,build,,failed,build,,2023-03-25T17:50:25.000+00:00,3,2c45280f-7fb3-4025-b703-a547c4a94916,0 -1,fc76deef-bcdd-4856-8e96-a8e2d1c5a85f,004e3e27-17d7-4ccb-9b21-a7f55bcf2b3e,github/coldgust/coldgust.github.io,,[],8,,2023-03-25T18:06:15.000+00:00,build,,failed,build,,2023-03-25T18:06:28.000+00:00,13,afe0cabe-e7ee-4eb7-bf13-bb6170d139f0,0 -1,fd0bd4f5-264f-4e3c-a151-06153c018f78,2ff3594e-9da1-4306-aefa-77b72a97971e,github/coldgust/coldgust.github.io,,[],9,,2023-03-25T18:13:25.000+00:00,build,,success,build,,2023-03-25T18:13:38.000+00:00,13,7fcc1623-edcc-4a76-ad20-cd81aa83519f,0 
+connection_id,workflow_id,id,project_slug,canceled_by,dependencies,job_number,created_date,queued_at,started_at,name,approved_by,status,type,approval_request_id,stopped_at,duration_sec,pipeline_id,duration +1,6731159f-5275-4bfa-ba70-39d343d63814,ab8c3282-0e74-4a41-834e-152a71280bed,github/coldgust/coldgust.github.io,,[],5,2023-03-25T17:52:20.000+00:00,,2023-03-25T17:52:20.000+00:00,build,,failed,build,,2023-03-25T17:52:23.000+00:00,3,23622ee4-e150-4920-9d66-81533fa765a4,0 +1,7370985a-9de3-4a47-acbc-e6a1fe8e5812,a00f80bc-f759-4900-97a5-2d121d80bde8,github/coldgust/coldgust.github.io,,[],7,2023-03-25T17:56:27.000+00:00,,2023-03-25T17:56:27.000+00:00,build,,failed,build,,2023-03-25T17:56:43.000+00:00,16,866e967d-f826-4470-aed6-fc0c92e98703,0 +1,89054eb2-8e85-4f5c-9a93-66d753a0e970,c46092f9-6f82-4a52-8d8b-bd70d365dfc2,github/coldgust/coldgust.github.io,,[],3,2023-03-25T17:39:25.000+00:00,,2023-03-25T17:39:25.000+00:00,say-hello,,success,build,,2023-03-25T17:39:28.000+00:00,3,625ca634-68fe-4515-91f0-7ba8af51dc99,0 +1,8971a56b-5547-4824-94dd-07bb467524c5,7b96e45b-b10e-47a0-95d0-96580b88bdda,github/coldgust/coldgust.github.io,,[],2,2023-03-25T17:12:20.000+00:00,,2023-03-25T17:12:20.000+00:00,say-hello,,success,build,,2023-03-25T17:12:23.000+00:00,3,87aad008-1ad5-486a-8174-fdeed846561a,0 +1,8fe60291-68f7-40e2-acec-d99bff4da713,afde48dd-7319-4973-b3c8-e00308ff7667,github/coldgust/coldgust.github.io,,[],1,2023-03-25T17:12:20.000+00:00,,2023-03-25T17:12:20.000+00:00,say-hello,,success,build,,2023-03-25T17:12:22.000+00:00,2,afef32b3-5ffe-48d2-8d9e-46dcedd82554,0 +1,b9ab7bbe-2f30-4c59-b4e2-eb2005bffb14,76c1f2cc-27ea-47aa-8167-48d2633abdba,github/coldgust/coldgust.github.io,,[],6,2023-03-25T17:54:11.000+00:00,,2023-03-25T17:54:11.000+00:00,build,,failed,build,,2023-03-25T17:54:23.000+00:00,12,70f3eb15-3b94-4f80-b65e-f23f4b74c33a,0 
+1,c7df82a6-0d2b-4e19-a36a-3f3aa9fd3943,a4af3dd5-a3ae-48e8-b634-e2d63aafbb5b,github/coldgust/coldgust.github.io,,[],4,2023-03-25T17:50:22.000+00:00,,2023-03-25T17:50:22.000+00:00,build,,failed,build,,2023-03-25T17:50:25.000+00:00,3,2c45280f-7fb3-4025-b703-a547c4a94916,0 +1,fc76deef-bcdd-4856-8e96-a8e2d1c5a85f,004e3e27-17d7-4ccb-9b21-a7f55bcf2b3e,github/coldgust/coldgust.github.io,,[],8,2023-03-25T18:06:15.000+00:00,,2023-03-25T18:06:15.000+00:00,build,,failed,build,,2023-03-25T18:06:28.000+00:00,13,afe0cabe-e7ee-4eb7-bf13-bb6170d139f0,0 +1,fd0bd4f5-264f-4e3c-a151-06153c018f78,2ff3594e-9da1-4306-aefa-77b72a97971e,github/coldgust/coldgust.github.io,,[],9,2023-03-25T18:13:25.000+00:00,,2023-03-25T18:13:25.000+00:00,build,,success,build,,2023-03-25T18:13:38.000+00:00,13,7fcc1623-edcc-4a76-ad20-cd81aa83519f,0 diff --git a/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_jobs_incremental.csv b/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_jobs_incremental.csv new file mode 100644 index 00000000000..ea9a23a4456 --- /dev/null +++ b/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_jobs_incremental.csv @@ -0,0 +1,7 @@ +connection_id,workflow_id,id,project_slug,canceled_by,dependencies,job_number,created_date,queued_at,started_at,name,approved_by,status,type,approval_request_id,stopped_at,duration_sec,pipeline_id,duration +1,6731159f-5275-4bfa-ba70-39d343d63814,ab8c3282-0e74-4a41-834e-152a71280bed,github/coldgust/coldgust.github.io,,[],5,2023-03-25T17:52:20.000+00:00,,2023-03-25T17:52:20.000+00:00,build,,failed,build,,2023-03-25T17:52:23.000+00:00,3,23622ee4-e150-4920-9d66-81533fa765a4,0 +1,7370985a-9de3-4a47-acbc-e6a1fe8e5812,a00f80bc-f759-4900-97a5-2d121d80bde8,github/coldgust/coldgust.github.io,,[],7,2023-03-25T17:56:27.000+00:00,,2023-03-25T17:56:27.000+00:00,build,,failed,build,,2023-03-25T17:56:43.000+00:00,16,866e967d-f826-4470-aed6-fc0c92e98703,0 
+1,b9ab7bbe-2f30-4c59-b4e2-eb2005bffb14,76c1f2cc-27ea-47aa-8167-48d2633abdba,github/coldgust/coldgust.github.io,,[],6,2023-03-25T17:54:11.000+00:00,,2023-03-25T17:54:11.000+00:00,build,,failed,build,,2023-03-25T17:54:23.000+00:00,12,70f3eb15-3b94-4f80-b65e-f23f4b74c33a,0 +1,c7df82a6-0d2b-4e19-a36a-3f3aa9fd3943,a4af3dd5-a3ae-48e8-b634-e2d63aafbb5b,github/coldgust/coldgust.github.io,,[],4,2023-03-25T17:50:22.000+00:00,,2023-03-25T17:50:22.000+00:00,build,,failed,build,,2023-03-25T17:50:25.000+00:00,3,2c45280f-7fb3-4025-b703-a547c4a94916,0 +1,fc76deef-bcdd-4856-8e96-a8e2d1c5a85f,004e3e27-17d7-4ccb-9b21-a7f55bcf2b3e,github/coldgust/coldgust.github.io,,[],8,2023-03-25T18:06:15.000+00:00,,2023-03-25T18:06:15.000+00:00,build,,failed,build,,2023-03-25T18:06:28.000+00:00,13,afe0cabe-e7ee-4eb7-bf13-bb6170d139f0,0 +1,fd0bd4f5-264f-4e3c-a151-06153c018f78,2ff3594e-9da1-4306-aefa-77b72a97971e,github/coldgust/coldgust.github.io,,[],9,2023-03-25T18:13:25.000+00:00,,2023-03-25T18:13:25.000+00:00,build,,success,build,,2023-03-25T18:13:38.000+00:00,13,7fcc1623-edcc-4a76-ad20-cd81aa83519f,0 diff --git a/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_pipelines.csv b/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_pipelines.csv index dd01015be55..3d55ea884ea 100644 --- a/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_pipelines.csv +++ b/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_pipelines.csv @@ -1,11 +1,11 @@ -connection_id,id,project_slug,errors,updated_at,number,trigger_parameters,state,created_at,trigger,vcs,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark -1,23622ee4-e150-4920-9d66-81533fa765a4,github/coldgust/coldgust.github.io,[],2023-03-25 17:52:15.651,5,,created,2023-03-25 
17:52:15.651,"{""type"":""webhook"",""received_at"":""2023-03-25T17:52:15.446Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""d65e36c3316c6350c0eeb2dcaa6411a49d2aa29d"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,26,"" -1,2c45280f-7fb3-4025-b703-a547c4a94916,github/coldgust/coldgust.github.io,[],2023-03-25 17:50:20.316,4,,created,2023-03-25 17:50:20.316,"{""type"":""webhook"",""received_at"":""2023-03-25T17:50:20.150Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""5d69b9233ed0579b0726caa9d36564cf90a7c63f"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,27,"" -1,625ca634-68fe-4515-91f0-7ba8af51dc99,github/coldgust/coldgust.github.io,[],2023-03-25 17:39:23.233,3,,created,2023-03-25 
17:39:23.233,"{""type"":""webhook"",""received_at"":""2023-03-25T17:39:22.921Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""circleci-project-setup"",""review_id"":"""",""review_url"":"""",""revision"":""578575831946b53b763ef2807580320f8d8ff12d"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""Add .circleci/config.yml"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,28,"" -1,70f3eb15-3b94-4f80-b65e-f23f4b74c33a,github/coldgust/coldgust.github.io,[],2023-03-25 17:54:08.987,6,,created,2023-03-25 17:54:08.987,"{""type"":""webhook"",""received_at"":""2023-03-25T17:54:08.742Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""fd096b2dbc1ae2ea4c37e7af5101b123e7f60223"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,25,"" -1,7fcc1623-edcc-4a76-ad20-cd81aa83519f,github/coldgust/coldgust.github.io,[],2023-03-25 18:13:21.590,9,,created,2023-03-25 
18:13:21.590,"{""type"":""webhook"",""received_at"":""2023-03-25T18:13:21.404Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""49874a6ac988e2b3434501d4a4554c54583e6820"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,22,"" -1,866e967d-f826-4470-aed6-fc0c92e98703,github/coldgust/coldgust.github.io,[],2023-03-25 17:56:24.409,7,,created,2023-03-25 17:56:24.409,"{""type"":""webhook"",""received_at"":""2023-03-25T17:56:24.245Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""ad3aab6479048a72b6aade91b36fdeb7125a16a6"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,24,"" -1,87aad008-1ad5-486a-8174-fdeed846561a,github/coldgust/coldgust.github.io,[],2023-03-25 17:12:18.747,2,,created,2023-03-25 
17:12:18.747,"{""type"":""webhook"",""received_at"":""2023-03-25T17:12:18.624Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""circleci-project-setup"",""review_id"":"""",""review_url"":"""",""revision"":""9fcceda0e9749bc63e6397b23531fb6bf83634df"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""Add .circleci/config.yml"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,29,"" -1,afe0cabe-e7ee-4eb7-bf13-bb6170d139f0,github/coldgust/coldgust.github.io,[],2023-03-25 18:06:13.404,8,,created,2023-03-25 18:06:13.404,"{""type"":""webhook"",""received_at"":""2023-03-25T18:06:13.246Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""7fb6961080f03defe9b7abb35a111a560e6ab1cc"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,23,"" -1,afef32b3-5ffe-48d2-8d9e-46dcedd82554,github/coldgust/coldgust.github.io,[],2023-03-25 17:12:18.398,1,,created,2023-03-25 
17:12:18.398,"{""type"":""api"",""received_at"":""2023-03-25T17:12:16.438Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""circleci-project-setup"",""review_id"":"""",""review_url"":"""",""revision"":""9fcceda0e9749bc63e6397b23531fb6bf83634df"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":"""",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,30,"" -1,d323f088-02fa-4ed5-9696-fc2f89a27150,github/coldgust/coldgust.github.io,"[{""type"":""config-fetch"",""message"":""Failed to fetch config.yml file.""}]",2023-03-25 18:13:43.446,10,"{""circleci"":{""org_name"":""coldgust"",""project_name"":""coldgust.github.io"",""provider_name"":""github""},""git"":{""checkout_sha"":""6ba0cdaf7e7791c5bc5aa89a13b9cd184f8ad296""}}",errored,2023-03-25 18:13:43.446,"{""type"":""Decoupled Ingestion System"",""received_at"":""1970-01-01T00:00:00.000Z"",""actor"":{""login"":"""",""avatar_url"":""""}}","{""provider_name"":"""",""target_repository_url"":"""",""branch"":"""",""review_id"":"""",""review_url"":"""",""revision"":"""",""tag"":"""",""origin_repository_url"":"""",""commit"":{""subject"":"""",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,21,"" +connection_id,id,project_slug,errors,updated_date,number,trigger_parameters,state,created_date,trigger,vcs,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark 
+1,23622ee4-e150-4920-9d66-81533fa765a4,github/coldgust/coldgust.github.io,[],2023-03-25T17:52:15.651+00:00,5,,created,2023-03-25T17:52:15.651+00:00,"{""type"":""webhook"",""received_at"":""2023-03-25T17:52:15.446Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""d65e36c3316c6350c0eeb2dcaa6411a49d2aa29d"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,26,"" +1,2c45280f-7fb3-4025-b703-a547c4a94916,github/coldgust/coldgust.github.io,[],2023-03-25T17:50:20.316+00:00,4,,created,2023-03-25T17:50:20.316+00:00,"{""type"":""webhook"",""received_at"":""2023-03-25T17:50:20.150Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""5d69b9233ed0579b0726caa9d36564cf90a7c63f"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,27,"" 
+1,625ca634-68fe-4515-91f0-7ba8af51dc99,github/coldgust/coldgust.github.io,[],2023-03-25T17:39:23.233+00:00,3,,created,2023-03-25T17:39:23.233+00:00,"{""type"":""webhook"",""received_at"":""2023-03-25T17:39:22.921Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""circleci-project-setup"",""review_id"":"""",""review_url"":"""",""revision"":""578575831946b53b763ef2807580320f8d8ff12d"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""Add .circleci/config.yml"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,28,"" +1,70f3eb15-3b94-4f80-b65e-f23f4b74c33a,github/coldgust/coldgust.github.io,[],2023-03-25T17:54:08.987+00:00,6,,created,2023-03-25T17:54:08.987+00:00,"{""type"":""webhook"",""received_at"":""2023-03-25T17:54:08.742Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""fd096b2dbc1ae2ea4c37e7af5101b123e7f60223"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,25,"" 
+1,7fcc1623-edcc-4a76-ad20-cd81aa83519f,github/coldgust/coldgust.github.io,[],2023-03-25T18:13:21.590+00:00,9,,created,2023-03-25T18:13:21.590+00:00,"{""type"":""webhook"",""received_at"":""2023-03-25T18:13:21.404Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""49874a6ac988e2b3434501d4a4554c54583e6820"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,22,"" +1,866e967d-f826-4470-aed6-fc0c92e98703,github/coldgust/coldgust.github.io,[],2023-03-25T17:56:24.409+00:00,7,,created,2023-03-25T17:56:24.409+00:00,"{""type"":""webhook"",""received_at"":""2023-03-25T17:56:24.245Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""ad3aab6479048a72b6aade91b36fdeb7125a16a6"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,24,"" 
+1,87aad008-1ad5-486a-8174-fdeed846561a,github/coldgust/coldgust.github.io,[],2023-03-25T17:12:18.747+00:00,2,,created,2023-03-25T17:12:18.747+00:00,"{""type"":""webhook"",""received_at"":""2023-03-25T17:12:18.624Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""circleci-project-setup"",""review_id"":"""",""review_url"":"""",""revision"":""9fcceda0e9749bc63e6397b23531fb6bf83634df"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""Add .circleci/config.yml"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,29,"" +1,afe0cabe-e7ee-4eb7-bf13-bb6170d139f0,github/coldgust/coldgust.github.io,[],2023-03-25T18:06:13.404+00:00,8,,created,2023-03-25T18:06:13.404+00:00,"{""type"":""webhook"",""received_at"":""2023-03-25T18:06:13.246Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""master"",""review_id"":"""",""review_url"":"""",""revision"":""7fb6961080f03defe9b7abb35a111a560e6ab1cc"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":""add circle"",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,23,"" 
+1,afef32b3-5ffe-48d2-8d9e-46dcedd82554,github/coldgust/coldgust.github.io,[],2023-03-25T17:12:18.398+00:00,1,,created,2023-03-25T17:12:18.398+00:00,"{""type"":""api"",""received_at"":""2023-03-25T17:12:16.438Z"",""actor"":{""login"":""coldgust"",""avatar_url"":""https://avatars.githubusercontent.com/u/115207009?v=4""}}","{""provider_name"":""GitHub"",""target_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""branch"":""circleci-project-setup"",""review_id"":"""",""review_url"":"""",""revision"":""9fcceda0e9749bc63e6397b23531fb6bf83634df"",""tag"":"""",""origin_repository_url"":""https://github.com/coldgust/coldgust.github.io"",""commit"":{""subject"":"""",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,30,"" +1,d323f088-02fa-4ed5-9696-fc2f89a27150,github/coldgust/coldgust.github.io,"[{""type"":""config-fetch"",""message"":""Failed to fetch config.yml file.""}]",2023-03-25T18:13:43.446+00:00,10,"{""circleci"":{""org_name"":""coldgust"",""project_name"":""coldgust.github.io"",""provider_name"":""github""},""git"":{""checkout_sha"":""6ba0cdaf7e7791c5bc5aa89a13b9cd184f8ad296""}}",errored,2023-03-25T18:13:43.446+00:00,"{""type"":""Decoupled Ingestion System"",""received_at"":""1970-01-01T00:00:00.000Z"",""actor"":{""login"":"""",""avatar_url"":""""}}","{""provider_name"":"""",""target_repository_url"":"""",""branch"":"""",""review_id"":"""",""review_url"":"""",""revision"":"""",""tag"":"""",""origin_repository_url"":"""",""commit"":{""subject"":"""",""body"":""""}}","{""ConnectionId"":1,""ProjectSlug"":""github/coldgust/coldgust.github.io""}",_raw_circleci_api_pipelines,21,"" diff --git a/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_workflows_incremental.csv b/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_workflows_incremental.csv new file mode 100644 index 00000000000..529e847441b --- /dev/null +++ 
b/backend/plugins/circleci/e2e/snapshot_tables/_tool_circleci_workflows_incremental.csv @@ -0,0 +1,7 @@ +connection_id,id,project_slug,pipeline_id,canceled_by,name,errored_by,tag,status,started_by,pipeline_number,created_date,stopped_date,duration_sec +1,6731159f-5275-4bfa-ba70-39d343d63814,github/coldgust/coldgust.github.io,23622ee4-e150-4920-9d66-81533fa765a4,,workflow,,,failed,1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7,5,2023-03-25T17:52:15.000+00:00,2023-03-25T17:52:23.000+00:00,8 +1,7370985a-9de3-4a47-acbc-e6a1fe8e5812,github/coldgust/coldgust.github.io,866e967d-f826-4470-aed6-fc0c92e98703,,workflow,,,failed,1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7,7,2023-03-25T17:56:24.000+00:00,2023-03-25T17:56:43.000+00:00,19 +1,b9ab7bbe-2f30-4c59-b4e2-eb2005bffb14,github/coldgust/coldgust.github.io,70f3eb15-3b94-4f80-b65e-f23f4b74c33a,,workflow,,,failed,1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7,6,2023-03-25T17:54:09.000+00:00,2023-03-25T17:54:23.000+00:00,14 +1,c7df82a6-0d2b-4e19-a36a-3f3aa9fd3943,github/coldgust/coldgust.github.io,2c45280f-7fb3-4025-b703-a547c4a94916,,workflow,,,failed,1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7,4,2023-03-25T17:50:20.000+00:00,2023-03-25T17:50:25.000+00:00,5 +1,fc76deef-bcdd-4856-8e96-a8e2d1c5a85f,github/coldgust/coldgust.github.io,afe0cabe-e7ee-4eb7-bf13-bb6170d139f0,,workflow,,,failed,1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7,8,2023-03-25T18:06:13.000+00:00,2023-03-25T18:06:28.000+00:00,15 +1,fd0bd4f5-264f-4e3c-a151-06153c018f78,github/coldgust/coldgust.github.io,7fcc1623-edcc-4a76-ad20-cd81aa83519f,,workflow,,,success,1c762fc2-b0fb-4fe2-97d2-5e54ddd1eba7,9,2023-03-25T18:13:21.000+00:00,2023-03-25T18:13:38.000+00:00,17 diff --git a/backend/plugins/circleci/e2e/snapshot_tables/cicd_pipelines.csv b/backend/plugins/circleci/e2e/snapshot_tables/cicd_pipelines.csv index 45f61d9cea6..54a7ede5d59 100644 --- a/backend/plugins/circleci/e2e/snapshot_tables/cicd_pipelines.csv +++ b/backend/plugins/circleci/e2e/snapshot_tables/cicd_pipelines.csv @@ -1,10 +1,10 @@ 
-id,name,display_title,url,result,status,original_status,original_result,type,duration_sec,queued_duration_sec,environment,queued_date,started_date,cicd_scope_id -circleci:CircleciWorkflow:1:6731159f-5275-4bfa-ba70-39d343d63814,workflow,workflow#5,,FAILURE,DONE,failed,,,8,,PRODUCTION,,2023-03-25T17:52:15.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io -circleci:CircleciWorkflow:1:7370985a-9de3-4a47-acbc-e6a1fe8e5812,workflow,workflow#7,,FAILURE,DONE,failed,,,19,,PRODUCTION,,2023-03-25T17:56:24.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io -circleci:CircleciWorkflow:1:89054eb2-8e85-4f5c-9a93-66d753a0e970,say-hello-workflow,say-hello-workflow#3,,SUCCESS,DONE,success,,,5,,PRODUCTION,,2023-03-25T17:39:23.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io -circleci:CircleciWorkflow:1:8971a56b-5547-4824-94dd-07bb467524c5,say-hello-workflow,say-hello-workflow#2,,SUCCESS,DONE,success,,,5,,PRODUCTION,,2023-03-25T17:12:18.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io -circleci:CircleciWorkflow:1:8fe60291-68f7-40e2-acec-d99bff4da713,say-hello-workflow,say-hello-workflow#1,,SUCCESS,DONE,success,,,4,,PRODUCTION,,2023-03-25T17:12:18.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io -circleci:CircleciWorkflow:1:b9ab7bbe-2f30-4c59-b4e2-eb2005bffb14,workflow,workflow#6,,FAILURE,DONE,failed,,,14,,PRODUCTION,,2023-03-25T17:54:09.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io -circleci:CircleciWorkflow:1:c7df82a6-0d2b-4e19-a36a-3f3aa9fd3943,workflow,workflow#4,,FAILURE,DONE,failed,,,5,,PRODUCTION,,2023-03-25T17:50:20.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io -circleci:CircleciWorkflow:1:fc76deef-bcdd-4856-8e96-a8e2d1c5a85f,workflow,workflow#8,,FAILURE,DONE,failed,,,15,,PRODUCTION,,2023-03-25T18:06:13.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io 
-circleci:CircleciWorkflow:1:fd0bd4f5-264f-4e3c-a151-06153c018f78,workflow,workflow#9,,SUCCESS,DONE,success,,,17,,PRODUCTION,,2023-03-25T18:13:21.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io +id,name,display_title,url,result,status,original_status,original_result,type,duration_sec,queued_duration_sec,environment,queued_date,started_date,created_date,cicd_scope_id +circleci:CircleciWorkflow:1:6731159f-5275-4bfa-ba70-39d343d63814,workflow,workflow#5,,FAILURE,DONE,failed,,,8,,PRODUCTION,,2023-03-25T17:52:15.000+00:00,2023-03-25T17:52:15.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io +circleci:CircleciWorkflow:1:7370985a-9de3-4a47-acbc-e6a1fe8e5812,workflow,workflow#7,,FAILURE,DONE,failed,,,19,,PRODUCTION,,2023-03-25T17:56:24.000+00:00,2023-03-25T17:56:24.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io +circleci:CircleciWorkflow:1:89054eb2-8e85-4f5c-9a93-66d753a0e970,say-hello-workflow,say-hello-workflow#3,,SUCCESS,DONE,success,,,5,,PRODUCTION,,2023-03-25T17:39:23.000+00:00,2023-03-25T17:39:23.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io +circleci:CircleciWorkflow:1:8971a56b-5547-4824-94dd-07bb467524c5,say-hello-workflow,say-hello-workflow#2,,SUCCESS,DONE,success,,,5,,PRODUCTION,,2023-03-25T17:12:18.000+00:00,2023-03-25T17:12:18.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io +circleci:CircleciWorkflow:1:8fe60291-68f7-40e2-acec-d99bff4da713,say-hello-workflow,say-hello-workflow#1,,SUCCESS,DONE,success,,,4,,PRODUCTION,,2023-03-25T17:12:18.000+00:00,2023-03-25T17:12:18.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io +circleci:CircleciWorkflow:1:b9ab7bbe-2f30-4c59-b4e2-eb2005bffb14,workflow,workflow#6,,FAILURE,DONE,failed,,,14,,PRODUCTION,,2023-03-25T17:54:09.000+00:00,2023-03-25T17:54:09.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io 
+circleci:CircleciWorkflow:1:c7df82a6-0d2b-4e19-a36a-3f3aa9fd3943,workflow,workflow#4,,FAILURE,DONE,failed,,,5,,PRODUCTION,,2023-03-25T17:50:20.000+00:00,2023-03-25T17:50:20.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io +circleci:CircleciWorkflow:1:fc76deef-bcdd-4856-8e96-a8e2d1c5a85f,workflow,workflow#8,,FAILURE,DONE,failed,,,15,,PRODUCTION,,2023-03-25T18:06:13.000+00:00,2023-03-25T18:06:13.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io +circleci:CircleciWorkflow:1:fd0bd4f5-264f-4e3c-a151-06153c018f78,workflow,workflow#9,,SUCCESS,DONE,success,,,17,,PRODUCTION,,2023-03-25T18:13:21.000+00:00,2023-03-25T18:13:21.000+00:00,circleci:CircleciProject:1:github/coldgust/coldgust.github.io diff --git a/backend/plugins/circleci/e2e/workflow_test.go b/backend/plugins/circleci/e2e/workflow_test.go index cc542fdb27f..ba5194cea68 100644 --- a/backend/plugins/circleci/e2e/workflow_test.go +++ b/backend/plugins/circleci/e2e/workflow_test.go @@ -72,7 +72,7 @@ func TestCircleciWorkflow(t *testing.T) { devops.CICDPipeline{}, e2ehelper.TableOptions{ CSVRelPath: "./snapshot_tables/cicd_pipelines.csv", - IgnoreFields: []string{"finished_date", "created_date", "is_child"}, + IgnoreFields: []string{"finished_date", "is_child"}, IgnoreTypes: []interface{}{domainlayer.DomainEntity{}}, }, ) diff --git a/backend/plugins/circleci/models/job.go b/backend/plugins/circleci/models/job.go index bafaf5b9ebb..c8a3bea31cb 100644 --- a/backend/plugins/circleci/models/job.go +++ b/backend/plugins/circleci/models/job.go @@ -29,7 +29,7 @@ type CircleciJob struct { CanceledBy string `gorm:"type:varchar(100)" json:"canceled_by"` Dependencies []string `gorm:"serializer:json;type:text" json:"dependencies"` JobNumber int64 `json:"job_number"` - CreatedAt *common.Iso8601Time `json:"created_at"` + CreatedDate *common.Iso8601Time `json:"created_at"` QueuedAt *common.Iso8601Time `json:"queued_at"` StartedAt *common.Iso8601Time `json:"started_at"` Name string 
`gorm:"type:varchar(255)" json:"name"` diff --git a/backend/plugins/circleci/models/migrationscripts/20260322_rename_created_at_to_created_date.go b/backend/plugins/circleci/models/migrationscripts/20260322_rename_created_at_to_created_date.go new file mode 100644 index 00000000000..497c0d3b5a6 --- /dev/null +++ b/backend/plugins/circleci/models/migrationscripts/20260322_rename_created_at_to_created_date.go @@ -0,0 +1,71 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package migrationscripts + +import ( + "time" + + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" +) + +type circleciWorkflow20260322 struct { + CreatedDate *time.Time +} + +func (circleciWorkflow20260322) TableName() string { + return "_tool_circleci_workflows" +} + +type circleciJob20260322 struct { + CreatedDate *time.Time +} + +func (circleciJob20260322) TableName() string { + return "_tool_circleci_jobs" +} + +type renameCreatedAtToCreatedDate20260322 struct{} + +func (*renameCreatedAtToCreatedDate20260322) Up(basicRes context.BasicRes) errors.Error { + db := basicRes.GetDal() + + if err := db.AutoMigrate(&circleciWorkflow20260322{}); err != nil { + return err + } + if err := db.Exec("UPDATE _tool_circleci_workflows SET created_date = created_at WHERE created_date IS NULL"); err != nil { + return err + } + + if err := db.AutoMigrate(&circleciJob20260322{}); err != nil { + return err + } + if err := db.Exec("UPDATE _tool_circleci_jobs SET created_date = created_at WHERE created_date IS NULL"); err != nil { + return err + } + + return nil +} + +func (*renameCreatedAtToCreatedDate20260322) Version() uint64 { + return 20260322000001 +} + +func (*renameCreatedAtToCreatedDate20260322) Name() string { + return "circleci rename created_at to created_date in workflows and jobs" +} diff --git a/backend/plugins/circleci/models/migrationscripts/register.go b/backend/plugins/circleci/models/migrationscripts/register.go index 5a23f27cc85..af8472a3cd7 100644 --- a/backend/plugins/circleci/models/migrationscripts/register.go +++ b/backend/plugins/circleci/models/migrationscripts/register.go @@ -24,5 +24,6 @@ func All() []plugin.MigrationScript { return []plugin.MigrationScript{ new(addInitTables), new(addFieldsToCircleciJob20231129), + new(renameCreatedAtToCreatedDate20260322), } } diff --git a/backend/plugins/circleci/models/pipeline.go b/backend/plugins/circleci/models/pipeline.go index a18cded109d..a775ac4b2f4 
100644 --- a/backend/plugins/circleci/models/pipeline.go +++ b/backend/plugins/circleci/models/pipeline.go @@ -49,8 +49,8 @@ type CircleciPipeline struct { ConnectionId uint64 `gorm:"primaryKey;type:BIGINT" json:"connectionId" mapstructure:"connectionId"` Id string `gorm:"primaryKey;type:varchar(100)" json:"id" mapstructure:"id"` ProjectSlug string `gorm:"type:varchar(255)" json:"projectSlug" mapstructure:"projectSlug"` - UpdatedDate *common.Iso8601Time `json:"updatedDate" mapstructure:"updatedDate"` - CreatedDate *common.Iso8601Time `json:"createdDate" mapstructure:"createdDate"` + UpdatedDate *common.Iso8601Time `json:"updated_at"` + CreatedDate *common.Iso8601Time `json:"created_at"` Number int64 `json:"number" mapstructure:"number"` // pipeline number within the project? TriggerParameters any `gorm:"serializer:json" json:"trigger_parameters" mapstructure:"triggerParameters"` State string `gorm:"type:varchar(100)" json:"state" mapstructure:"state"` diff --git a/backend/plugins/circleci/models/workflow.go b/backend/plugins/circleci/models/workflow.go index f4e5e9f7111..0b1a330284e 100644 --- a/backend/plugins/circleci/models/workflow.go +++ b/backend/plugins/circleci/models/workflow.go @@ -33,7 +33,7 @@ type CircleciWorkflow struct { Status string `gorm:"type:varchar(100)" json:"status"` StartedBy string `gorm:"type:varchar(100)" json:"started_by"` PipelineNumber int64 `json:"pipeline_number"` - CreatedAt *common.Iso8601Time `json:"created_at"` + CreatedDate *common.Iso8601Time `json:"created_at"` StoppedAt *common.Iso8601Time `json:"stopped_at"` DurationSec float64 `json:"duration_sec"` diff --git a/backend/plugins/circleci/tasks/job_converter.go b/backend/plugins/circleci/tasks/job_converter.go index 2cb8de1207c..5a085609d9f 100644 --- a/backend/plugins/circleci/tasks/job_converter.go +++ b/backend/plugins/circleci/tasks/job_converter.go @@ -58,8 +58,8 @@ func ConvertJobs(taskCtx plugin.SubTaskContext) errors.Error { Convert: func(inputRow interface{}) 
([]interface{}, errors.Error) { userTool := inputRow.(*models.CircleciJob) createdAt := time.Now() - if userTool.CreatedAt != nil { - createdAt = userTool.CreatedAt.ToTime() + if userTool.CreatedDate != nil { + createdAt = userTool.CreatedDate.ToTime() } task := &devops.CICDTask{ DomainEntity: domainlayer.DomainEntity{ diff --git a/backend/plugins/circleci/tasks/workflow_converter.go b/backend/plugins/circleci/tasks/workflow_converter.go index a023accc772..bf6b029e3e5 100644 --- a/backend/plugins/circleci/tasks/workflow_converter.go +++ b/backend/plugins/circleci/tasks/workflow_converter.go @@ -61,11 +61,11 @@ func ConvertWorkflows(taskCtx plugin.SubTaskContext) errors.Error { Convert: func(inputRow interface{}) ([]interface{}, errors.Error) { userTool := inputRow.(*models.CircleciWorkflow) // Skip if CreatedAt is null or empty string - still enters into the `_tool_circleci_workflows` table with null values - if userTool.CreatedAt.ToNullableTime() == nil { - logger.Info("CreatedAt is null or empty string in the CircleCI API response for %s", userTool.PipelineId) + if userTool.CreatedDate.ToNullableTime() == nil { + logger.Info("CreatedDate is null or empty string in the CircleCI API response for %s", userTool.PipelineId) return []interface{}{}, nil } - createdAt := userTool.CreatedAt.ToTime() + createdAt := userTool.CreatedDate.ToTime() pipeline := &devops.CICDPipeline{ DomainEntity: domainlayer.DomainEntity{ Id: getWorkflowIdGen().Generate(data.Options.ConnectionId, userTool.Id), diff --git a/backend/plugins/circleci/tasks/workflow_extractor.go b/backend/plugins/circleci/tasks/workflow_extractor.go index ba0639bc877..165c6d635d0 100644 --- a/backend/plugins/circleci/tasks/workflow_extractor.go +++ b/backend/plugins/circleci/tasks/workflow_extractor.go @@ -48,8 +48,8 @@ func ExtractWorkflows(taskCtx plugin.SubTaskContext) errors.Error { toolL := userRes toolL.ConnectionId = data.Options.ConnectionId toolL.ProjectSlug = data.Options.ProjectSlug - if 
userRes.CreatedAt != nil && userRes.StoppedAt != nil { - startTime := userRes.CreatedAt.ToTime() + if userRes.CreatedDate != nil && userRes.StoppedAt != nil { + startTime := userRes.CreatedDate.ToTime() endTime := userRes.StoppedAt.ToTime() toolL.DurationSec = float64(endTime.Sub(startTime).Milliseconds() / 1e3) } From a502532f2e85e4d992f1667d9af5493c7d5f0502 Mon Sep 17 00:00:00 2001 From: AvivGuiser Date: Mon, 23 Mar 2026 16:30:30 +0200 Subject: [PATCH 35/39] fix(jenkins): scope multi-branch build collection to current project (#8430) (#8781) The branch jobs query in collectMultiBranchJobApiBuilds selected all WorkflowJob entries across all multi-branch pipelines for a connection, causing builds to be duplicated and misattributed. Filter by _raw_data_params to collect only the current project's branch jobs. Co-authored-by: Claude Opus 4.6 (1M context) --- backend/plugins/jenkins/tasks/build_collector.go | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/backend/plugins/jenkins/tasks/build_collector.go b/backend/plugins/jenkins/tasks/build_collector.go index 0d792c61454..a1ae7498754 100644 --- a/backend/plugins/jenkins/tasks/build_collector.go +++ b/backend/plugins/jenkins/tasks/build_collector.go @@ -146,12 +146,17 @@ func collectMultiBranchJobApiBuilds(taskCtx plugin.SubTaskContext) errors.Error logger := taskCtx.GetLogger() // Jobs added through the multi-branch workflow have _raw_data_table set to "jenkins_api_jobs". - // This check works, but it's not very robust. It would be better to use a more explicit check like a "source" column. + // Filter by _raw_data_params to scope to only this multi-branch project's branch jobs, + // preventing cross-project data mixing when multiple multi-branch pipelines exist. 
+ jobParams := plugin.MarshalScopeParams(JenkinsApiParams{ + ConnectionId: data.Options.ConnectionId, + FullName: data.Options.JobFullName, + }) clauses := []dal.Clause{ dal.Select("j.full_name,j.name,j.path,j.class,j.url"), dal.From("_tool_jenkins_jobs as j"), - dal.Where(`j.connection_id = ? and j.class = ? and j._raw_data_table = ?`, - data.Options.ConnectionId, WORKFLOW_JOB, fmt.Sprintf("_raw_%s", RAW_JOB_TABLE)), + dal.Where(`j.connection_id = ? and j.class = ? and j._raw_data_table = ? and j._raw_data_params = ?`, + data.Options.ConnectionId, WORKFLOW_JOB, fmt.Sprintf("_raw_%s", RAW_JOB_TABLE), jobParams), } cursor, err := db.Cursor(clauses...) if err != nil { From 4bc464375f63873e88f29f5c83a3476f33b27bba Mon Sep 17 00:00:00 2001 From: Shayne Clausson Date: Mon, 23 Mar 2026 15:36:55 +0100 Subject: [PATCH 36/39] fix: Make gh-copilot plugin database agnostic (#8779) Co-authored-by: Eldrick Wega --- .../gh-copilot/models/migrationscripts/20250100_initialize.go | 4 ++-- .../migrationscripts/20260116_add_name_fields_to_scopes.go | 4 ++-- .../models/migrationscripts/20260121_add_scope_configs.go | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/plugins/gh-copilot/models/migrationscripts/20250100_initialize.go b/backend/plugins/gh-copilot/models/migrationscripts/20250100_initialize.go index 23b4f67b48d..53dee4fbcb2 100644 --- a/backend/plugins/gh-copilot/models/migrationscripts/20250100_initialize.go +++ b/backend/plugins/gh-copilot/models/migrationscripts/20250100_initialize.go @@ -65,9 +65,9 @@ type ghCopilotScope20250100 struct { ScopeConfigId uint64 `json:"scopeConfigId,omitempty"` Id string `json:"id" gorm:"primaryKey;type:varchar(255)"` Organization string `json:"organization" gorm:"type:varchar(255)"` - ImplementationDate *time.Time `json:"implementationDate" gorm:"type:datetime"` + ImplementationDate *time.Time `json:"implementationDate"` BaselinePeriodDays int `json:"baselinePeriodDays" gorm:"default:90"` - SeatsLastSyncedAt 
*time.Time `json:"seatsLastSyncedAt" gorm:"type:datetime"` + SeatsLastSyncedAt *time.Time `json:"seatsLastSyncedAt"` } func (ghCopilotScope20250100) TableName() string { diff --git a/backend/plugins/gh-copilot/models/migrationscripts/20260116_add_name_fields_to_scopes.go b/backend/plugins/gh-copilot/models/migrationscripts/20260116_add_name_fields_to_scopes.go index a1e7c8c99ee..69df2c0b620 100644 --- a/backend/plugins/gh-copilot/models/migrationscripts/20260116_add_name_fields_to_scopes.go +++ b/backend/plugins/gh-copilot/models/migrationscripts/20260116_add_name_fields_to_scopes.go @@ -38,9 +38,9 @@ type ghCopilotScope20260116 struct { Organization string `json:"organization" gorm:"type:varchar(255)"` Name string `json:"name" gorm:"type:varchar(255)"` FullName string `json:"fullName" gorm:"type:varchar(255)"` - ImplementationDate *time.Time `json:"implementationDate" gorm:"type:datetime"` + ImplementationDate *time.Time `json:"implementationDate"` BaselinePeriodDays int `json:"baselinePeriodDays" gorm:"default:90"` - SeatsLastSyncedAt *time.Time `json:"seatsLastSyncedAt" gorm:"type:datetime"` + SeatsLastSyncedAt *time.Time `json:"seatsLastSyncedAt"` } func (ghCopilotScope20260116) TableName() string { diff --git a/backend/plugins/gh-copilot/models/migrationscripts/20260121_add_scope_configs.go b/backend/plugins/gh-copilot/models/migrationscripts/20260121_add_scope_configs.go index eb53b0a0640..4bd77391f1a 100644 --- a/backend/plugins/gh-copilot/models/migrationscripts/20260121_add_scope_configs.go +++ b/backend/plugins/gh-copilot/models/migrationscripts/20260121_add_scope_configs.go @@ -33,7 +33,7 @@ type scopeConfig20260121 struct { Entities []string `gorm:"type:json;serializer:json" json:"entities" mapstructure:"entities"` ConnectionId uint64 `json:"connectionId" gorm:"index" validate:"required" mapstructure:"connectionId,omitempty"` Name string `mapstructure:"name" json:"name" gorm:"type:varchar(255);uniqueIndex" validate:"required"` - ImplementationDate 
*time.Time `json:"implementationDate" mapstructure:"implementationDate" gorm:"type:datetime"` + ImplementationDate *time.Time `json:"implementationDate" mapstructure:"implementationDate"` BaselinePeriodDays int `json:"baselinePeriodDays" mapstructure:"baselinePeriodDays" gorm:"default:90"` } From fd4d572dab4ce5c2f084c2dade19f2a7415e4de6 Mon Sep 17 00:00:00 2001 From: Joshua Smith Date: Mon, 23 Mar 2026 08:40:58 -0600 Subject: [PATCH 37/39] fix(sonarqube): increase cq_issues and cq_file_metrics project_key length to 500 (#8783) Fixes #8331 --- .../codequality/cq_file_metrics.go | 2 +- .../domainlayer/codequality/cq_issues.go | 2 +- ...7_increase_cq_issues_project_key_length.go | 44 +++++++++++++++++++ .../core/models/migrationscripts/register.go | 1 + 4 files changed, 47 insertions(+), 2 deletions(-) create mode 100644 backend/core/models/migrationscripts/20260317_increase_cq_issues_project_key_length.go diff --git a/backend/core/models/domainlayer/codequality/cq_file_metrics.go b/backend/core/models/domainlayer/codequality/cq_file_metrics.go index abc03b85f6f..dc069230e20 100644 --- a/backend/core/models/domainlayer/codequality/cq_file_metrics.go +++ b/backend/core/models/domainlayer/codequality/cq_file_metrics.go @@ -23,7 +23,7 @@ import ( type CqFileMetrics struct { domainlayer.DomainEntity - ProjectKey string `gorm:"index;type:varchar(255)"` //domain project key + ProjectKey string `gorm:"index;type:varchar(500)"` //domain project key FileName string `gorm:"type:varchar(2000)"` FilePath string FileLanguage string `gorm:"type:varchar(20)"` diff --git a/backend/core/models/domainlayer/codequality/cq_issues.go b/backend/core/models/domainlayer/codequality/cq_issues.go index 1e9ea7134a7..4441eb1d7c0 100644 --- a/backend/core/models/domainlayer/codequality/cq_issues.go +++ b/backend/core/models/domainlayer/codequality/cq_issues.go @@ -27,7 +27,7 @@ type CqIssue struct { Rule string `gorm:"type:varchar(255)"` Severity string `gorm:"type:varchar(100)"` Component string 
- ProjectKey string `gorm:"index;type:varchar(100)"` //domain project key + ProjectKey string `gorm:"index;type:varchar(500)"` //domain project key Line int Status string `gorm:"type:varchar(20)"` Message string diff --git a/backend/core/models/migrationscripts/20260317_increase_cq_issues_project_key_length.go b/backend/core/models/migrationscripts/20260317_increase_cq_issues_project_key_length.go new file mode 100644 index 00000000000..223a2b6a345 --- /dev/null +++ b/backend/core/models/migrationscripts/20260317_increase_cq_issues_project_key_length.go @@ -0,0 +1,44 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" +) + +var _ plugin.MigrationScript = (*increaseCqIssuesProjectKeyLength)(nil) + +type increaseCqIssuesProjectKeyLength struct{} + +func (script *increaseCqIssuesProjectKeyLength) Up(basicRes context.BasicRes) errors.Error { + db := basicRes.GetDal() + if err := db.ModifyColumnType("cq_issues", "project_key", "varchar(500)"); err != nil { + return err + } + return db.ModifyColumnType("cq_file_metrics", "project_key", "varchar(500)") +} + +func (*increaseCqIssuesProjectKeyLength) Version() uint64 { + return 20260317000000 +} + +func (*increaseCqIssuesProjectKeyLength) Name() string { + return "increase cq_issues and cq_file_metrics project_key length to 500" +} diff --git a/backend/core/models/migrationscripts/register.go b/backend/core/models/migrationscripts/register.go index 9372d8fbd02..9abb4f0aeae 100644 --- a/backend/core/models/migrationscripts/register.go +++ b/backend/core/models/migrationscripts/register.go @@ -143,5 +143,6 @@ func All() []plugin.MigrationScript { new(addPipelinePriority), new(fixNullPriority), new(modifyCicdDeploymentsToText), + new(increaseCqIssuesProjectKeyLength), } } From 89c370a02594355d2e2ae0bc0bd4e3f82e78b3e1 Mon Sep 17 00:00:00 2001 From: irfanuddinahmad <34648393+irfanuddinahmad@users.noreply.github.com> Date: Mon, 23 Mar 2026 19:41:37 +0500 Subject: [PATCH 38/39] feat: added taiga plugin (#8755) * feat: added taiga plugin * fix: fixed tests * feat(gh-copilot): add support for organization daily user metrics (#8747) * feat(circleci): add server version requirement and endpoint help text (#8757) Update CircleCI connection form to indicate Server v4.x+ requirement and provide guidance for server endpoint configuration. 
Signed-off-by: Joshua Smith * fix: fixed test files --------- Signed-off-by: Joshua Smith Co-authored-by: Reece Ward <47779818+ReeceXW@users.noreply.github.com> Co-authored-by: Joshua Smith --- backend/plugins/table_info_test.go | 2 + backend/plugins/taiga/api/blueprint_v200.go | 137 + backend/plugins/taiga/api/connection_api.go | 225 ++ backend/plugins/taiga/api/init.go | 53 + backend/plugins/taiga/api/remote_api.go | 132 + backend/plugins/taiga/api/scope_api.go | 52 + backend/plugins/taiga/api/scope_config_api.go | 47 + backend/plugins/taiga/e2e/epic_test.go | 74 + backend/plugins/taiga/e2e/issue_test.go | 77 + backend/plugins/taiga/e2e/project_test.go | 69 + .../e2e/raw_tables/_raw_taiga_api_epics.csv | 3 + .../e2e/raw_tables/_raw_taiga_api_issues.csv | 3 + .../raw_tables/_raw_taiga_api_projects.csv | 2 + .../e2e/raw_tables/_raw_taiga_api_tasks.csv | 3 + .../_raw_taiga_api_user_stories.csv | 3 + .../e2e/snapshot_tables/_tool_taiga_epics.csv | 3 + .../snapshot_tables/_tool_taiga_issues.csv | 3 + .../snapshot_tables/_tool_taiga_projects.csv | 2 + .../e2e/snapshot_tables/_tool_taiga_tasks.csv | 3 + .../_tool_taiga_user_stories.csv | 3 + .../board_issues_from_epics.csv | 3 + .../board_issues_from_issues.csv | 3 + .../board_issues_from_tasks.csv | 3 + .../board_issues_from_user_stories.csv | 3 + .../taiga/e2e/snapshot_tables/boards.csv | 2 + .../e2e/snapshot_tables/issues_from_epics.csv | 3 + .../snapshot_tables/issues_from_issues.csv | 3 + .../e2e/snapshot_tables/issues_from_tasks.csv | 3 + .../issues_from_user_stories.csv | 3 + backend/plugins/taiga/e2e/task_test.go | 74 + backend/plugins/taiga/e2e/user_story_test.go | 74 + backend/plugins/taiga/impl/impl.go | 251 ++ backend/plugins/taiga/models/connection.go | 143 + backend/plugins/taiga/models/epic.go | 45 + backend/plugins/taiga/models/issue.go | 49 + .../20250220_add_init_tables.go | 132 + .../20260306_add_task_issue_epic_tables.go | 128 + .../taiga/models/migrationscripts/register.go | 30 + 
backend/plugins/taiga/models/project.go | 63 + backend/plugins/taiga/models/scope_config.go | 53 + backend/plugins/taiga/models/task.go | 49 + backend/plugins/taiga/models/user_story.go | 53 + backend/plugins/taiga/taiga.go | 45 + backend/plugins/taiga/tasks/api_client.go | 49 + backend/plugins/taiga/tasks/epic_collector.go | 79 + backend/plugins/taiga/tasks/epic_convertor.go | 106 + backend/plugins/taiga/tasks/epic_extractor.go | 106 + .../plugins/taiga/tasks/issue_collector.go | 79 + .../plugins/taiga/tasks/issue_convertor.go | 132 + .../plugins/taiga/tasks/issue_extractor.go | 136 + .../plugins/taiga/tasks/project_collector.go | 76 + .../plugins/taiga/tasks/project_convertor.go | 88 + .../plugins/taiga/tasks/project_extractor.go | 84 + backend/plugins/taiga/tasks/task_collector.go | 79 + backend/plugins/taiga/tasks/task_convertor.go | 107 + backend/plugins/taiga/tasks/task_data.go | 54 + backend/plugins/taiga/tasks/task_extractor.go | 122 + .../taiga/tasks/user_story_collector.go | 80 + .../taiga/tasks/user_story_convertor.go | 129 + .../taiga/tasks/user_story_extractor.go | 135 + grafana/dashboards/Taiga.json | 2316 +++++++++++++++++ 61 files changed, 6068 insertions(+) create mode 100644 backend/plugins/taiga/api/blueprint_v200.go create mode 100644 backend/plugins/taiga/api/connection_api.go create mode 100644 backend/plugins/taiga/api/init.go create mode 100644 backend/plugins/taiga/api/remote_api.go create mode 100644 backend/plugins/taiga/api/scope_api.go create mode 100644 backend/plugins/taiga/api/scope_config_api.go create mode 100644 backend/plugins/taiga/e2e/epic_test.go create mode 100644 backend/plugins/taiga/e2e/issue_test.go create mode 100644 backend/plugins/taiga/e2e/project_test.go create mode 100644 backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_epics.csv create mode 100644 backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_issues.csv create mode 100644 backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_projects.csv create mode 100644 
backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_tasks.csv create mode 100644 backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_user_stories.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_epics.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_issues.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_projects.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_tasks.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_user_stories.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_epics.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_issues.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_tasks.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_user_stories.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/boards.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/issues_from_epics.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/issues_from_issues.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/issues_from_tasks.csv create mode 100644 backend/plugins/taiga/e2e/snapshot_tables/issues_from_user_stories.csv create mode 100644 backend/plugins/taiga/e2e/task_test.go create mode 100644 backend/plugins/taiga/e2e/user_story_test.go create mode 100644 backend/plugins/taiga/impl/impl.go create mode 100644 backend/plugins/taiga/models/connection.go create mode 100644 backend/plugins/taiga/models/epic.go create mode 100644 backend/plugins/taiga/models/issue.go create mode 100644 backend/plugins/taiga/models/migrationscripts/20250220_add_init_tables.go create mode 100644 backend/plugins/taiga/models/migrationscripts/20260306_add_task_issue_epic_tables.go create mode 100644 backend/plugins/taiga/models/migrationscripts/register.go create mode 100644 
backend/plugins/taiga/models/project.go create mode 100644 backend/plugins/taiga/models/scope_config.go create mode 100644 backend/plugins/taiga/models/task.go create mode 100644 backend/plugins/taiga/models/user_story.go create mode 100644 backend/plugins/taiga/taiga.go create mode 100644 backend/plugins/taiga/tasks/api_client.go create mode 100644 backend/plugins/taiga/tasks/epic_collector.go create mode 100644 backend/plugins/taiga/tasks/epic_convertor.go create mode 100644 backend/plugins/taiga/tasks/epic_extractor.go create mode 100644 backend/plugins/taiga/tasks/issue_collector.go create mode 100644 backend/plugins/taiga/tasks/issue_convertor.go create mode 100644 backend/plugins/taiga/tasks/issue_extractor.go create mode 100644 backend/plugins/taiga/tasks/project_collector.go create mode 100644 backend/plugins/taiga/tasks/project_convertor.go create mode 100644 backend/plugins/taiga/tasks/project_extractor.go create mode 100644 backend/plugins/taiga/tasks/task_collector.go create mode 100644 backend/plugins/taiga/tasks/task_convertor.go create mode 100644 backend/plugins/taiga/tasks/task_data.go create mode 100644 backend/plugins/taiga/tasks/task_extractor.go create mode 100644 backend/plugins/taiga/tasks/user_story_collector.go create mode 100644 backend/plugins/taiga/tasks/user_story_convertor.go create mode 100644 backend/plugins/taiga/tasks/user_story_extractor.go create mode 100644 grafana/dashboards/Taiga.json diff --git a/backend/plugins/table_info_test.go b/backend/plugins/table_info_test.go index 929c069b86b..5f81f319aea 100644 --- a/backend/plugins/table_info_test.go +++ b/backend/plugins/table_info_test.go @@ -52,6 +52,7 @@ import ( slack "github.com/apache/incubator-devlake/plugins/slack/impl" sonarqube "github.com/apache/incubator-devlake/plugins/sonarqube/impl" starrocks "github.com/apache/incubator-devlake/plugins/starrocks/impl" + taiga "github.com/apache/incubator-devlake/plugins/taiga/impl" tapd 
"github.com/apache/incubator-devlake/plugins/tapd/impl" teambition "github.com/apache/incubator-devlake/plugins/teambition/impl" testmo "github.com/apache/incubator-devlake/plugins/testmo/impl" @@ -90,6 +91,7 @@ func Test_GetPluginTablesInfo(t *testing.T) { checker.FeedIn("slack/models", slack.Slack{}.GetTablesInfo) checker.FeedIn("sonarqube/models", sonarqube.Sonarqube{}.GetTablesInfo) checker.FeedIn("starrocks", starrocks.StarRocks{}.GetTablesInfo) + checker.FeedIn("taiga/models", taiga.Taiga{}.GetTablesInfo) checker.FeedIn("tapd/models", tapd.Tapd{}.GetTablesInfo) checker.FeedIn("teambition/models", teambition.Teambition{}.GetTablesInfo) checker.FeedIn("testmo/models", testmo.Testmo{}.GetTablesInfo) diff --git a/backend/plugins/taiga/api/blueprint_v200.go b/backend/plugins/taiga/api/blueprint_v200.go new file mode 100644 index 00000000000..3eabf1acbd5 --- /dev/null +++ b/backend/plugins/taiga/api/blueprint_v200.go @@ -0,0 +1,137 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package api + +import ( + "context" + + "github.com/apache/incubator-devlake/core/errors" + coreModels "github.com/apache/incubator-devlake/core/models" + "github.com/apache/incubator-devlake/core/models/domainlayer" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/helpers/srvhelper" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +type TaigaTaskOptions struct { + ConnectionId uint64 `json:"connectionId"` + ProjectId uint64 `json:"projectId"` +} + +func MakeDataSourcePipelinePlanV200( + subtaskMetas []plugin.SubTaskMeta, + connectionId uint64, + bpScopes []*coreModels.BlueprintScope, +) (coreModels.PipelinePlan, []plugin.Scope, errors.Error) { + // load connection, scope and scopeConfig from the db + connection, err := dsHelper.ConnSrv.FindByPk(connectionId) + if err != nil { + return nil, nil, err + } + scopeDetails, err := dsHelper.ScopeSrv.MapScopeDetails(connectionId, bpScopes) + if err != nil { + return nil, nil, err + } + + // needed for the connection to populate its access tokens + _, err = helper.NewApiClientFromConnection(context.TODO(), basicRes, connection) + if err != nil { + return nil, nil, err + } + + plan, err := makeDataSourcePipelinePlanV200(subtaskMetas, scopeDetails, connection) + if err != nil { + return nil, nil, err + } + scopes, err := makeScopesV200(scopeDetails, connection) + if err != nil { + return nil, nil, err + } + + return plan, scopes, nil +} + +func makeDataSourcePipelinePlanV200( + subtaskMetas []plugin.SubTaskMeta, + scopeDetails []*srvhelper.ScopeDetail[models.TaigaProject, models.TaigaScopeConfig], + connection *models.TaigaConnection, +) (coreModels.PipelinePlan, errors.Error) { + plan := make(coreModels.PipelinePlan, len(scopeDetails)) + for i, 
scopeDetail := range scopeDetails { + stage := plan[i] + if stage == nil { + stage = coreModels.PipelineStage{} + } + + scope, scopeConfig := scopeDetail.Scope, scopeDetail.ScopeConfig + // construct task options for Taiga + task, err := helper.MakePipelinePlanTask( + "taiga", + subtaskMetas, + scopeConfig.Entities, + TaigaTaskOptions{ + ConnectionId: scope.ConnectionId, + ProjectId: uint64(scope.ProjectId), + }, + ) + if err != nil { + return nil, err + } + + stage = append(stage, task) + plan[i] = stage + } + + return plan, nil +} + +func makeScopesV200( + scopeDetails []*srvhelper.ScopeDetail[models.TaigaProject, models.TaigaScopeConfig], + connection *models.TaigaConnection, +) ([]plugin.Scope, errors.Error) { + scopes := make([]plugin.Scope, 0, len(scopeDetails)) + idGen := didgen.NewDomainIdGenerator(&models.TaigaProject{}) + + for _, scopeDetail := range scopeDetails { + project := scopeDetail.Scope + + // add board to scopes + entities := scopeDetail.ScopeConfig.Entities + hasTicket := false + for _, entity := range entities { + if entity == plugin.DOMAIN_TYPE_TICKET { + hasTicket = true + break + } + } + if hasTicket { + domainBoard := &ticket.Board{ + DomainEntity: domainlayer.DomainEntity{ + Id: idGen.Generate(connection.ID, project.ProjectId), + }, + Name: project.Name, + } + scopes = append(scopes, domainBoard) + } + } + + return scopes, nil +} diff --git a/backend/plugins/taiga/api/connection_api.go b/backend/plugins/taiga/api/connection_api.go new file mode 100644 index 00000000000..88021be3a99 --- /dev/null +++ b/backend/plugins/taiga/api/connection_api.go @@ -0,0 +1,225 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package api + +import ( + "context" + "fmt" + "net/http" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" + "github.com/apache/incubator-devlake/server/api/shared" +) + +// TaigaTestConnResponse is the response struct for testing a connection +type TaigaTestConnResponse struct { + shared.ApiBody + Connection *models.TaigaConnection +} + +// testConnection tests the Taiga connection +func testConnection(ctx context.Context, connection models.TaigaConnection) (*TaigaTestConnResponse, errors.Error) { + // If username and password are provided, authenticate to get a token + if connection.Username != "" && connection.Password != "" && connection.Token == "" { + // Create a temporary connection without token for authentication + tempConnection := connection + tempConnection.Token = "" + + // Create a temporary API client to call the auth endpoint + tempApiClient, err := api.NewApiClientFromConnection(ctx, basicRes, &tempConnection) + if err != nil { + return nil, errors.Default.Wrap(err, "error creating API client") + } + + // Prepare auth request body + authBody := map[string]interface{}{ + "type": "normal", + "username": connection.Username, + "password": connection.Password, + } + + // Authenticate to get token + authResponse := struct { + AuthToken string `json:"auth_token"` + }{} + + res, err := tempApiClient.Post("auth", nil, authBody, nil) + if err != nil { + return nil, 
errors.Default.Wrap(err, "error authenticating with Taiga") + } + + if res.StatusCode == http.StatusUnauthorized || res.StatusCode == http.StatusBadRequest { + return nil, errors.HttpStatus(http.StatusBadRequest).New("authentication failed - please check your username and password") + } + + if res.StatusCode != http.StatusOK { + return nil, errors.HttpStatus(res.StatusCode).New(fmt.Sprintf("unexpected status code during auth: %d", res.StatusCode)) + } + + // Parse the auth response + err = api.UnmarshalResponse(res, &authResponse) + if err != nil { + return nil, errors.Default.Wrap(err, "error parsing authentication response") + } + + // Set the token for validation + connection.Token = authResponse.AuthToken + } + + // validate - but make Token optional if we have username/password + if vld != nil { + if connection.Token == "" && (connection.Username == "" || connection.Password == "") { + return nil, errors.Default.New("either token or username/password must be provided") + } + } + + apiClient, err := api.NewApiClientFromConnection(ctx, basicRes, &connection) + if err != nil { + return nil, err + } + + // test connection by making a request to the user endpoint + res, err := apiClient.Get("users/me", nil, nil) + if err != nil { + return nil, errors.Default.Wrap(err, "error testing connection") + } + + if res.StatusCode == http.StatusUnauthorized || res.StatusCode == http.StatusForbidden { + return nil, errors.HttpStatus(http.StatusBadRequest).New("authentication error when testing connection - please check your credentials") + } + + if res.StatusCode != http.StatusOK { + return nil, errors.HttpStatus(res.StatusCode).New(fmt.Sprintf("unexpected status code: %d", res.StatusCode)) + } + + connection = connection.Sanitize() + body := TaigaTestConnResponse{} + body.Success = true + body.Message = "success" + body.Connection = &connection + + return &body, nil +} + +// TestConnection tests the Taiga connection +// @Summary test taiga connection +// @Description Test 
Taiga Connection +// @Tags plugins/taiga +// @Param body body models.TaigaConnection true "json body" +// @Success 200 {object} TaigaTestConnResponse "Success" +// @Failure 400 {string} errcode.Error "Bad Request" +// @Failure 500 {string} errcode.Error "Internal Error" +// @Router /plugins/taiga/test [POST] +func TestConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + // decode + var connection models.TaigaConnection + err := api.DecodeMapStruct(input.Body, &connection, false) + if err != nil { + return nil, err + } + // test connection + result, err := testConnection(context.TODO(), connection) + if err != nil { + return nil, plugin.WrapTestConnectionErrResp(basicRes, err) + } + return &plugin.ApiResourceOutput{Body: result, Status: http.StatusOK}, nil +} + +// TestExistingConnection tests an existing Taiga connection +// @Summary test existing taiga connection +// @Description Test Existing Taiga Connection +// @Tags plugins/taiga +// @Success 200 {object} TaigaTestConnResponse "Success" +// @Failure 400 {string} errcode.Error "Bad Request" +// @Failure 500 {string} errcode.Error "Internal Error" +// @Router /plugins/taiga/connections/:connectionId/test [POST] +func TestExistingConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + connection, err := dsHelper.ConnApi.GetMergedConnection(input) + if err != nil { + return nil, errors.BadInput.Wrap(err, "find connection from db") + } + // test connection + result, err := testConnection(context.TODO(), *connection) + if err != nil { + return nil, plugin.WrapTestConnectionErrResp(basicRes, err) + } + return &plugin.ApiResourceOutput{Body: result, Status: http.StatusOK}, nil +} + +// PostConnections creates a new Taiga connection +// @Summary create taiga connection +// @Description Create Taiga Connection +// @Tags plugins/taiga +// @Success 200 {object} models.TaigaConnection +// @Failure 400 +// @Failure 500 +// @Router 
/plugins/taiga/connections [POST] +func PostConnections(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ConnApi.Post(input) +} + +// ListConnections lists all Taiga connections +// @Summary list taiga connections +// @Description List Taiga Connections +// @Tags plugins/taiga +// @Success 200 {object} []models.TaigaConnection +// @Failure 400 +// @Failure 500 +// @Router /plugins/taiga/connections [GET] +func ListConnections(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ConnApi.GetAll(input) +} + +// GetConnection gets a Taiga connection by ID +// @Summary get taiga connection +// @Description Get Taiga Connection +// @Tags plugins/taiga +// @Success 200 {object} models.TaigaConnection +// @Failure 400 +// @Failure 500 +// @Router /plugins/taiga/connections/:connectionId [GET] +func GetConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ConnApi.GetDetail(input) +} + +// PatchConnection updates a Taiga connection +// @Summary patch taiga connection +// @Description Patch Taiga Connection +// @Tags plugins/taiga +// @Success 200 {object} models.TaigaConnection +// @Failure 400 +// @Failure 500 +// @Router /plugins/taiga/connections/:connectionId [PATCH] +func PatchConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ConnApi.Patch(input) +} + +// DeleteConnection deletes a Taiga connection +// @Summary delete taiga connection +// @Description Delete Taiga Connection +// @Tags plugins/taiga +// @Success 200 +// @Failure 400 +// @Failure 500 +// @Router /plugins/taiga/connections/:connectionId [DELETE] +func DeleteConnection(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ConnApi.Delete(input) +} diff --git a/backend/plugins/taiga/api/init.go b/backend/plugins/taiga/api/init.go new file mode 100644 index 00000000000..82b332456ac 
--- /dev/null +++ b/backend/plugins/taiga/api/init.go @@ -0,0 +1,53 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package api + +import ( + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" + "github.com/go-playground/validator/v10" +) + +var vld *validator.Validate +var basicRes context.BasicRes +var dsHelper *api.DsHelper[models.TaigaConnection, models.TaigaProject, models.TaigaScopeConfig] +var raProxy *api.DsRemoteApiProxyHelper[models.TaigaConnection] +var raScopeList *api.DsRemoteApiScopeListHelper[models.TaigaConnection, models.TaigaProject, TaigaRemotePagination] + +func Init(br context.BasicRes, p plugin.PluginMeta) { + basicRes = br + vld = validator.New() + dsHelper = api.NewDataSourceHelper[ + models.TaigaConnection, + models.TaigaProject, + models.TaigaScopeConfig, + ]( + br, + p.Name(), + []string{"name"}, + func(c models.TaigaConnection) models.TaigaConnection { + return c.Sanitize() + }, + nil, + nil, + ) + raProxy = api.NewDsRemoteApiProxyHelper[models.TaigaConnection](dsHelper.ConnApi.ModelApiHelper) + raScopeList = 
api.NewDsRemoteApiScopeListHelper[models.TaigaConnection, models.TaigaProject, TaigaRemotePagination](raProxy, listTaigaRemoteScopes) +} diff --git a/backend/plugins/taiga/api/remote_api.go b/backend/plugins/taiga/api/remote_api.go new file mode 100644 index 00000000000..a0cc0b02a90 --- /dev/null +++ b/backend/plugins/taiga/api/remote_api.go @@ -0,0 +1,132 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package api + +import ( + "fmt" + "net/url" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + dsmodels "github.com/apache/incubator-devlake/helpers/pluginhelper/api/models" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +type TaigaRemotePagination struct { + Page int `json:"page"` + PageSize int `json:"pageSize"` +} + +type TaigaApiProject struct { + Id uint64 `json:"id"` + Name string `json:"name"` + Slug string `json:"slug"` + Description string `json:"description"` +} + +func queryTaigaProjects( + apiClient plugin.ApiClient, + keyword string, + page TaigaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.TaigaProject], + nextPage *TaigaRemotePagination, + err errors.Error, +) { + if page.PageSize == 0 { + page.PageSize = 100 + } + if page.Page == 0 { + page.Page = 1 + } + + query := url.Values{ + "page": {fmt.Sprintf("%d", page.Page)}, + "page_size": {fmt.Sprintf("%d", page.PageSize)}, + } + if keyword != "" { + query.Set("search", keyword) + } + + res, err := apiClient.Get("projects", query, nil) + if err != nil { + return + } + + var projects []TaigaApiProject + err = api.UnmarshalResponse(res, &projects) + if err != nil { + return + } + + for _, project := range projects { + children = append(children, dsmodels.DsRemoteApiScopeListEntry[models.TaigaProject]{ + Type: api.RAS_ENTRY_TYPE_SCOPE, + Id: fmt.Sprintf("%d", project.Id), + ParentId: nil, + Name: project.Name, + FullName: project.Name, + Data: &models.TaigaProject{ + ProjectId: project.Id, + Name: project.Name, + Slug: project.Slug, + Description: project.Description, + }, + }) + } + + // Check if there are more pages + if len(projects) == page.PageSize { + nextPage = &TaigaRemotePagination{ + Page: page.Page + 1, + PageSize: page.PageSize, + } + } + + return +} + +func listTaigaRemoteScopes( + _ *models.TaigaConnection, + 
apiClient plugin.ApiClient, + groupId string, + page TaigaRemotePagination, +) ( + children []dsmodels.DsRemoteApiScopeListEntry[models.TaigaProject], + nextPage *TaigaRemotePagination, + err errors.Error, +) { + return queryTaigaProjects(apiClient, "", page) +} + +// RemoteScopes list all available scopes on the remote server +// @Summary list all available scopes on the remote server +// @Description list all available scopes on the remote server +// @Accept application/json +// @Param connectionId path int false "connection ID" +// @Param groupId query string false "group ID" +// @Param pageToken query string false "page Token" +// @Failure 400 {object} shared.ApiBody "Bad Request" +// @Failure 500 {object} shared.ApiBody "Internal Error" +// @Success 200 {object} dsmodels.DsRemoteApiScopeList[models.TaigaProject] +// @Tags plugins/taiga +// @Router /plugins/taiga/connections/{connectionId}/remote-scopes [GET] +func RemoteScopes(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return raScopeList.Get(input) +} diff --git a/backend/plugins/taiga/api/scope_api.go b/backend/plugins/taiga/api/scope_api.go new file mode 100644 index 00000000000..3b8c128894d --- /dev/null +++ b/backend/plugins/taiga/api/scope_api.go @@ -0,0 +1,52 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package api + +import ( + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +type PutScopesReqBody api.PutScopesReqBody[models.TaigaProject] +type ScopeDetail api.ScopeDetail[models.TaigaProject, models.TaigaScopeConfig] + +func PutScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.PutMultiple(input) +} + +func UpdateScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.Patch(input) +} + +func GetScopeList(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.GetPage(input) +} + +func GetScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.GetScopeDetail(input) +} + +func DeleteScope(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.Delete(input) +} + +func GetScopeLatestSyncState(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeApi.GetScopeLatestSyncState(input) +} diff --git a/backend/plugins/taiga/api/scope_config_api.go b/backend/plugins/taiga/api/scope_config_api.go new file mode 100644 index 00000000000..f162da31c07 --- /dev/null +++ b/backend/plugins/taiga/api/scope_config_api.go @@ -0,0 +1,47 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package api + +import ( + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" +) + +func CreateScopeConfig(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.Post(input) +} + +func UpdateScopeConfig(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.Patch(input) +} + +func GetScopeConfig(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.GetDetail(input) +} + +func GetScopeConfigList(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.GetAll(input) +} + +func DeleteScopeConfig(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.Delete(input) +} + +func GetProjectsByScopeConfig(input *plugin.ApiResourceInput) (*plugin.ApiResourceOutput, errors.Error) { + return dsHelper.ScopeConfigApi.GetProjectsByScopeConfig(input) +} diff --git a/backend/plugins/taiga/e2e/epic_test.go b/backend/plugins/taiga/e2e/epic_test.go new file mode 100644 index 00000000000..47ac0c5d962 --- /dev/null +++ b/backend/plugins/taiga/e2e/epic_test.go @@ -0,0 +1,74 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package e2e + +import ( + "testing" + + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/helpers/e2ehelper" + "github.com/apache/incubator-devlake/plugins/taiga/impl" + "github.com/apache/incubator-devlake/plugins/taiga/models" + "github.com/apache/incubator-devlake/plugins/taiga/tasks" +) + +// TestTaigaEpicDataFlow verifies the full extract → convert pipeline for +// Taiga epics: raw API JSON → _tool_taiga_epics → ticket.Issue (type EPIC). +// +// To regenerate snapshot CSVs from actual output, temporarily replace +// VerifyTableWithOptions with CreateSnapshot for each table. +func TestTaigaEpicDataFlow(t *testing.T) { + var taiga impl.Taiga + dataflowTester := e2ehelper.NewDataFlowTester(t, "taiga", taiga) + + taskData := &tasks.TaigaTaskData{ + Options: &tasks.TaigaOptions{ + ConnectionId: 1, + ProjectId: 1, + }, + } + + // ── Extraction: raw JSON → _tool_taiga_epics ───────────────────────────── + dataflowTester.ImportCsvIntoRawTable( + "./raw_tables/_raw_taiga_api_epics.csv", + "_raw_taiga_api_epics", + ) + + dataflowTester.FlushTabler(&models.TaigaEpic{}) + dataflowTester.Subtask(tasks.ExtractEpicsMeta, taskData) + // Verify all tool-layer columns, ignoring NoPKModel timestamps and raw-data provenance. 
+ dataflowTester.VerifyTableWithOptions(models.TaigaEpic{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/_tool_taiga_epics.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) + + // ── Conversion: _tool_taiga_epics → ticket.Issue / ticket.BoardIssue ───── + dataflowTester.FlushTabler(&ticket.Issue{}) + dataflowTester.FlushTabler(&ticket.BoardIssue{}) + dataflowTester.Subtask(tasks.ConvertEpicsMeta, taskData) + // Verify all domain-layer columns, ignoring NoPKModel timestamps and raw-data provenance. + dataflowTester.VerifyTableWithOptions(ticket.Issue{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/issues_from_epics.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) + dataflowTester.VerifyTableWithOptions(ticket.BoardIssue{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/board_issues_from_epics.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) +} diff --git a/backend/plugins/taiga/e2e/issue_test.go b/backend/plugins/taiga/e2e/issue_test.go new file mode 100644 index 00000000000..7ab3c9c85a9 --- /dev/null +++ b/backend/plugins/taiga/e2e/issue_test.go @@ -0,0 +1,77 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package e2e + +import ( + "testing" + + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/helpers/e2ehelper" + "github.com/apache/incubator-devlake/plugins/taiga/impl" + "github.com/apache/incubator-devlake/plugins/taiga/models" + "github.com/apache/incubator-devlake/plugins/taiga/tasks" +) + +// TestTaigaIssueDataFlow verifies the full extract → convert pipeline for +// Taiga issues (bugs/enhancements/questions): raw API JSON → +// _tool_taiga_issues → ticket.Issue with correct DevLake type mapping. +// Specifically checks that "Bug" → BUG, "Enhancement" → REQUIREMENT. +// +// To regenerate snapshot CSVs from actual output, temporarily replace +// VerifyTableWithOptions with CreateSnapshot for each table. +func TestTaigaIssueDataFlow(t *testing.T) { + var taiga impl.Taiga + dataflowTester := e2ehelper.NewDataFlowTester(t, "taiga", taiga) + + taskData := &tasks.TaigaTaskData{ + Options: &tasks.TaigaOptions{ + ConnectionId: 1, + ProjectId: 1, + }, + } + + // ── Extraction: raw JSON → _tool_taiga_issues ──────────────────────────── + dataflowTester.ImportCsvIntoRawTable( + "./raw_tables/_raw_taiga_api_issues.csv", + "_raw_taiga_api_issues", + ) + + dataflowTester.FlushTabler(&models.TaigaIssue{}) + dataflowTester.Subtask(tasks.ExtractIssuesMeta, taskData) + // Verify all tool-layer columns, ignoring NoPKModel timestamps and raw-data provenance. + dataflowTester.VerifyTableWithOptions(models.TaigaIssue{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/_tool_taiga_issues.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) + + // ── Conversion: _tool_taiga_issues → ticket.Issue / ticket.BoardIssue ──── + // Key assertion: taigaIssueTypeToDevLake mapping is exercised (Bug→BUG, Enhancement→REQUIREMENT). 
+ dataflowTester.FlushTabler(&ticket.Issue{}) + dataflowTester.FlushTabler(&ticket.BoardIssue{}) + dataflowTester.Subtask(tasks.ConvertIssuesMeta, taskData) + // Verify all domain-layer columns, ignoring NoPKModel timestamps and raw-data provenance. + dataflowTester.VerifyTableWithOptions(ticket.Issue{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/issues_from_issues.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) + dataflowTester.VerifyTableWithOptions(ticket.BoardIssue{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/board_issues_from_issues.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) +} diff --git a/backend/plugins/taiga/e2e/project_test.go b/backend/plugins/taiga/e2e/project_test.go new file mode 100644 index 00000000000..d1e8aa87096 --- /dev/null +++ b/backend/plugins/taiga/e2e/project_test.go @@ -0,0 +1,69 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package e2e + +import ( + "testing" + + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/helpers/e2ehelper" + "github.com/apache/incubator-devlake/plugins/taiga/impl" + "github.com/apache/incubator-devlake/plugins/taiga/models" + "github.com/apache/incubator-devlake/plugins/taiga/tasks" +) + +// TestTaigaProjectDataFlow verifies the full extract → convert pipeline for +// Taiga projects: raw API JSON → _tool_taiga_projects → ticket.Board. +// +// To regenerate snapshot CSVs from actual output, temporarily replace +// VerifyTableWithOptions with CreateSnapshot for each table. +func TestTaigaProjectDataFlow(t *testing.T) { + var taiga impl.Taiga + dataflowTester := e2ehelper.NewDataFlowTester(t, "taiga", taiga) + + taskData := &tasks.TaigaTaskData{ + Options: &tasks.TaigaOptions{ + ConnectionId: 1, + ProjectId: 1, + }, + } + + // ── Extraction: raw JSON → _tool_taiga_projects ───────────────────────── + dataflowTester.ImportCsvIntoRawTable( + "./raw_tables/_raw_taiga_api_projects.csv", + "_raw_taiga_api_projects", + ) + + dataflowTester.FlushTabler(&models.TaigaProject{}) + dataflowTester.Subtask(tasks.ExtractProjectsMeta, taskData) + // Verify all tool-layer columns, ignoring NoPKModel timestamps and raw-data provenance. + dataflowTester.VerifyTableWithOptions(models.TaigaProject{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/_tool_taiga_projects.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) + + // ── Conversion: _tool_taiga_projects → ticket.Board ────────────────────── + dataflowTester.FlushTabler(&ticket.Board{}) + dataflowTester.Subtask(tasks.ConvertProjectsMeta, taskData) + // Verify all domain-layer columns, ignoring NoPKModel timestamps and raw-data provenance. 
+ dataflowTester.VerifyTableWithOptions(ticket.Board{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/boards.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) +} diff --git a/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_epics.csv b/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_epics.csv new file mode 100644 index 00000000000..9121a54d0a3 --- /dev/null +++ b/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_epics.csv @@ -0,0 +1,3 @@ +id,params,data,url,input,created_at +1,"{""ConnectionId"":1,""ProjectId"":1}","{""id"":401,""ref"":1,""subject"":""Platform v2.0"",""status"":1,""status_extra_info"":{""name"":""In Progress""},""is_closed"":false,""created_date"":""2024-01-05T09:00:00.000Z"",""modified_date"":""2024-01-20T10:00:00.000Z"",""assigned_to"":5,""assigned_to_extra_info"":{""full_name_display"":""Alice Smith""},""color"":""#F2711C""}",https://api.taiga.io/api/v1/epics?project=1,null,2024-01-21 08:00:00.000 +2,"{""ConnectionId"":1,""ProjectId"":1}","{""id"":402,""ref"":2,""subject"":""Mobile App"",""status"":2,""status_extra_info"":{""name"":""Open""},""is_closed"":false,""created_date"":""2024-01-07T11:00:00.000Z"",""modified_date"":""2024-01-18T09:00:00.000Z"",""assigned_to"":null,""assigned_to_extra_info"":null,""color"":""#2185D0""}",https://api.taiga.io/api/v1/epics?project=1,null,2024-01-21 08:00:00.000 diff --git a/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_issues.csv b/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_issues.csv new file mode 100644 index 00000000000..5e56af72500 --- /dev/null +++ b/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_issues.csv @@ -0,0 +1,3 @@ +id,params,data,url,input,created_at +1,"{""ConnectionId"":1,""ProjectId"":1}","{""id"":301,""ref"":1,""subject"":""Login page crash on 
mobile"",""status"":1,""status_extra_info"":{""name"":""New""},""type_extra_info"":{""name"":""Bug""},""priority_extra_info"":{""name"":""High""},""severity_extra_info"":{""name"":""Critical""},""is_closed"":false,""created_date"":""2024-01-14T08:00:00.000Z"",""modified_date"":""2024-01-19T14:00:00.000Z"",""finished_date"":null,""assigned_to"":5,""assigned_to_extra_info"":{""full_name_display"":""Alice Smith""},""milestone"":null}",https://api.taiga.io/api/v1/issues?project=1,null,2024-01-21 08:00:00.000 +2,"{""ConnectionId"":1,""ProjectId"":1}","{""id"":302,""ref"":2,""subject"":""Add dark mode support"",""status"":2,""status_extra_info"":{""name"":""Closed""},""type_extra_info"":{""name"":""Enhancement""},""priority_extra_info"":{""name"":""Normal""},""severity_extra_info"":{""name"":""Minor""},""is_closed"":true,""created_date"":""2024-01-08T10:00:00.000Z"",""modified_date"":""2024-01-16T13:00:00.000Z"",""finished_date"":""2024-01-16T13:00:00.000Z"",""assigned_to"":6,""assigned_to_extra_info"":{""full_name_display"":""Bob Jones""},""milestone"":1}",https://api.taiga.io/api/v1/issues?project=1,null,2024-01-21 08:00:00.000 diff --git a/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_projects.csv b/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_projects.csv new file mode 100644 index 00000000000..2871848b092 --- /dev/null +++ b/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_projects.csv @@ -0,0 +1,2 @@ +id,params,data,url,input,created_at +1,"{""ConnectionId"":1,""ProjectId"":1}","{""id"":1,""name"":""Test Project Alpha"",""slug"":""test-project-alpha"",""description"":""A test project for the DevLake Taiga plugin e2e tests"",""created_date"":""2024-01-01T00:00:00.000Z"",""modified_date"":""2024-01-20T00:00:00.000Z""}",https://api.taiga.io/api/v1/projects/1,null,2024-01-21 08:00:00.000 diff --git a/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_tasks.csv b/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_tasks.csv new file mode 100644 index 
00000000000..227fff104a2 --- /dev/null +++ b/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_tasks.csv @@ -0,0 +1,3 @@ +id,params,data,url,input,created_at +1,"{""ConnectionId"":1,""ProjectId"":1}","{""id"":201,""ref"":1,""subject"":""Write unit tests"",""status"":1,""status_extra_info"":{""name"":""In Progress""},""is_closed"":false,""created_date"":""2024-01-16T09:00:00.000Z"",""modified_date"":""2024-01-21T11:00:00.000Z"",""finished_date"":null,""assigned_to"":5,""assigned_to_extra_info"":{""full_name_display"":""Alice Smith""},""user_story"":101,""milestone"":1,""is_blocked"":false,""blocked_note"":""""}",https://api.taiga.io/api/v1/tasks?project=1,null,2024-01-21 08:00:00.000 +2,"{""ConnectionId"":1,""ProjectId"":1}","{""id"":202,""ref"":2,""subject"":""Deploy to staging"",""status"":2,""status_extra_info"":{""name"":""Done""},""is_closed"":true,""created_date"":""2024-01-12T10:00:00.000Z"",""modified_date"":""2024-01-17T16:00:00.000Z"",""finished_date"":""2024-01-17T16:00:00.000Z"",""assigned_to"":7,""assigned_to_extra_info"":{""full_name_display"":""Carol White""},""user_story"":101,""milestone"":null,""is_blocked"":false,""blocked_note"":""""}",https://api.taiga.io/api/v1/tasks?project=1,null,2024-01-21 08:00:00.000 diff --git a/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_user_stories.csv b/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_user_stories.csv new file mode 100644 index 00000000000..4bc83489eeb --- /dev/null +++ b/backend/plugins/taiga/e2e/raw_tables/_raw_taiga_api_user_stories.csv @@ -0,0 +1,3 @@ +id,params,data,url,input,created_at +1,"{""ConnectionId"":1,""ProjectId"":1}","{""id"":101,""ref"":1,""subject"":""Setup CI/CD Pipeline"",""status"":1,""status_extra_info"":{""name"":""In Progress""},""is_closed"":false,""created_date"":""2024-01-15T10:00:00.000Z"",""modified_date"":""2024-01-20T15:30:00.000Z"",""finish_date"":null,""assigned_to"":5,""assigned_to_extra_info"":{""full_name_display"":""Alice 
Smith""},""total_points"":8.0,""milestone"":1,""milestone_name"":""Sprint 1"",""priority"":3,""is_blocked"":false,""blocked_note"":""""}",https://api.taiga.io/api/v1/userstories?project=1,null,2024-01-21 08:00:00.000 +2,"{""ConnectionId"":1,""ProjectId"":1}","{""id"":102,""ref"":2,""subject"":""User Authentication"",""status"":2,""status_extra_info"":{""name"":""Done""},""is_closed"":true,""created_date"":""2024-01-10T09:00:00.000Z"",""modified_date"":""2024-01-18T17:00:00.000Z"",""finish_date"":""2024-01-18T17:00:00.000Z"",""assigned_to"":6,""assigned_to_extra_info"":{""full_name_display"":""Bob Jones""},""total_points"":5.0,""milestone"":null,""milestone_name"":null,""priority"":1,""is_blocked"":false,""blocked_note"":""""}",https://api.taiga.io/api/v1/userstories?project=1,null,2024-01-21 08:00:00.000 diff --git a/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_epics.csv b/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_epics.csv new file mode 100644 index 00000000000..2cc0768c7ca --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_epics.csv @@ -0,0 +1,3 @@ +connection_id,project_id,epic_id,ref,subject,status,is_closed,created_date,modified_date,assigned_to,assigned_to_name,color +1,1,401,1,Platform v2.0,In Progress,0,2024-01-05T09:00:00.000+00:00,2024-01-20T10:00:00.000+00:00,5,Alice Smith,#F2711C +1,1,402,2,Mobile App,Open,0,2024-01-07T11:00:00.000+00:00,2024-01-18T09:00:00.000+00:00,0,,#2185D0 diff --git a/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_issues.csv b/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_issues.csv new file mode 100644 index 00000000000..d4fa918fc72 --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_issues.csv @@ -0,0 +1,3 @@ +connection_id,project_id,issue_id,ref,subject,status,issue_type_name,priority,severity,is_closed,created_date,modified_date,finished_date,assigned_to,assigned_to_name,milestone_id +1,1,301,1,Login page crash on 
mobile,New,Bug,High,Critical,0,2024-01-14T08:00:00.000+00:00,2024-01-19T14:00:00.000+00:00,,5,Alice Smith,0 +1,1,302,2,Add dark mode support,Closed,Enhancement,Normal,Minor,1,2024-01-08T10:00:00.000+00:00,2024-01-16T13:00:00.000+00:00,2024-01-16T13:00:00.000+00:00,6,Bob Jones,1 diff --git a/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_projects.csv b/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_projects.csv new file mode 100644 index 00000000000..82e1575013d --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_projects.csv @@ -0,0 +1,2 @@ +connection_id,project_id,scope_config_id,name,slug,description,url,is_private,total_milestones,total_story_points +1,1,0,Test Project Alpha,test-project-alpha,A test project for the DevLake Taiga plugin e2e tests,,0,0,0 diff --git a/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_tasks.csv b/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_tasks.csv new file mode 100644 index 00000000000..ad8131272fa --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_tasks.csv @@ -0,0 +1,3 @@ +connection_id,project_id,task_id,ref,subject,status,is_closed,created_date,modified_date,finished_date,assigned_to,assigned_to_name,user_story_id,milestone_id,is_blocked,blocked_note +1,1,201,1,Write unit tests,In Progress,0,2024-01-16T09:00:00.000+00:00,2024-01-21T11:00:00.000+00:00,,5,Alice Smith,101,1,0, +1,1,202,2,Deploy to staging,Done,1,2024-01-12T10:00:00.000+00:00,2024-01-17T16:00:00.000+00:00,2024-01-17T16:00:00.000+00:00,7,Carol White,101,0,0, diff --git a/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_user_stories.csv b/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_user_stories.csv new file mode 100644 index 00000000000..ac053fec48a --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/_tool_taiga_user_stories.csv @@ -0,0 +1,3 @@ 
+connection_id,project_id,user_story_id,ref,subject,description,status,status_color,is_closed,created_date,modified_date,finished_date,assigned_to,assigned_to_name,total_points,milestone_id,milestone_name,priority,is_blocked,blocked_note +1,1,101,1,Setup CI/CD Pipeline,,In Progress,,0,2024-01-15T10:00:00.000+00:00,2024-01-20T15:30:00.000+00:00,,5,Alice Smith,8,1,Sprint 1,3,0, +1,1,102,2,User Authentication,,Done,,1,2024-01-10T09:00:00.000+00:00,2024-01-18T17:00:00.000+00:00,2024-01-18T17:00:00.000+00:00,6,Bob Jones,5,0,,1,0, diff --git a/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_epics.csv b/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_epics.csv new file mode 100644 index 00000000000..118e5665334 --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_epics.csv @@ -0,0 +1,3 @@ +board_id,issue_id +taiga:TaigaProject:1:1,taiga:TaigaEpic:1:401 +taiga:TaigaProject:1:1,taiga:TaigaEpic:1:402 diff --git a/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_issues.csv b/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_issues.csv new file mode 100644 index 00000000000..afc67821289 --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_issues.csv @@ -0,0 +1,3 @@ +board_id,issue_id +taiga:TaigaProject:1:1,taiga:TaigaIssue:1:301 +taiga:TaigaProject:1:1,taiga:TaigaIssue:1:302 diff --git a/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_tasks.csv b/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_tasks.csv new file mode 100644 index 00000000000..0dddfd05a27 --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_tasks.csv @@ -0,0 +1,3 @@ +board_id,issue_id +taiga:TaigaProject:1:1,taiga:TaigaTask:1:201 +taiga:TaigaProject:1:1,taiga:TaigaTask:1:202 diff --git a/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_user_stories.csv b/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_user_stories.csv new file mode 100644 
index 00000000000..d54c4ccb5e0 --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/board_issues_from_user_stories.csv @@ -0,0 +1,3 @@ +board_id,issue_id +taiga:TaigaProject:1:1,taiga:TaigaUserStory:1:101 +taiga:TaigaProject:1:1,taiga:TaigaUserStory:1:102 diff --git a/backend/plugins/taiga/e2e/snapshot_tables/boards.csv b/backend/plugins/taiga/e2e/snapshot_tables/boards.csv new file mode 100644 index 00000000000..94046502c97 --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/boards.csv @@ -0,0 +1,2 @@ +id,name,description,url,created_date,type +taiga:TaigaProject:1:1,Test Project Alpha,A test project for the DevLake Taiga plugin e2e tests,,, diff --git a/backend/plugins/taiga/e2e/snapshot_tables/issues_from_epics.csv b/backend/plugins/taiga/e2e/snapshot_tables/issues_from_epics.csv new file mode 100644 index 00000000000..bf58bfbae98 --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/issues_from_epics.csv @@ -0,0 +1,3 @@ +id,url,icon_url,issue_key,title,description,epic_key,type,original_type,status,original_status,story_point,resolution_date,created_date,updated_date,lead_time_minutes,original_estimate_minutes,time_spent_minutes,time_remaining_minutes,creator_id,creator_name,assignee_id,assignee_name,parent_issue_id,priority,severity,urgency,component,original_project,is_subtask,due_date,fix_versions +taiga:TaigaEpic:1:401,,,Platform v2.0,Platform v2.0,,,EPIC,Epic,In Progress,In Progress,,,2024-01-05T09:00:00.000+00:00,2024-01-20T10:00:00.000+00:00,,,,,,,,,,,,,,,0,, +taiga:TaigaEpic:1:402,,,Mobile App,Mobile App,,,EPIC,Epic,Open,Open,,,2024-01-07T11:00:00.000+00:00,2024-01-18T09:00:00.000+00:00,,,,,,,,,,,,,,,0,, diff --git a/backend/plugins/taiga/e2e/snapshot_tables/issues_from_issues.csv b/backend/plugins/taiga/e2e/snapshot_tables/issues_from_issues.csv new file mode 100644 index 00000000000..52da4dad986 --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/issues_from_issues.csv @@ -0,0 +1,3 @@ 
+id,url,icon_url,issue_key,title,description,epic_key,type,original_type,status,original_status,story_point,resolution_date,created_date,updated_date,lead_time_minutes,original_estimate_minutes,time_spent_minutes,time_remaining_minutes,creator_id,creator_name,assignee_id,assignee_name,parent_issue_id,priority,severity,urgency,component,original_project,is_subtask,due_date,fix_versions +taiga:TaigaIssue:1:301,,,Login page crash on mobile,Login page crash on mobile,,,BUG,Bug,New,New,,,2024-01-14T08:00:00.000+00:00,2024-01-19T14:00:00.000+00:00,,,,,,,,,,High,,,,,0,, +taiga:TaigaIssue:1:302,,,Add dark mode support,Add dark mode support,,,REQUIREMENT,Enhancement,Closed,Closed,,2024-01-16T13:00:00.000+00:00,2024-01-08T10:00:00.000+00:00,2024-01-16T13:00:00.000+00:00,,,,,,,,,,Normal,,,,,0,, diff --git a/backend/plugins/taiga/e2e/snapshot_tables/issues_from_tasks.csv b/backend/plugins/taiga/e2e/snapshot_tables/issues_from_tasks.csv new file mode 100644 index 00000000000..be3cfebb985 --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/issues_from_tasks.csv @@ -0,0 +1,3 @@ +id,url,icon_url,issue_key,title,description,epic_key,type,original_type,status,original_status,story_point,resolution_date,created_date,updated_date,lead_time_minutes,original_estimate_minutes,time_spent_minutes,time_remaining_minutes,creator_id,creator_name,assignee_id,assignee_name,parent_issue_id,priority,severity,urgency,component,original_project,is_subtask,due_date,fix_versions +taiga:TaigaTask:1:201,,,Write unit tests,Write unit tests,,,TASK,Task,In Progress,In Progress,,,2024-01-16T09:00:00.000+00:00,2024-01-21T11:00:00.000+00:00,,,,,,,,,,,,,,,0,, +taiga:TaigaTask:1:202,,,Deploy to staging,Deploy to staging,,,TASK,Task,Done,Done,,2024-01-17T16:00:00.000+00:00,2024-01-12T10:00:00.000+00:00,2024-01-17T16:00:00.000+00:00,,,,,,,,,,,,,,,0,, diff --git a/backend/plugins/taiga/e2e/snapshot_tables/issues_from_user_stories.csv 
b/backend/plugins/taiga/e2e/snapshot_tables/issues_from_user_stories.csv new file mode 100644 index 00000000000..bad31c7405c --- /dev/null +++ b/backend/plugins/taiga/e2e/snapshot_tables/issues_from_user_stories.csv @@ -0,0 +1,3 @@ +id,url,icon_url,issue_key,title,description,epic_key,type,original_type,status,original_status,story_point,resolution_date,created_date,updated_date,lead_time_minutes,original_estimate_minutes,time_spent_minutes,time_remaining_minutes,creator_id,creator_name,assignee_id,assignee_name,parent_issue_id,priority,severity,urgency,component,original_project,is_subtask,due_date,fix_versions +taiga:TaigaUserStory:1:101,,,#1,Setup CI/CD Pipeline,,,USER_STORY,User Story,TODO,In Progress,8,,2024-01-15T10:00:00.000+00:00,2024-01-20T15:30:00.000+00:00,,,,,,,5,Alice Smith,,,,,,,0,, +taiga:TaigaUserStory:1:102,,,#2,User Authentication,,,USER_STORY,User Story,DONE,Done,5,2024-01-18T17:00:00.000+00:00,2024-01-10T09:00:00.000+00:00,2024-01-18T17:00:00.000+00:00,12000,,,,,,6,Bob Jones,,,,,,,0,, diff --git a/backend/plugins/taiga/e2e/task_test.go b/backend/plugins/taiga/e2e/task_test.go new file mode 100644 index 00000000000..aefa1fb8fd2 --- /dev/null +++ b/backend/plugins/taiga/e2e/task_test.go @@ -0,0 +1,74 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package e2e + +import ( + "testing" + + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/helpers/e2ehelper" + "github.com/apache/incubator-devlake/plugins/taiga/impl" + "github.com/apache/incubator-devlake/plugins/taiga/models" + "github.com/apache/incubator-devlake/plugins/taiga/tasks" +) + +// TestTaigaTaskDataFlow verifies the full extract → convert pipeline for +// Taiga tasks: raw API JSON → _tool_taiga_tasks → ticket.Issue (type TASK). +// +// To regenerate snapshot CSVs from actual output, temporarily replace +// VerifyTableWithOptions with CreateSnapshot for each table. +func TestTaigaTaskDataFlow(t *testing.T) { + var taiga impl.Taiga + dataflowTester := e2ehelper.NewDataFlowTester(t, "taiga", taiga) + + taskData := &tasks.TaigaTaskData{ + Options: &tasks.TaigaOptions{ + ConnectionId: 1, + ProjectId: 1, + }, + } + + // ── Extraction: raw JSON → _tool_taiga_tasks ───────────────────────────── + dataflowTester.ImportCsvIntoRawTable( + "./raw_tables/_raw_taiga_api_tasks.csv", + "_raw_taiga_api_tasks", + ) + + dataflowTester.FlushTabler(&models.TaigaTask{}) + dataflowTester.Subtask(tasks.ExtractTasksMeta, taskData) + // Verify all tool-layer columns, ignoring NoPKModel timestamps and raw-data provenance. + dataflowTester.VerifyTableWithOptions(models.TaigaTask{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/_tool_taiga_tasks.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) + + // ── Conversion: _tool_taiga_tasks → ticket.Issue / ticket.BoardIssue ───── + dataflowTester.FlushTabler(&ticket.Issue{}) + dataflowTester.FlushTabler(&ticket.BoardIssue{}) + dataflowTester.Subtask(tasks.ConvertTasksMeta, taskData) + // Verify all domain-layer columns, ignoring NoPKModel timestamps and raw-data provenance. 
+ dataflowTester.VerifyTableWithOptions(ticket.Issue{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/issues_from_tasks.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) + dataflowTester.VerifyTableWithOptions(ticket.BoardIssue{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/board_issues_from_tasks.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) +} diff --git a/backend/plugins/taiga/e2e/user_story_test.go b/backend/plugins/taiga/e2e/user_story_test.go new file mode 100644 index 00000000000..1cb8553e028 --- /dev/null +++ b/backend/plugins/taiga/e2e/user_story_test.go @@ -0,0 +1,74 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package e2e + +import ( + "testing" + + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/helpers/e2ehelper" + "github.com/apache/incubator-devlake/plugins/taiga/impl" + "github.com/apache/incubator-devlake/plugins/taiga/models" + "github.com/apache/incubator-devlake/plugins/taiga/tasks" +) + +// TestTaigaUserStoryDataFlow verifies the full extract → convert pipeline +// for Taiga user stories: raw API JSON → _tool_taiga_user_stories → ticket.Issue. 
+// +// To regenerate snapshot CSVs from actual output, temporarily replace +// VerifyTableWithOptions with CreateSnapshot for each table. +func TestTaigaUserStoryDataFlow(t *testing.T) { + var taiga impl.Taiga + dataflowTester := e2ehelper.NewDataFlowTester(t, "taiga", taiga) + + taskData := &tasks.TaigaTaskData{ + Options: &tasks.TaigaOptions{ + ConnectionId: 1, + ProjectId: 1, + }, + } + + // ── Extraction: raw JSON → _tool_taiga_user_stories ───────────────────── + dataflowTester.ImportCsvIntoRawTable( + "./raw_tables/_raw_taiga_api_user_stories.csv", + "_raw_taiga_api_user_stories", + ) + + dataflowTester.FlushTabler(&models.TaigaUserStory{}) + dataflowTester.Subtask(tasks.ExtractUserStoriesMeta, taskData) + // Verify all tool-layer columns, ignoring NoPKModel timestamps and raw-data provenance. + dataflowTester.VerifyTableWithOptions(models.TaigaUserStory{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/_tool_taiga_user_stories.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) + + // ── Conversion: _tool_taiga_user_stories → ticket.Issue / ticket.BoardIssue ─ + dataflowTester.FlushTabler(&ticket.Issue{}) + dataflowTester.FlushTabler(&ticket.BoardIssue{}) + dataflowTester.Subtask(tasks.ConvertUserStoriesMeta, taskData) + // Verify all domain-layer columns, ignoring NoPKModel timestamps and raw-data provenance. 
+ dataflowTester.VerifyTableWithOptions(ticket.Issue{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/issues_from_user_stories.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) + dataflowTester.VerifyTableWithOptions(ticket.BoardIssue{}, e2ehelper.TableOptions{ + CSVRelPath: "./snapshot_tables/board_issues_from_user_stories.csv", + IgnoreTypes: []interface{}{common.NoPKModel{}}, + }) +} diff --git a/backend/plugins/taiga/impl/impl.go b/backend/plugins/taiga/impl/impl.go new file mode 100644 index 00000000000..73e6e5b9fd7 --- /dev/null +++ b/backend/plugins/taiga/impl/impl.go @@ -0,0 +1,251 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package impl + +import ( + "fmt" + + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + coreModels "github.com/apache/incubator-devlake/core/models" + "github.com/apache/incubator-devlake/core/plugin" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" + "github.com/apache/incubator-devlake/plugins/taiga/models/migrationscripts" + "github.com/apache/incubator-devlake/plugins/taiga/tasks" +) + +var _ interface { + plugin.PluginMeta + plugin.PluginInit + plugin.PluginTask + plugin.PluginApi + plugin.PluginModel + plugin.PluginMigration + plugin.DataSourcePluginBlueprintV200 + plugin.CloseablePluginTask + plugin.PluginSource +} = (*Taiga)(nil) + +type Taiga struct { +} + +func (p Taiga) Connection() dal.Tabler { + return &models.TaigaConnection{} +} + +func (p Taiga) Scope() plugin.ToolLayerScope { + return &models.TaigaProject{} +} + +func (p Taiga) ScopeConfig() dal.Tabler { + return &models.TaigaScopeConfig{} +} + +func (p Taiga) Init(basicRes context.BasicRes) errors.Error { + api.Init(basicRes, p) + return nil +} + +func (p Taiga) GetTablesInfo() []dal.Tabler { + return []dal.Tabler{ + &models.TaigaConnection{}, + &models.TaigaProject{}, + &models.TaigaUserStory{}, + &models.TaigaTask{}, + &models.TaigaIssue{}, + &models.TaigaEpic{}, + &models.TaigaScopeConfig{}, + } +} + +func (p Taiga) Description() string { + return "To collect and enrich data from Taiga" +} + +func (p Taiga) Name() string { + return "taiga" +} + +func (p Taiga) SubTaskMetas() []plugin.SubTaskMeta { + return []plugin.SubTaskMeta{ + tasks.CollectProjectsMeta, + tasks.ExtractProjectsMeta, + tasks.CollectUserStoriesMeta, + tasks.ExtractUserStoriesMeta, + tasks.CollectTasksMeta, + tasks.ExtractTasksMeta, + tasks.CollectIssuesMeta, + 
tasks.ExtractIssuesMeta, + tasks.CollectEpicsMeta, + tasks.ExtractEpicsMeta, + tasks.ConvertProjectsMeta, + tasks.ConvertUserStoriesMeta, + tasks.ConvertTasksMeta, + tasks.ConvertIssuesMeta, + tasks.ConvertEpicsMeta, + } +} + +func (p Taiga) PrepareTaskData(taskCtx plugin.TaskContext, options map[string]interface{}) (interface{}, errors.Error) { + var op tasks.TaigaOptions + var err errors.Error + logger := taskCtx.GetLogger() + logger.Debug("%v", options) + + err = helper.Decode(options, &op, nil) + if err != nil { + return nil, errors.Default.Wrap(err, "could not decode Taiga options") + } + + if op.ConnectionId == 0 { + return nil, errors.BadInput.New("taiga connectionId is invalid") + } + + connection := &models.TaigaConnection{} + connectionHelper := helper.NewConnectionHelper( + taskCtx, + nil, + p.Name(), + ) + err = connectionHelper.FirstById(connection, op.ConnectionId) + if err != nil { + return nil, errors.Default.Wrap(err, "unable to get Taiga connection") + } + + taigaApiClient, err := tasks.NewTaigaApiClient(taskCtx, connection) + if err != nil { + return nil, errors.Default.Wrap(err, "failed to create taiga api client") + } + + if op.ProjectId != 0 { + var scope *models.TaigaProject + db := taskCtx.GetDal() + err = db.First(&scope, dal.Where("connection_id = ? 
AND project_id = ?", op.ConnectionId, op.ProjectId)) + if err != nil && db.IsErrorNotFound(err) { + // Fetch from remote and save + // TODO: Implement fetching from Taiga API + return nil, errors.Default.Wrap(err, fmt.Sprintf("fail to find project: %d", op.ProjectId)) + } + if err != nil { + return nil, errors.Default.Wrap(err, fmt.Sprintf("fail to find project: %d", op.ProjectId)) + } + if op.ScopeConfigId == 0 && scope.ScopeConfigId != 0 { + op.ScopeConfigId = scope.ScopeConfigId + } + } + + if op.ScopeConfig == nil && op.ScopeConfigId != 0 { + var scopeConfig models.TaigaScopeConfig + db := taskCtx.GetDal() + err = taskCtx.GetDal().First(&scopeConfig, dal.Where("id = ?", op.ScopeConfigId)) + if err != nil && db.IsErrorNotFound(err) { + return nil, errors.BadInput.Wrap(err, "fail to get scopeConfig") + } + op.ScopeConfig = &scopeConfig + if err != nil { + return nil, errors.BadInput.Wrap(err, "fail to make scopeConfig") + } + } + + if op.ScopeConfig == nil && op.ScopeConfigId == 0 { + op.ScopeConfig = new(models.TaigaScopeConfig) + } + + // Set default page size + if op.PageSize <= 0 || op.PageSize > 100 { + op.PageSize = 100 + } + + taskData := &tasks.TaigaTaskData{ + Options: &op, + ApiClient: taigaApiClient, + } + + return taskData, nil +} + +func (p Taiga) MakeDataSourcePipelinePlanV200( + connectionId uint64, + scopes []*coreModels.BlueprintScope, +) (pp coreModels.PipelinePlan, sc []plugin.Scope, err errors.Error) { + return api.MakeDataSourcePipelinePlanV200(p.SubTaskMetas(), connectionId, scopes) +} + +func (p Taiga) RootPkgPath() string { + return "github.com/apache/incubator-devlake/plugins/taiga" +} + +func (p Taiga) MigrationScripts() []plugin.MigrationScript { + return migrationscripts.All() +} + +func (p Taiga) ApiResources() map[string]map[string]plugin.ApiResourceHandler { + return map[string]map[string]plugin.ApiResourceHandler{ + "test": { + "POST": api.TestConnection, + }, + "connections": { + "POST": api.PostConnections, + "GET": 
api.ListConnections, + }, + "connections/:connectionId": { + "PATCH": api.PatchConnection, + "DELETE": api.DeleteConnection, + "GET": api.GetConnection, + }, + "connections/:connectionId/test": { + "POST": api.TestExistingConnection, + }, + "connections/:connectionId/remote-scopes": { + "GET": api.RemoteScopes, + }, + "connections/:connectionId/scopes/:scopeId": { + "GET": api.GetScope, + "PATCH": api.UpdateScope, + "DELETE": api.DeleteScope, + }, + "connections/:connectionId/scopes": { + "GET": api.GetScopeList, + "PUT": api.PutScope, + }, + "connections/:connectionId/scope-configs": { + "POST": api.CreateScopeConfig, + "GET": api.GetScopeConfigList, + }, + "connections/:connectionId/scope-configs/:scopeConfigId": { + "PATCH": api.UpdateScopeConfig, + "GET": api.GetScopeConfig, + "DELETE": api.DeleteScopeConfig, + }, + "scope-config/:scopeConfigId/projects": { + "GET": api.GetProjectsByScopeConfig, + }, + } +} + +func (p Taiga) Close(taskCtx plugin.TaskContext) errors.Error { + data, ok := taskCtx.GetData().(*tasks.TaigaTaskData) + if !ok { + return errors.Default.New(fmt.Sprintf("GetData failed when try to close %+v", taskCtx)) + } + data.ApiClient.Release() + return nil +} diff --git a/backend/plugins/taiga/models/connection.go b/backend/plugins/taiga/models/connection.go new file mode 100644 index 00000000000..e97979331fb --- /dev/null +++ b/backend/plugins/taiga/models/connection.go @@ -0,0 +1,143 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/utils" + helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api" +) + +// TaigaConn holds the essential information to connect to the Taiga API +type TaigaConn struct { + helper.RestConnection `mapstructure:",squash"` + helper.BasicAuth `mapstructure:",squash"` + // Token is optional - can be provided directly or obtained via username/password auth + Token string `mapstructure:"token" json:"token" gorm:"serializer:encdec"` +} + +func (tc *TaigaConn) Sanitize() TaigaConn { + tc.Password = "" + tc.Token = utils.SanitizeString(tc.Token) + return *tc +} + +// SetupAuthentication sets up the HTTP request with authentication. +// If Token is set directly, use it. Otherwise exchange Username+Password for a token. 
+func (tc *TaigaConn) SetupAuthentication(req *http.Request) errors.Error { + if tc.Token != "" { + req.Header.Set("Authorization", "Bearer "+tc.Token) + return nil + } + if tc.Username != "" && tc.Password != "" { + token, err := tc.fetchToken() + if err != nil { + return err + } + req.Header.Set("Authorization", "Bearer "+token) + } + return nil +} + +// fetchToken exchanges username+password for a Taiga auth token via POST /auth +func (tc *TaigaConn) fetchToken() (string, errors.Error) { + endpoint := strings.TrimSuffix(tc.Endpoint, "/") + // the auth endpoint lives under the same API base as Endpoint: POST {endpoint}/auth + authURL := endpoint + "/auth" + + body, e := json.Marshal(map[string]string{ + "type": "normal", + "username": tc.Username, + "password": tc.Password, + }) + if e != nil { + return "", errors.Default.WrapRaw(e) + } + + resp, e := http.Post(authURL, "application/json", bytes.NewReader(body)) //nolint:noctx + if e != nil { + return "", errors.Default.WrapRaw(e) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return "", errors.Default.New(fmt.Sprintf("taiga auth failed with status %d", resp.StatusCode)) + } + + var result map[string]interface{} + if e := json.NewDecoder(resp.Body).Decode(&result); e != nil { + return "", errors.Default.WrapRaw(e) + } + // Taiga returns auth_token (v5) or token (v6) + for _, key := range []string{"auth_token", "token"} { + if t, ok := result[key]; ok { + if token, ok := t.(string); ok && token != "" { + return token, nil + } + } + } + // fallback hint: any unread response remainder (never the request body, which contains the password) + raw, _ := io.ReadAll(resp.Body) + return "", errors.Default.New(fmt.Sprintf("taiga auth response missing token field, body: %s", string(raw))) +} + +// TaigaConnection holds TaigaConn plus ID/Name for database storage +type TaigaConnection struct { + helper.BaseConnection `mapstructure:",squash"` + TaigaConn `mapstructure:",squash"` +} + +func (TaigaConnection) TableName() string { + return "_tool_taiga_connections" +} + +func 
(connection *TaigaConnection) MergeFromRequest(target *TaigaConnection, body map[string]interface{}) error { + token := target.Token + password := target.Password + + if err := helper.DecodeMapStruct(body, target, true); err != nil { + return err + } + + modifiedToken := target.Token + modifiedPassword := target.Password + + // preserve existing token if not modified + if modifiedToken == "" || modifiedToken == utils.SanitizeString(token) { + target.Token = token + } + + // preserve existing password if not modified + if modifiedPassword == "" || modifiedPassword == password { + target.Password = password + } + + return nil +} + +func (connection TaigaConnection) Sanitize() TaigaConnection { + connection.TaigaConn = connection.TaigaConn.Sanitize() + return connection +} diff --git a/backend/plugins/taiga/models/epic.go b/backend/plugins/taiga/models/epic.go new file mode 100644 index 00000000000..1c808b380f9 --- /dev/null +++ b/backend/plugins/taiga/models/epic.go @@ -0,0 +1,45 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package models + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/common" +) + +// TaigaEpic represents an epic in Taiga +type TaigaEpic struct { + common.NoPKModel + ConnectionId uint64 `gorm:"primaryKey"` + ProjectId uint64 `gorm:"index"` + EpicId uint64 `gorm:"primaryKey;autoIncrement:false" json:"id"` + Ref int `json:"ref"` + Subject string `gorm:"type:varchar(255)" json:"subject"` + Status string `gorm:"type:varchar(100)" json:"status"` + IsClosed bool `json:"isClosed"` + CreatedDate *time.Time `json:"createdDate"` + ModifiedDate *time.Time `json:"modifiedDate"` + AssignedTo uint64 `json:"assignedTo"` + AssignedToName string `gorm:"type:varchar(255)" json:"assignedToName"` + Color string `gorm:"type:varchar(20)" json:"color"` +} + +func (TaigaEpic) TableName() string { + return "_tool_taiga_epics" +} diff --git a/backend/plugins/taiga/models/issue.go b/backend/plugins/taiga/models/issue.go new file mode 100644 index 00000000000..90dda4f6503 --- /dev/null +++ b/backend/plugins/taiga/models/issue.go @@ -0,0 +1,49 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package models + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/common" +) + +// TaigaIssue represents an issue (bug/enhancement/question) in Taiga +type TaigaIssue struct { + common.NoPKModel + ConnectionId uint64 `gorm:"primaryKey"` + ProjectId uint64 `gorm:"index"` + IssueId uint64 `gorm:"primaryKey;autoIncrement:false" json:"id"` + Ref int `json:"ref"` + Subject string `gorm:"type:varchar(255)" json:"subject"` + Status string `gorm:"type:varchar(100)" json:"status"` + IssueTypeName string `gorm:"type:varchar(100)" json:"issueTypeName"` // e.g. "Bug", "Enhancement", "Question" + Priority string `gorm:"type:varchar(100)" json:"priority"` + Severity string `gorm:"type:varchar(100)" json:"severity"` + IsClosed bool `json:"isClosed"` + CreatedDate *time.Time `json:"createdDate"` + ModifiedDate *time.Time `json:"modifiedDate"` + FinishedDate *time.Time `json:"finishedDate"` + AssignedTo uint64 `json:"assignedTo"` + AssignedToName string `gorm:"type:varchar(255)" json:"assignedToName"` + MilestoneId uint64 `json:"milestoneId"` +} + +func (TaigaIssue) TableName() string { + return "_tool_taiga_issues" +} diff --git a/backend/plugins/taiga/models/migrationscripts/20250220_add_init_tables.go b/backend/plugins/taiga/models/migrationscripts/20250220_add_init_tables.go new file mode 100644 index 00000000000..3a0a6beeed3 --- /dev/null +++ b/backend/plugins/taiga/models/migrationscripts/20250220_add_init_tables.go @@ -0,0 +1,132 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "time" + + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/helpers/migrationhelper" +) + +type TaigaConnection20250220 struct { + ID uint64 `gorm:"primaryKey" json:"id"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + Name string `gorm:"type:varchar(100);uniqueIndex" json:"name"` + Endpoint string `json:"endpoint"` + Username string `json:"username"` + Password string `json:"password"` + Token string `json:"token"` + Proxy string `json:"proxy"` + RateLimitPerHour int `json:"rateLimitPerHour"` +} + +func (TaigaConnection20250220) TableName() string { + return "_tool_taiga_connections" +} + +type TaigaProject20250220 struct { + ConnectionId uint64 `gorm:"primaryKey"` + ProjectId uint64 `gorm:"primaryKey;autoIncrement:false"` + Name string `gorm:"type:varchar(255)"` + Slug string `gorm:"type:varchar(255)"` + Description string `gorm:"type:text"` + Url string `gorm:"type:varchar(255)"` + IsPrivate bool + TotalMilestones int + TotalStoryPoints float64 + CreatedAt time.Time + UpdatedAt time.Time + RawDataParams string `gorm:"column:_raw_data_params;type:varchar(255);index"` + RawDataTable string `gorm:"column:_raw_data_table;type:varchar(255)"` + RawDataId uint64 `gorm:"column:_raw_data_id"` + RawDataRemark string `gorm:"column:_raw_data_remark"` + ScopeConfigId uint64 +} + +func (TaigaProject20250220) TableName() string { + return "_tool_taiga_projects" +} + +type TaigaScopeConfig20250220 struct { + ID 
uint64 `gorm:"primaryKey;autoIncrement" json:"id"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + ConnectionId uint64 `json:"connectionId" gorm:"index"` + Name string `gorm:"type:varchar(255);uniqueIndex" json:"name"` + Entities string `gorm:"type:json" json:"entities"` +} + +func (TaigaScopeConfig20250220) TableName() string { + return "_tool_taiga_scope_configs" +} + +type TaigaUserStory20250220 struct { + ConnectionId uint64 `gorm:"primaryKey"` + ProjectId uint64 `gorm:"index"` + UserStoryId uint64 `gorm:"primaryKey;autoIncrement:false"` + Ref int + Subject string `gorm:"type:varchar(255)"` + Description string `gorm:"type:text"` + Status string `gorm:"type:varchar(100)"` + StatusColor string `gorm:"type:varchar(20)"` + IsClosed bool + CreatedDate *time.Time + ModifiedDate *time.Time + FinishedDate *time.Time + AssignedTo uint64 + AssignedToName string `gorm:"type:varchar(255)"` + TotalPoints float64 + MilestoneId uint64 + MilestoneName string `gorm:"type:varchar(255)"` + Priority int + IsBlocked bool + BlockedNote string `gorm:"type:text"` + CreatedAt time.Time + UpdatedAt time.Time + RawDataParams string `gorm:"column:_raw_data_params;type:varchar(255);index"` + RawDataTable string `gorm:"column:_raw_data_table;type:varchar(255)"` + RawDataId uint64 `gorm:"column:_raw_data_id"` + RawDataRemark string `gorm:"column:_raw_data_remark"` +} + +func (TaigaUserStory20250220) TableName() string { + return "_tool_taiga_user_stories" +} + +type addInitTables20250220 struct{} + +func (*addInitTables20250220) Up(basicRes context.BasicRes) errors.Error { + return migrationhelper.AutoMigrateTables( + basicRes, + &TaigaConnection20250220{}, + &TaigaProject20250220{}, + &TaigaScopeConfig20250220{}, + &TaigaUserStory20250220{}, + ) +} + +func (*addInitTables20250220) Version() uint64 { + return 20250220000001 +} + +func (*addInitTables20250220) Name() string { + return "Taiga init schemas" +} diff --git 
a/backend/plugins/taiga/models/migrationscripts/20260306_add_task_issue_epic_tables.go b/backend/plugins/taiga/models/migrationscripts/20260306_add_task_issue_epic_tables.go new file mode 100644 index 00000000000..ff84ba19b84 --- /dev/null +++ b/backend/plugins/taiga/models/migrationscripts/20260306_add_task_issue_epic_tables.go @@ -0,0 +1,128 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package migrationscripts + +import ( + "time" + + "github.com/apache/incubator-devlake/core/context" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/helpers/migrationhelper" +) + +type TaigaTask20260306 struct { + ConnectionId uint64 `gorm:"primaryKey"` + ProjectId uint64 `gorm:"index"` + TaskId uint64 `gorm:"primaryKey;autoIncrement:false"` + Ref int + Subject string `gorm:"type:varchar(255)"` + Status string `gorm:"type:varchar(100)"` + IsClosed bool + CreatedDate *time.Time + ModifiedDate *time.Time + FinishedDate *time.Time + AssignedTo uint64 + AssignedToName string `gorm:"type:varchar(255)"` + UserStoryId uint64 + MilestoneId uint64 + IsBlocked bool + BlockedNote string `gorm:"type:text"` + CreatedAt time.Time + UpdatedAt time.Time + RawDataParams string `gorm:"column:_raw_data_params;type:varchar(255);index"` + RawDataTable string `gorm:"column:_raw_data_table;type:varchar(255)"` + RawDataId uint64 `gorm:"column:_raw_data_id"` + RawDataRemark string `gorm:"column:_raw_data_remark"` +} + +func (TaigaTask20260306) TableName() string { + return "_tool_taiga_tasks" +} + +type TaigaIssue20260306 struct { + ConnectionId uint64 `gorm:"primaryKey"` + ProjectId uint64 `gorm:"index"` + IssueId uint64 `gorm:"primaryKey;autoIncrement:false"` + Ref int + Subject string `gorm:"type:varchar(255)"` + Status string `gorm:"type:varchar(100)"` + IssueTypeName string `gorm:"type:varchar(100)"` + Priority string `gorm:"type:varchar(100)"` + Severity string `gorm:"type:varchar(100)"` + IsClosed bool + CreatedDate *time.Time + ModifiedDate *time.Time + FinishedDate *time.Time + AssignedTo uint64 + AssignedToName string `gorm:"type:varchar(255)"` + MilestoneId uint64 + CreatedAt time.Time + UpdatedAt time.Time + RawDataParams string `gorm:"column:_raw_data_params;type:varchar(255);index"` + RawDataTable string `gorm:"column:_raw_data_table;type:varchar(255)"` + RawDataId uint64 `gorm:"column:_raw_data_id"` + RawDataRemark string 
`gorm:"column:_raw_data_remark"` +} + +func (TaigaIssue20260306) TableName() string { + return "_tool_taiga_issues" +} + +type TaigaEpic20260306 struct { + ConnectionId uint64 `gorm:"primaryKey"` + ProjectId uint64 `gorm:"index"` + EpicId uint64 `gorm:"primaryKey;autoIncrement:false"` + Ref int + Subject string `gorm:"type:varchar(255)"` + Status string `gorm:"type:varchar(100)"` + IsClosed bool + CreatedDate *time.Time + ModifiedDate *time.Time + AssignedTo uint64 + AssignedToName string `gorm:"type:varchar(255)"` + Color string `gorm:"type:varchar(20)"` + CreatedAt time.Time + UpdatedAt time.Time + RawDataParams string `gorm:"column:_raw_data_params;type:varchar(255);index"` + RawDataTable string `gorm:"column:_raw_data_table;type:varchar(255)"` + RawDataId uint64 `gorm:"column:_raw_data_id"` + RawDataRemark string `gorm:"column:_raw_data_remark"` +} + +func (TaigaEpic20260306) TableName() string { + return "_tool_taiga_epics" +} + +type addTaskIssueEpicTables20260306 struct{} + +func (*addTaskIssueEpicTables20260306) Up(basicRes context.BasicRes) errors.Error { + return migrationhelper.AutoMigrateTables( + basicRes, + &TaigaTask20260306{}, + &TaigaIssue20260306{}, + &TaigaEpic20260306{}, + ) +} + +func (*addTaskIssueEpicTables20260306) Version() uint64 { + return 20260306000001 +} + +func (*addTaskIssueEpicTables20260306) Name() string { + return "Taiga add task, issue, epic tables" +} diff --git a/backend/plugins/taiga/models/migrationscripts/register.go b/backend/plugins/taiga/models/migrationscripts/register.go new file mode 100644 index 00000000000..d2cfd08e269 --- /dev/null +++ b/backend/plugins/taiga/models/migrationscripts/register.go @@ -0,0 +1,30 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package migrationscripts + +import ( + "github.com/apache/incubator-devlake/core/plugin" +) + +// All return all the migration scripts +func All() []plugin.MigrationScript { + return []plugin.MigrationScript{ + new(addInitTables20250220), + new(addTaskIssueEpicTables20260306), + } +} diff --git a/backend/plugins/taiga/models/project.go b/backend/plugins/taiga/models/project.go new file mode 100644 index 00000000000..7b26125624e --- /dev/null +++ b/backend/plugins/taiga/models/project.go @@ -0,0 +1,63 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package models + +import ( + "fmt" + + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/core/plugin" +) + +// TaigaProject represents a Taiga project (scope) +type TaigaProject struct { + common.Scope `mapstructure:",squash"` + ProjectId uint64 `json:"projectId" gorm:"primaryKey;autoIncrement:false"` + Name string `gorm:"type:varchar(255)" json:"name"` + Slug string `gorm:"type:varchar(255)" json:"slug"` + Description string `gorm:"type:text" json:"description"` + Url string `gorm:"type:varchar(255)" json:"url"` + IsPrivate bool `json:"isPrivate"` + TotalMilestones int `json:"totalMilestones"` + TotalStoryPoints float64 `json:"totalStoryPoints"` +} + +func (p TaigaProject) ScopeId() string { + return fmt.Sprintf("%d", p.ProjectId) +} + +func (p TaigaProject) ScopeName() string { + return p.Name +} + +func (p TaigaProject) ScopeFullName() string { + return p.Name +} + +func (p TaigaProject) ScopeParams() interface{} { + return &plugin.ApiResourceInput{ + Params: map[string]string{ + "connectionId": fmt.Sprintf("%d", p.ConnectionId), + "projectId": fmt.Sprintf("%d", p.ProjectId), + }, + } +} + +func (TaigaProject) TableName() string { + return "_tool_taiga_projects" +} diff --git a/backend/plugins/taiga/models/scope_config.go b/backend/plugins/taiga/models/scope_config.go new file mode 100644 index 00000000000..c6397083bca --- /dev/null +++ b/backend/plugins/taiga/models/scope_config.go @@ -0,0 +1,53 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/common" +) + +type StatusMapping struct { + StandardStatus string `json:"standardStatus"` +} + +type StatusMappings map[string]StatusMapping + +type TypeMapping struct { + StandardType string `json:"standardType"` + StatusMappings StatusMappings `json:"statusMappings"` +} + +type TaigaScopeConfig struct { + common.ScopeConfig `mapstructure:",squash" json:",inline" gorm:"embedded"` + TypeMappings map[string]TypeMapping `mapstructure:"typeMappings,omitempty" json:"typeMappings" gorm:"type:json;serializer:json"` +} + +func (r *TaigaScopeConfig) SetConnectionId(c *TaigaScopeConfig, connectionId uint64) { + c.ConnectionId = connectionId + c.ScopeConfig.ConnectionId = connectionId +} + +func (r *TaigaScopeConfig) Validate() errors.Error { + // Add validation logic if needed + return nil +} + +func (r TaigaScopeConfig) TableName() string { + return "_tool_taiga_scope_configs" +} diff --git a/backend/plugins/taiga/models/task.go b/backend/plugins/taiga/models/task.go new file mode 100644 index 00000000000..248267cf9c2 --- /dev/null +++ b/backend/plugins/taiga/models/task.go @@ -0,0 +1,49 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/common" +) + +// TaigaTask represents a task (sub-task of a user story) in Taiga +type TaigaTask struct { + common.NoPKModel + ConnectionId uint64 `gorm:"primaryKey"` + ProjectId uint64 `gorm:"index"` + TaskId uint64 `gorm:"primaryKey;autoIncrement:false" json:"id"` + Ref int `json:"ref"` + Subject string `gorm:"type:varchar(255)" json:"subject"` + Status string `gorm:"type:varchar(100)" json:"status"` + IsClosed bool `json:"isClosed"` + CreatedDate *time.Time `json:"createdDate"` + ModifiedDate *time.Time `json:"modifiedDate"` + FinishedDate *time.Time `json:"finishedDate"` + AssignedTo uint64 `json:"assignedTo"` + AssignedToName string `gorm:"type:varchar(255)" json:"assignedToName"` + UserStoryId uint64 `json:"userStoryId"` + MilestoneId uint64 `json:"milestoneId"` + IsBlocked bool `json:"isBlocked"` + BlockedNote string `gorm:"type:text" json:"blockedNote"` +} + +func (TaigaTask) TableName() string { + return "_tool_taiga_tasks" +} diff --git a/backend/plugins/taiga/models/user_story.go b/backend/plugins/taiga/models/user_story.go new file mode 100644 index 00000000000..498584477dc --- /dev/null +++ b/backend/plugins/taiga/models/user_story.go @@ -0,0 +1,53 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. 
See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package models + +import ( + "time" + + "github.com/apache/incubator-devlake/core/models/common" +) + +// TaigaUserStory represents a user story in Taiga +type TaigaUserStory struct { + common.NoPKModel + ConnectionId uint64 `gorm:"primaryKey"` + ProjectId uint64 `gorm:"index"` + UserStoryId uint64 `gorm:"primaryKey;autoIncrement:false" json:"id"` + Ref int `json:"ref"` + Subject string `gorm:"type:varchar(255)" json:"subject"` + Description string `gorm:"type:text" json:"description"` + Status string `gorm:"type:varchar(100)" json:"status"` + StatusColor string `gorm:"type:varchar(20)" json:"statusColor"` + IsClosed bool `json:"isClosed"` + CreatedDate *time.Time `json:"createdDate"` + ModifiedDate *time.Time `json:"modifiedDate"` + FinishedDate *time.Time `json:"finishedDate"` + AssignedTo uint64 `json:"assignedTo"` + AssignedToName string `gorm:"type:varchar(255)" json:"assignedToName"` + TotalPoints float64 `json:"totalPoints"` + MilestoneId uint64 `json:"milestoneId"` + MilestoneName string `gorm:"type:varchar(255)" json:"milestoneName"` + Priority int `json:"priority"` + IsBlocked bool `json:"isBlocked"` + BlockedNote string `gorm:"type:text" json:"blockedNote"` +} + +func (TaigaUserStory) TableName() string { + return "_tool_taiga_user_stories" +} diff --git a/backend/plugins/taiga/taiga.go 
b/backend/plugins/taiga/taiga.go new file mode 100644 index 00000000000..4a6da00a8a1 --- /dev/null +++ b/backend/plugins/taiga/taiga.go @@ -0,0 +1,45 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package main // must be main for plugin entry point + +import ( + "github.com/apache/incubator-devlake/core/runner" + "github.com/apache/incubator-devlake/plugins/taiga/impl" + "github.com/spf13/cobra" +) + +// PluginEntry exports a symbol for Framework to load +var PluginEntry impl.Taiga //nolint + +// standalone mode for debugging +func main() { + cmd := &cobra.Command{Use: "taiga"} + connectionId := cmd.Flags().Uint64P("connectionId", "c", 0, "taiga connection id") + projectId := cmd.Flags().Uint64P("projectId", "p", 0, "taiga project id") + timeAfter := cmd.Flags().StringP("timeAfter", "a", "", "collect data that are created after specified time, ie 2006-01-02T15:04:05Z") + _ = cmd.MarkFlagRequired("connectionId") + _ = cmd.MarkFlagRequired("projectId") + + cmd.Run = func(cmd *cobra.Command, args []string) { + runner.DirectRun(cmd, args, PluginEntry, map[string]interface{}{ + "connectionId": *connectionId, + "projectId": *projectId, + }, *timeAfter) + } + runner.RunCmd(cmd) +} diff --git a/backend/plugins/taiga/tasks/api_client.go 
b/backend/plugins/taiga/tasks/api_client.go new file mode 100644 index 00000000000..4b722cc1a6c --- /dev/null +++ b/backend/plugins/taiga/tasks/api_client.go @@ -0,0 +1,49 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +func NewTaigaApiClient(taskCtx plugin.TaskContext, connection *models.TaigaConnection) (*api.ApiAsyncClient, errors.Error) { + // create synchronize api client + apiClient, err := api.NewApiClientFromConnection(taskCtx.GetContext(), taskCtx, connection) + if err != nil { + return nil, err + } + + // create rate limit calculator + rateLimiter := &api.ApiRateLimitCalculator{ + UserRateLimitPerHour: connection.RateLimitPerHour, + } + + asyncApiClient, err := api.CreateAsyncApiClient( + taskCtx, + apiClient, + rateLimiter, + ) + if err != nil { + return nil, err + } + + return asyncApiClient, nil +} diff --git a/backend/plugins/taiga/tasks/epic_collector.go b/backend/plugins/taiga/tasks/epic_collector.go new file mode 100644 index 00000000000..f99d48aabee --- /dev/null +++ 
b/backend/plugins/taiga/tasks/epic_collector.go @@ -0,0 +1,79 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "fmt" + "net/http" + "net/url" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" +) + +const RAW_EPIC_TABLE = "taiga_api_epics" + +var _ plugin.SubTaskEntryPoint = CollectEpics + +var CollectEpicsMeta = plugin.SubTaskMeta{ + Name: "collectEpics", + EntryPoint: CollectEpics, + EnabledByDefault: true, + Description: "collect Taiga epics", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func CollectEpics(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + logger := taskCtx.GetLogger() + logger.Info("collect epics") + + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_EPIC_TABLE, + }, + ApiClient: data.ApiClient, + PageSize: 1000, + UrlTemplate: "epics", + Query: func(reqData *api.RequestData) (url.Values, errors.Error) { + query := 
url.Values{} + query.Set("project", fmt.Sprintf("%d", data.Options.ProjectId)) + return query, nil + }, + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var result []json.RawMessage + err := api.UnmarshalResponse(res, &result) + if err != nil { + return nil, err + } + return result, nil + }, + }) + if err != nil { + logger.Error(err, "collect epics error") + return err + } + return collector.Execute() +} diff --git a/backend/plugins/taiga/tasks/epic_convertor.go b/backend/plugins/taiga/tasks/epic_convertor.go new file mode 100644 index 00000000000..ed5bb53b0fb --- /dev/null +++ b/backend/plugins/taiga/tasks/epic_convertor.go @@ -0,0 +1,106 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/domainlayer" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +var ConvertEpicsMeta = plugin.SubTaskMeta{ + Name: "convertEpics", + EntryPoint: ConvertEpics, + EnabledByDefault: true, + Description: "convert Taiga epics", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ConvertEpics(subtaskCtx plugin.SubTaskContext) errors.Error { + logger := subtaskCtx.GetLogger() + data := subtaskCtx.GetData().(*TaigaTaskData) + db := subtaskCtx.GetDal() + + epicIdGen := didgen.NewDomainIdGenerator(&models.TaigaEpic{}) + boardIdGen := didgen.NewDomainIdGenerator(&models.TaigaProject{}) + boardId := boardIdGen.Generate(data.Options.ConnectionId, data.Options.ProjectId) + + converter, err := api.NewStatefulDataConverter(&api.StatefulDataConverterArgs[models.TaigaEpic]{ + SubtaskCommonArgs: &api.SubtaskCommonArgs{ + SubTaskContext: subtaskCtx, + Table: RAW_EPIC_TABLE, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + }, + Input: func(stateManager *api.SubtaskStateManager) (dal.Rows, errors.Error) { + clauses := []dal.Clause{ + dal.Select("*"), + dal.From(&models.TaigaEpic{}), + dal.Where("connection_id = ?", data.Options.ConnectionId), + } + if stateManager.IsIncremental() { + since := stateManager.GetSince() + if since != nil { + clauses = append(clauses, dal.Where("updated_at >= ?", since)) + } + } + return db.Cursor(clauses...) 
+ }, + Convert: func(epic *models.TaigaEpic) ([]interface{}, errors.Error) { + var result []interface{} + + issue := &ticket.Issue{ + DomainEntity: domainlayer.DomainEntity{ + Id: epicIdGen.Generate(epic.ConnectionId, epic.EpicId), + }, + IssueKey: epic.Subject, + Title: epic.Subject, + Type: "EPIC", + OriginalType: "Epic", + Status: epic.Status, + OriginalStatus: epic.Status, + CreatedDate: epic.CreatedDate, + UpdatedDate: epic.ModifiedDate, + } + + result = append(result, issue) + + boardIssue := &ticket.BoardIssue{ + BoardId: boardId, + IssueId: issue.Id, + } + result = append(result, boardIssue) + + logger.Debug("converted epic %d", epic.EpicId) + return result, nil + }, + }) + + if err != nil { + return err + } + + return converter.Execute() +} diff --git a/backend/plugins/taiga/tasks/epic_extractor.go b/backend/plugins/taiga/tasks/epic_extractor.go new file mode 100644 index 00000000000..6acd76dacf9 --- /dev/null +++ b/backend/plugins/taiga/tasks/epic_extractor.go @@ -0,0 +1,106 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + "time" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractEpics + +var ExtractEpicsMeta = plugin.SubTaskMeta{ + Name: "extractEpics", + EntryPoint: ExtractEpics, + EnabledByDefault: true, + Description: "extract Taiga epics", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ExtractEpics(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_EPIC_TABLE, + }, + Extract: func(row *api.RawData) ([]interface{}, errors.Error) { + var apiEpic struct { + Id uint64 `json:"id"` + Ref int `json:"ref"` + Subject string `json:"subject"` + StatusExtraInfo struct { + Name string `json:"name"` + } `json:"status_extra_info"` + IsClosed bool `json:"is_closed"` + CreatedDate *time.Time `json:"created_date"` + ModifiedDate *time.Time `json:"modified_date"` + AssignedTo *uint64 `json:"assigned_to"` + AssignedToExtraInfo *struct { + FullNameDisplay string `json:"full_name_display"` + } `json:"assigned_to_extra_info"` + Color string `json:"color"` + } + err := json.Unmarshal(row.Data, &apiEpic) + if err != nil { + return nil, errors.Default.Wrap(err, "error unmarshalling epic") + } + + var assignedTo uint64 + var assignedToName string + if apiEpic.AssignedTo != nil { + assignedTo = *apiEpic.AssignedTo + } + if apiEpic.AssignedToExtraInfo != nil { + assignedToName = apiEpic.AssignedToExtraInfo.FullNameDisplay + } + + epic := &models.TaigaEpic{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, 
+ EpicId: apiEpic.Id, + Ref: apiEpic.Ref, + Subject: apiEpic.Subject, + Status: apiEpic.StatusExtraInfo.Name, + IsClosed: apiEpic.IsClosed, + CreatedDate: apiEpic.CreatedDate, + ModifiedDate: apiEpic.ModifiedDate, + AssignedTo: assignedTo, + AssignedToName: assignedToName, + Color: apiEpic.Color, + } + + return []interface{}{epic}, nil + }, + }) + + if err != nil { + return err + } + + return extractor.Execute() +} diff --git a/backend/plugins/taiga/tasks/issue_collector.go b/backend/plugins/taiga/tasks/issue_collector.go new file mode 100644 index 00000000000..cf79d539901 --- /dev/null +++ b/backend/plugins/taiga/tasks/issue_collector.go @@ -0,0 +1,79 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + "fmt" + "net/http" + "net/url" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" +) + +const RAW_ISSUE_TABLE = "taiga_api_issues" + +var _ plugin.SubTaskEntryPoint = CollectIssues + +var CollectIssuesMeta = plugin.SubTaskMeta{ + Name: "collectIssues", + EntryPoint: CollectIssues, + EnabledByDefault: true, + Description: "collect Taiga issues", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func CollectIssues(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + logger := taskCtx.GetLogger() + logger.Info("collect issues") + + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_ISSUE_TABLE, + }, + ApiClient: data.ApiClient, + PageSize: 1000, + UrlTemplate: "issues", + Query: func(reqData *api.RequestData) (url.Values, errors.Error) { + query := url.Values{} + query.Set("project", fmt.Sprintf("%d", data.Options.ProjectId)) + return query, nil + }, + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var result []json.RawMessage + err := api.UnmarshalResponse(res, &result) + if err != nil { + return nil, err + } + return result, nil + }, + }) + if err != nil { + logger.Error(err, "collect issues error") + return err + } + return collector.Execute() +} diff --git a/backend/plugins/taiga/tasks/issue_convertor.go b/backend/plugins/taiga/tasks/issue_convertor.go new file mode 100644 index 00000000000..5612a3b588c --- /dev/null +++ b/backend/plugins/taiga/tasks/issue_convertor.go @@ -0,0 +1,132 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. 
See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "strings" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/domainlayer" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +var ConvertIssuesMeta = plugin.SubTaskMeta{ + Name: "convertIssues", + EntryPoint: ConvertIssues, + EnabledByDefault: true, + Description: "convert Taiga issues", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +// taigaIssueTypeToDevLake maps Taiga's customizable issue type names to +// DevLake standard issue types. Taiga's default types are Bug, Enhancement, +// and Question; additional custom types fall back to REQUIREMENT. 
+func taigaIssueTypeToDevLake(typeName string) string { + switch strings.ToLower(typeName) { + case "bug": + return "BUG" + case "enhancement", "feature": + return "REQUIREMENT" + case "question": + return "QUESTION" + default: + return "REQUIREMENT" + } +} + +func ConvertIssues(subtaskCtx plugin.SubTaskContext) errors.Error { + logger := subtaskCtx.GetLogger() + data := subtaskCtx.GetData().(*TaigaTaskData) + db := subtaskCtx.GetDal() + + issueIdGen := didgen.NewDomainIdGenerator(&models.TaigaIssue{}) + boardIdGen := didgen.NewDomainIdGenerator(&models.TaigaProject{}) + boardId := boardIdGen.Generate(data.Options.ConnectionId, data.Options.ProjectId) + + converter, err := api.NewStatefulDataConverter(&api.StatefulDataConverterArgs[models.TaigaIssue]{ + SubtaskCommonArgs: &api.SubtaskCommonArgs{ + SubTaskContext: subtaskCtx, + Table: RAW_ISSUE_TABLE, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + }, + Input: func(stateManager *api.SubtaskStateManager) (dal.Rows, errors.Error) { + clauses := []dal.Clause{ + dal.Select("*"), + dal.From(&models.TaigaIssue{}), + dal.Where("connection_id = ?", data.Options.ConnectionId), + } + if stateManager.IsIncremental() { + since := stateManager.GetSince() + if since != nil { + clauses = append(clauses, dal.Where("updated_at >= ?", since)) + } + } + return db.Cursor(clauses...) 
+ }, + Convert: func(taigaIssue *models.TaigaIssue) ([]interface{}, errors.Error) { + var result []interface{} + + devLakeType := taigaIssueTypeToDevLake(taigaIssue.IssueTypeName) + originalType := taigaIssue.IssueTypeName + if originalType == "" { + originalType = "Issue" + } + + issue := &ticket.Issue{ + DomainEntity: domainlayer.DomainEntity{ + Id: issueIdGen.Generate(taigaIssue.ConnectionId, taigaIssue.IssueId), + }, + IssueKey: taigaIssue.Subject, + Title: taigaIssue.Subject, + Type: devLakeType, + OriginalType: originalType, + Status: taigaIssue.Status, + OriginalStatus: taigaIssue.Status, + Priority: taigaIssue.Priority, + CreatedDate: taigaIssue.CreatedDate, + UpdatedDate: taigaIssue.ModifiedDate, + ResolutionDate: taigaIssue.FinishedDate, + } + + result = append(result, issue) + + boardIssue := &ticket.BoardIssue{ + BoardId: boardId, + IssueId: issue.Id, + } + result = append(result, boardIssue) + + logger.Debug("converted issue %d (type: %s → %s)", taigaIssue.IssueId, originalType, devLakeType) + return result, nil + }, + }) + + if err != nil { + return err + } + + return converter.Execute() +} diff --git a/backend/plugins/taiga/tasks/issue_extractor.go b/backend/plugins/taiga/tasks/issue_extractor.go new file mode 100644 index 00000000000..f2dd54ca995 --- /dev/null +++ b/backend/plugins/taiga/tasks/issue_extractor.go @@ -0,0 +1,136 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "time" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractIssues + +var ExtractIssuesMeta = plugin.SubTaskMeta{ + Name: "extractIssues", + EntryPoint: ExtractIssues, + EnabledByDefault: true, + Description: "extract Taiga issues", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ExtractIssues(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_ISSUE_TABLE, + }, + Extract: func(row *api.RawData) ([]interface{}, errors.Error) { + var apiIssue struct { + Id uint64 `json:"id"` + Ref int `json:"ref"` + Subject string `json:"subject"` + StatusExtraInfo struct { + Name string `json:"name"` + } `json:"status_extra_info"` + TypeExtraInfo *struct { + Name string `json:"name"` + } `json:"type_extra_info"` + PriorityExtraInfo *struct { + Name string `json:"name"` + } `json:"priority_extra_info"` + SeverityExtraInfo *struct { + Name string `json:"name"` + } `json:"severity_extra_info"` + IsClosed bool `json:"is_closed"` + CreatedDate *time.Time `json:"created_date"` + ModifiedDate *time.Time `json:"modified_date"` + 
FinishedDate *time.Time `json:"finished_date"` + AssignedTo *uint64 `json:"assigned_to"` + AssignedToExtraInfo *struct { + FullNameDisplay string `json:"full_name_display"` + } `json:"assigned_to_extra_info"` + Milestone *uint64 `json:"milestone"` + } + err := json.Unmarshal(row.Data, &apiIssue) + if err != nil { + return nil, errors.Default.Wrap(err, "error unmarshalling issue") + } + + var assignedTo uint64 + var assignedToName string + if apiIssue.AssignedTo != nil { + assignedTo = *apiIssue.AssignedTo + } + if apiIssue.AssignedToExtraInfo != nil { + assignedToName = apiIssue.AssignedToExtraInfo.FullNameDisplay + } + var issueTypeName string + if apiIssue.TypeExtraInfo != nil { + issueTypeName = apiIssue.TypeExtraInfo.Name + } + var priority string + if apiIssue.PriorityExtraInfo != nil { + priority = apiIssue.PriorityExtraInfo.Name + } + var severity string + if apiIssue.SeverityExtraInfo != nil { + severity = apiIssue.SeverityExtraInfo.Name + } + var milestoneId uint64 + if apiIssue.Milestone != nil { + milestoneId = *apiIssue.Milestone + } + + issue := &models.TaigaIssue{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + IssueId: apiIssue.Id, + Ref: apiIssue.Ref, + Subject: apiIssue.Subject, + Status: apiIssue.StatusExtraInfo.Name, + IssueTypeName: issueTypeName, + Priority: priority, + Severity: severity, + IsClosed: apiIssue.IsClosed, + CreatedDate: apiIssue.CreatedDate, + ModifiedDate: apiIssue.ModifiedDate, + FinishedDate: apiIssue.FinishedDate, + AssignedTo: assignedTo, + AssignedToName: assignedToName, + MilestoneId: milestoneId, + } + + return []interface{}{issue}, nil + }, + }) + + if err != nil { + return err + } + + return extractor.Execute() +} diff --git a/backend/plugins/taiga/tasks/project_collector.go b/backend/plugins/taiga/tasks/project_collector.go new file mode 100644 index 00000000000..4a6462b40f6 --- /dev/null +++ b/backend/plugins/taiga/tasks/project_collector.go @@ -0,0 +1,76 @@ +/* +Licensed to the 
Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "net/http" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" +) + +const RAW_PROJECT_TABLE = "taiga_api_projects" + +var _ plugin.SubTaskEntryPoint = CollectProjects + +var CollectProjectsMeta = plugin.SubTaskMeta{ + Name: "collectProjects", + EntryPoint: CollectProjects, + EnabledByDefault: true, + Description: "collect Taiga projects", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func CollectProjects(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + logger := taskCtx.GetLogger() + logger.Info("collect projects") + + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_PROJECT_TABLE, + }, + ApiClient: data.ApiClient, + UrlTemplate: "projects/{{ .Params.ProjectId }}", + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var result json.RawMessage + err := api.UnmarshalResponse(res, &result) 
+ if err != nil { + return nil, err + } + return []json.RawMessage{result}, nil + }, + }) + if err != nil { + logger.Error(err, "collect project error") + return err + } + return collector.Execute() +} + +type TaigaApiParams struct { + ConnectionId uint64 + ProjectId uint64 +} diff --git a/backend/plugins/taiga/tasks/project_convertor.go b/backend/plugins/taiga/tasks/project_convertor.go new file mode 100644 index 00000000000..032831d0717 --- /dev/null +++ b/backend/plugins/taiga/tasks/project_convertor.go @@ -0,0 +1,88 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "reflect" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/domainlayer" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +var ConvertProjectsMeta = plugin.SubTaskMeta{ + Name: "convertProjects", + EntryPoint: ConvertProjects, + EnabledByDefault: true, + Description: "convert Taiga projects", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ConvertProjects(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + logger := taskCtx.GetLogger() + db := taskCtx.GetDal() + logger.Info("convert project:%d", data.Options.ProjectId) + + idGen := didgen.NewDomainIdGenerator(&models.TaigaProject{}) + clauses := []dal.Clause{ + dal.Select("*"), + dal.From(&models.TaigaProject{}), + dal.Where("connection_id = ? AND project_id = ?", data.Options.ConnectionId, data.Options.ProjectId), + } + cursor, err := db.Cursor(clauses...) 
+ if err != nil { + return err + } + defer cursor.Close() + + converter, err := api.NewDataConverter(api.DataConverterArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_PROJECT_TABLE, + }, + InputRowType: reflect.TypeOf(models.TaigaProject{}), + Input: cursor, + Convert: func(inputRow interface{}) ([]interface{}, errors.Error) { + project := inputRow.(*models.TaigaProject) + domainBoard := &ticket.Board{ + DomainEntity: domainlayer.DomainEntity{Id: idGen.Generate(data.Options.ConnectionId, data.Options.ProjectId)}, + Name: project.Name, + Description: project.Description, + Url: project.Url, + } + return []interface{}{ + domainBoard, + }, nil + }, + }) + if err != nil { + return err + } + + return converter.Execute() +} diff --git a/backend/plugins/taiga/tasks/project_extractor.go b/backend/plugins/taiga/tasks/project_extractor.go new file mode 100644 index 00000000000..669e34760a8 --- /dev/null +++ b/backend/plugins/taiga/tasks/project_extractor.go @@ -0,0 +1,84 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "encoding/json" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/common" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractProjects + +var ExtractProjectsMeta = plugin.SubTaskMeta{ + Name: "extractProjects", + EntryPoint: ExtractProjects, + EnabledByDefault: true, + Description: "extract Taiga projects", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ExtractProjects(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_PROJECT_TABLE, + }, + Extract: func(row *api.RawData) ([]interface{}, errors.Error) { + var apiProject struct { + Id uint64 `json:"id"` + Name string `json:"name"` + Slug string `json:"slug"` + Description string `json:"description"` + CreatedDate string `json:"created_date"` + ModifiedDate string `json:"modified_date"` + } + err := json.Unmarshal(row.Data, &apiProject) + if err != nil { + return nil, errors.Default.Wrap(err, "error unmarshalling project") + } + + project := &models.TaigaProject{ + Scope: common.Scope{ + ConnectionId: data.Options.ConnectionId, + }, + ProjectId: apiProject.Id, + Name: apiProject.Name, + Slug: apiProject.Slug, + Description: apiProject.Description, + } + + return []interface{}{project}, nil + }, + }) + + if err != nil { + return err + } + + return extractor.Execute() +} diff --git a/backend/plugins/taiga/tasks/task_collector.go b/backend/plugins/taiga/tasks/task_collector.go new file mode 100644 index 00000000000..fabec6e39a4 --- /dev/null +++ 
b/backend/plugins/taiga/tasks/task_collector.go @@ -0,0 +1,79 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "fmt" + "net/http" + "net/url" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" +) + +const RAW_TASK_TABLE = "taiga_api_tasks" + +var _ plugin.SubTaskEntryPoint = CollectTasks + +var CollectTasksMeta = plugin.SubTaskMeta{ + Name: "collectTasks", + EntryPoint: CollectTasks, + EnabledByDefault: true, + Description: "collect Taiga tasks", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func CollectTasks(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + logger := taskCtx.GetLogger() + logger.Info("collect tasks") + + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_TASK_TABLE, + }, + ApiClient: data.ApiClient, + PageSize: 1000, + UrlTemplate: "tasks", + Query: func(reqData *api.RequestData) (url.Values, errors.Error) { + query := 
url.Values{} + query.Set("project", fmt.Sprintf("%d", data.Options.ProjectId)) + return query, nil + }, + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var result []json.RawMessage + err := api.UnmarshalResponse(res, &result) + if err != nil { + return nil, err + } + return result, nil + }, + }) + if err != nil { + logger.Error(err, "collect tasks error") + return err + } + return collector.Execute() +} diff --git a/backend/plugins/taiga/tasks/task_convertor.go b/backend/plugins/taiga/tasks/task_convertor.go new file mode 100644 index 00000000000..f13b51686db --- /dev/null +++ b/backend/plugins/taiga/tasks/task_convertor.go @@ -0,0 +1,107 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/domainlayer" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +var ConvertTasksMeta = plugin.SubTaskMeta{ + Name: "convertTasks", + EntryPoint: ConvertTasks, + EnabledByDefault: true, + Description: "convert Taiga tasks", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ConvertTasks(subtaskCtx plugin.SubTaskContext) errors.Error { + logger := subtaskCtx.GetLogger() + data := subtaskCtx.GetData().(*TaigaTaskData) + db := subtaskCtx.GetDal() + + taskIdGen := didgen.NewDomainIdGenerator(&models.TaigaTask{}) + boardIdGen := didgen.NewDomainIdGenerator(&models.TaigaProject{}) + boardId := boardIdGen.Generate(data.Options.ConnectionId, data.Options.ProjectId) + + converter, err := api.NewStatefulDataConverter(&api.StatefulDataConverterArgs[models.TaigaTask]{ + SubtaskCommonArgs: &api.SubtaskCommonArgs{ + SubTaskContext: subtaskCtx, + Table: RAW_TASK_TABLE, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + }, + Input: func(stateManager *api.SubtaskStateManager) (dal.Rows, errors.Error) { + clauses := []dal.Clause{ + dal.Select("*"), + dal.From(&models.TaigaTask{}), + dal.Where("connection_id = ?", data.Options.ConnectionId), + } + if stateManager.IsIncremental() { + since := stateManager.GetSince() + if since != nil { + clauses = append(clauses, dal.Where("updated_at >= ?", since)) + } + } + return db.Cursor(clauses...) 
+ }, + Convert: func(task *models.TaigaTask) ([]interface{}, errors.Error) { + var result []interface{} + + issue := &ticket.Issue{ + DomainEntity: domainlayer.DomainEntity{ + Id: taskIdGen.Generate(task.ConnectionId, task.TaskId), + }, + IssueKey: task.Subject, + Title: task.Subject, + Type: "TASK", + OriginalType: "Task", + Status: task.Status, + OriginalStatus: task.Status, + CreatedDate: task.CreatedDate, + UpdatedDate: task.ModifiedDate, + ResolutionDate: task.FinishedDate, + } + + result = append(result, issue) + + boardIssue := &ticket.BoardIssue{ + BoardId: boardId, + IssueId: issue.Id, + } + result = append(result, boardIssue) + + logger.Debug("converted task %d", task.TaskId) + return result, nil + }, + }) + + if err != nil { + return err + } + + return converter.Execute() +} diff --git a/backend/plugins/taiga/tasks/task_data.go b/backend/plugins/taiga/tasks/task_data.go new file mode 100644 index 00000000000..dd6b3d58bb0 --- /dev/null +++ b/backend/plugins/taiga/tasks/task_data.go @@ -0,0 +1,54 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "fmt" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +type TaigaOptions struct { + ConnectionId uint64 `json:"connectionId" mapstructure:"connectionId"` + ProjectId uint64 `json:"projectId" mapstructure:"projectId"` + ScopeConfig *models.TaigaScopeConfig `json:"scopeConfig" mapstructure:"scopeConfig"` + ScopeConfigId uint64 `json:"scopeConfigId" mapstructure:"scopeConfigId"` + PageSize int `json:"pageSize" mapstructure:"pageSize"` +} + +type TaigaTaskData struct { + Options *TaigaOptions + ApiClient *api.ApiAsyncClient +} + +func DecodeAndValidateTaskOptions(options map[string]interface{}) (*TaigaOptions, errors.Error) { + var op TaigaOptions + err := api.Decode(options, &op, nil) + if err != nil { + return nil, err + } + if op.ConnectionId == 0 { + return nil, errors.BadInput.New(fmt.Sprintf("invalid connectionId:%d", op.ConnectionId)) + } + if op.ProjectId == 0 { + return nil, errors.BadInput.New(fmt.Sprintf("invalid projectId:%d", op.ProjectId)) + } + return &op, nil +} diff --git a/backend/plugins/taiga/tasks/task_extractor.go b/backend/plugins/taiga/tasks/task_extractor.go new file mode 100644 index 00000000000..29bece08e12 --- /dev/null +++ b/backend/plugins/taiga/tasks/task_extractor.go @@ -0,0 +1,122 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "time" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractTasks + +var ExtractTasksMeta = plugin.SubTaskMeta{ + Name: "extractTasks", + EntryPoint: ExtractTasks, + EnabledByDefault: true, + Description: "extract Taiga tasks", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ExtractTasks(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_TASK_TABLE, + }, + Extract: func(row *api.RawData) ([]interface{}, errors.Error) { + var apiTask struct { + Id uint64 `json:"id"` + Ref int `json:"ref"` + Subject string `json:"subject"` + StatusExtraInfo struct { + Name string `json:"name"` + } `json:"status_extra_info"` + IsClosed bool `json:"is_closed"` + CreatedDate *time.Time `json:"created_date"` + ModifiedDate *time.Time `json:"modified_date"` + FinishedDate *time.Time `json:"finished_date"` + AssignedTo *uint64 `json:"assigned_to"` + AssignedToExtraInfo *struct { + FullNameDisplay string `json:"full_name_display"` + } `json:"assigned_to_extra_info"` + UserStory *uint64 `json:"user_story"` + Milestone *uint64 
`json:"milestone"` + IsBlocked bool `json:"is_blocked"` + BlockedNote string `json:"blocked_note"` + } + err := json.Unmarshal(row.Data, &apiTask) + if err != nil { + return nil, errors.Default.Wrap(err, "error unmarshalling task") + } + + var assignedTo uint64 + var assignedToName string + if apiTask.AssignedTo != nil { + assignedTo = *apiTask.AssignedTo + } + if apiTask.AssignedToExtraInfo != nil { + assignedToName = apiTask.AssignedToExtraInfo.FullNameDisplay + } + var userStoryId uint64 + if apiTask.UserStory != nil { + userStoryId = *apiTask.UserStory + } + var milestoneId uint64 + if apiTask.Milestone != nil { + milestoneId = *apiTask.Milestone + } + + task := &models.TaigaTask{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + TaskId: apiTask.Id, + Ref: apiTask.Ref, + Subject: apiTask.Subject, + Status: apiTask.StatusExtraInfo.Name, + IsClosed: apiTask.IsClosed, + CreatedDate: apiTask.CreatedDate, + ModifiedDate: apiTask.ModifiedDate, + FinishedDate: apiTask.FinishedDate, + AssignedTo: assignedTo, + AssignedToName: assignedToName, + UserStoryId: userStoryId, + MilestoneId: milestoneId, + IsBlocked: apiTask.IsBlocked, + BlockedNote: apiTask.BlockedNote, + } + + return []interface{}{task}, nil + }, + }) + + if err != nil { + return err + } + + return extractor.Execute() +} diff --git a/backend/plugins/taiga/tasks/user_story_collector.go b/backend/plugins/taiga/tasks/user_story_collector.go new file mode 100644 index 00000000000..80f3ec7a67d --- /dev/null +++ b/backend/plugins/taiga/tasks/user_story_collector.go @@ -0,0 +1,80 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "fmt" + "net/http" + "net/url" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" +) + +const RAW_USER_STORY_TABLE = "taiga_api_user_stories" + +var _ plugin.SubTaskEntryPoint = CollectUserStories + +var CollectUserStoriesMeta = plugin.SubTaskMeta{ + Name: "collectUserStories", + EntryPoint: CollectUserStories, + EnabledByDefault: true, + Description: "collect Taiga user stories", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func CollectUserStories(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + logger := taskCtx.GetLogger() + logger.Info("collect user stories") + + collector, err := api.NewApiCollector(api.ApiCollectorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_USER_STORY_TABLE, + }, + ApiClient: data.ApiClient, + PageSize: 1000, // Fetch all in one page - Taiga returns all user stories for a project + UrlTemplate: "userstories", + Query: func(reqData *api.RequestData) (url.Values, errors.Error) { + query := url.Values{} + query.Set("project", fmt.Sprintf("%d", data.Options.ProjectId)) + // Don't specify page - get all results + return query, nil + }, + ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) { + var result []json.RawMessage + err := api.UnmarshalResponse(res, 
&result) + if err != nil { + return nil, err + } + return result, nil + }, + }) + if err != nil { + logger.Error(err, "collect user stories error") + return err + } + return collector.Execute() +} diff --git a/backend/plugins/taiga/tasks/user_story_convertor.go b/backend/plugins/taiga/tasks/user_story_convertor.go new file mode 100644 index 00000000000..963a25ae18e --- /dev/null +++ b/backend/plugins/taiga/tasks/user_story_convertor.go @@ -0,0 +1,129 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package tasks + +import ( + "fmt" + + "github.com/apache/incubator-devlake/core/dal" + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/models/domainlayer" + "github.com/apache/incubator-devlake/core/models/domainlayer/didgen" + "github.com/apache/incubator-devlake/core/models/domainlayer/ticket" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +var ConvertUserStoriesMeta = plugin.SubTaskMeta{ + Name: "convertUserStories", + EntryPoint: ConvertUserStories, + EnabledByDefault: true, + Description: "convert Taiga user stories", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ConvertUserStories(subtaskCtx plugin.SubTaskContext) errors.Error { + logger := subtaskCtx.GetLogger() + data := subtaskCtx.GetData().(*TaigaTaskData) + db := subtaskCtx.GetDal() + + issueIdGen := didgen.NewDomainIdGenerator(&models.TaigaUserStory{}) + boardIdGen := didgen.NewDomainIdGenerator(&models.TaigaProject{}) + boardId := boardIdGen.Generate(data.Options.ConnectionId, data.Options.ProjectId) + + converter, err := api.NewStatefulDataConverter(&api.StatefulDataConverterArgs[models.TaigaUserStory]{ + SubtaskCommonArgs: &api.SubtaskCommonArgs{ + SubTaskContext: subtaskCtx, + Table: RAW_USER_STORY_TABLE, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + }, + Input: func(stateManager *api.SubtaskStateManager) (dal.Rows, errors.Error) { + clauses := []dal.Clause{ + dal.Select("*"), + dal.From(&models.TaigaUserStory{}), + dal.Where("connection_id = ?", data.Options.ConnectionId), + } + if stateManager.IsIncremental() { + since := stateManager.GetSince() + if since != nil { + clauses = append(clauses, dal.Where("updated_at >= ?", since)) + } + } + return db.Cursor(clauses...) 
+ }, + Convert: func(userStory *models.TaigaUserStory) ([]interface{}, errors.Error) { + var result []interface{} + + // Map Taiga is_closed to DevLake standard status + status := "TODO" + if userStory.IsClosed { + status = "DONE" + } + + issue := &ticket.Issue{ + DomainEntity: domainlayer.DomainEntity{ + Id: issueIdGen.Generate(userStory.ConnectionId, userStory.UserStoryId), + }, + IssueKey: fmt.Sprintf("#%d", userStory.Ref), + Title: userStory.Subject, + Type: "USER_STORY", + OriginalType: "User Story", + Status: status, + OriginalStatus: userStory.Status, + CreatedDate: userStory.CreatedDate, + UpdatedDate: userStory.ModifiedDate, + ResolutionDate: userStory.FinishedDate, + AssigneeId: fmt.Sprintf("%d", userStory.AssignedTo), + AssigneeName: userStory.AssignedToName, + } + + if userStory.TotalPoints > 0 { + issue.StoryPoint = &userStory.TotalPoints + } + + // Calculate lead time: creation → resolution for closed stories + if userStory.IsClosed && issue.CreatedDate != nil && issue.ResolutionDate != nil { + leadTimeMinutes := uint(issue.ResolutionDate.Sub(*issue.CreatedDate).Minutes()) + if leadTimeMinutes > 0 { + issue.LeadTimeMinutes = &leadTimeMinutes + } + } + + result = append(result, issue) + + boardIssue := &ticket.BoardIssue{ + BoardId: boardId, + IssueId: issue.Id, + } + result = append(result, boardIssue) + + logger.Debug("converted user story %d", userStory.UserStoryId) + return result, nil + }, + }) + + if err != nil { + return err + } + + return converter.Execute() +} diff --git a/backend/plugins/taiga/tasks/user_story_extractor.go b/backend/plugins/taiga/tasks/user_story_extractor.go new file mode 100644 index 00000000000..642981e2076 --- /dev/null +++ b/backend/plugins/taiga/tasks/user_story_extractor.go @@ -0,0 +1,135 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. 
+The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package tasks + +import ( + "encoding/json" + "time" + + "github.com/apache/incubator-devlake/core/errors" + "github.com/apache/incubator-devlake/core/plugin" + "github.com/apache/incubator-devlake/helpers/pluginhelper/api" + "github.com/apache/incubator-devlake/plugins/taiga/models" +) + +var _ plugin.SubTaskEntryPoint = ExtractUserStories + +var ExtractUserStoriesMeta = plugin.SubTaskMeta{ + Name: "extractUserStories", + EntryPoint: ExtractUserStories, + EnabledByDefault: true, + Description: "extract Taiga user stories", + DomainTypes: []string{plugin.DOMAIN_TYPE_TICKET}, +} + +func ExtractUserStories(taskCtx plugin.SubTaskContext) errors.Error { + data := taskCtx.GetData().(*TaigaTaskData) + extractor, err := api.NewApiExtractor(api.ApiExtractorArgs{ + RawDataSubTaskArgs: api.RawDataSubTaskArgs{ + Ctx: taskCtx, + Params: TaigaApiParams{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + }, + Table: RAW_USER_STORY_TABLE, + }, + Extract: func(row *api.RawData) ([]interface{}, errors.Error) { + var apiUserStory struct { + Id uint64 `json:"id"` + Ref int `json:"ref"` + Subject string `json:"subject"` + Status uint64 `json:"status"` + StatusExtraInfo struct { + Name string `json:"name"` + } `json:"status_extra_info"` + IsClosed bool `json:"is_closed"` + CreatedDate *time.Time `json:"created_date"` + ModifiedDate *time.Time `json:"modified_date"` + FinishDate *time.Time 
`json:"finish_date"` + AssignedTo *uint64 `json:"assigned_to"` + AssignedToExtraInfo *struct { + FullNameDisplay string `json:"full_name_display"` + } `json:"assigned_to_extra_info"` + TotalPoints *float64 `json:"total_points"` + MilestoneId *uint64 `json:"milestone"` + MilestoneName *string `json:"milestone_name"` + Priority *int `json:"priority"` + IsBlocked bool `json:"is_blocked"` + BlockedNote string `json:"blocked_note"` + } + err := json.Unmarshal(row.Data, &apiUserStory) + if err != nil { + return nil, errors.Default.Wrap(err, "error unmarshalling user story") + } + + var assignedTo uint64 + var assignedToName string + if apiUserStory.AssignedTo != nil { + assignedTo = *apiUserStory.AssignedTo + } + if apiUserStory.AssignedToExtraInfo != nil { + assignedToName = apiUserStory.AssignedToExtraInfo.FullNameDisplay + } + var totalPoints float64 + if apiUserStory.TotalPoints != nil { + totalPoints = *apiUserStory.TotalPoints + } + var milestoneId uint64 + if apiUserStory.MilestoneId != nil { + milestoneId = *apiUserStory.MilestoneId + } + var milestoneName string + if apiUserStory.MilestoneName != nil { + milestoneName = *apiUserStory.MilestoneName + } + var priority int + if apiUserStory.Priority != nil { + priority = *apiUserStory.Priority + } + + userStory := &models.TaigaUserStory{ + ConnectionId: data.Options.ConnectionId, + ProjectId: data.Options.ProjectId, + UserStoryId: apiUserStory.Id, + Ref: apiUserStory.Ref, + Subject: apiUserStory.Subject, + Status: apiUserStory.StatusExtraInfo.Name, + IsClosed: apiUserStory.IsClosed, + CreatedDate: apiUserStory.CreatedDate, + ModifiedDate: apiUserStory.ModifiedDate, + FinishedDate: apiUserStory.FinishDate, + AssignedTo: assignedTo, + AssignedToName: assignedToName, + TotalPoints: totalPoints, + MilestoneId: milestoneId, + MilestoneName: milestoneName, + Priority: priority, + IsBlocked: apiUserStory.IsBlocked, + BlockedNote: apiUserStory.BlockedNote, + } + + return []interface{}{userStory}, nil + }, + }) + + if err 
!= nil { + return err + } + + return extractor.Execute() +} diff --git a/grafana/dashboards/Taiga.json b/grafana/dashboards/Taiga.json new file mode 100644 index 00000000000..7e1263b43d7 --- /dev/null +++ b/grafana/dashboards/Taiga.json @@ -0,0 +1,2316 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [ + { + "asDropdown": false, + "icon": "bolt", + "includeVars": false, + "keepTime": true, + "tags": [], + "targetBlank": false, + "title": "Homepage", + "tooltip": "", + "type": "link", + "url": "/grafana/d/Lv1XbLHnk/data-specific-dashboards-homepage" + }, + { + "asDropdown": false, + "icon": "external link", + "includeVars": false, + "keepTime": true, + "tags": [ + "Data Source Specific Dashboard" + ], + "targetBlank": false, + "title": "Metric dashboards", + "tooltip": "", + "type": "dashboards", + "url": "" + } + ], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { + "h": 3, + "w": 13, + "x": 0, + "y": 0 + }, + "id": 1, + "links": [ + { + "targetBlank": true, + "title": "Taiga Plugin", + "url": "https://github.com/apache/incubator-devlake" + } + ], + "options": { + "code": { + "language": "plaintext", + "showLineNumbers": false, + "showMiniMap": false + }, + "content": "- Use Cases: This dashboard shows project management metrics from Taiga across all issue types (User Stories, Tasks, Issues/Requirements, Epics).\n- Data Source Required: Taiga\n- Metrics: Issue throughput (filtered by type), lead time, story points (User Stories), milestone progress, assignee breakdown and issue type breakdown.\n- Use the **Issue Type** variable to filter panels to a specific type, or select All to see all 
types together.", + "mode": "markdown" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "queryType": "randomWalk", + "refId": "A" + } + ], + "title": "Dashboard Introduction", + "type": "text" + }, + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 3 + }, + "id": 100, + "targets": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "refId": "A" + } + ], + "title": "1. Issue Throughput", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Total number of user stories created in the selected time range and board.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 0, + "y": 4 + }, + "id": 2, + "links": [ + { + "targetBlank": true, + "title": "Requirement Count", + "url": "https://devlake.apache.org/docs/Metrics/RequirementCount" + } + ], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "group": [], + "metricColumn": "none", + "queryType": "randomWalk", + "rawQuery": true, + "rawSql": "select\r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere\r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" 
+ } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Number of Issues [Created in Selected Time Range]", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Total number of issues that have been marked as Done/Closed in the selected time range.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 4, + "y": 4 + }, + "id": 3, + "links": [ + { + "targetBlank": true, + "title": "Requirement Count", + "url": "https://devlake.apache.org/docs/Metrics/RequirementCount" + } + ], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "group": [], + "metricColumn": "none", + "queryType": "randomWalk", + "rawQuery": true, + "rawSql": "select\r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere\r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], 
+ "type": "macro" + } + ] + } + ], + "title": "Number of Closed Issues [Created in Selected Time Range]", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Monthly distribution of open vs. closed issues.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 1, + "drawStyle": "bars", + "fillOpacity": 12, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 16, + "x": 8, + "y": 4 + }, + "id": 4, + "links": [ + { + "targetBlank": true, + "title": "Requirement Count", + "url": "https://devlake.apache.org/docs/Metrics/RequirementCount" + } + ], + "options": { + "legend": { + "calcs": [ + "sum" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "group": [], + "metricColumn": "none", + "queryType": "randomWalk", + "rawQuery": true, + "rawSql": "SELECT\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n count(distinct case when status != 'DONE' then i.id else null end) as \"Number of Open Stories\",\r\n count(distinct case when status = 'DONE' then i.id else null end) as \"Number of Closed Stories\"\r\nFROM issues 
i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere\r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\ngroup by 1", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Issue Status Distribution over Month [Created in Selected Time Range]", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Issue Delivery Rate = count(Closed Issues) / count(All Issues)", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "percentage", + "steps": [ + { + "color": "red", + "value": null + }, + { + "color": "green", + "value": 50 + } + ] + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 8, + "x": 0, + "y": 10 + }, + "id": 5, + "links": [ + { + "targetBlank": true, + "title": "Requirement Delivery Rate", + "url": "https://devlake.apache.org/docs/Metrics/RequirementDeliveryRate" + } + ], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "group": [], + "metricColumn": "none", + "queryType": "randomWalk", + "rawQuery": true, + "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.status = 'DONE' then i.id else null end) as delivered_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where\r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect\r\n now() as time,\r\n 1.0 * 
delivered_count/total_count as requirement_delivery_rate\r\nfrom _requirements", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Issue Delivery Rate [Created in Selected Time Range]", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Monthly trend of issue delivery rate.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Delivery Rate(%)", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 12, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 16, + "x": 8, + "y": 10 + }, + "id": 6, + "links": [ + { + "targetBlank": true, + "title": "Requirement Delivery Rate", + "url": "https://devlake.apache.org/docs/Metrics/RequirementDeliveryRate" + } + ], + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "group": [], + "metricColumn": "none", + "queryType": "randomWalk", + "rawQuery": true, + "rawSql": "with _requirements as(\r\n select\r\n 
DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.status = 'DONE' then i.id else null end)/count(distinct i.id) as delivered_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where\r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n delivered_rate\r\nfrom _requirements\r\norder by time", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Issue Delivery Rate over Time [Created in Selected Time Range]", + "type": "timeseries" + }, + { + "collapsed": false, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 16 + }, + "id": 101, + "panels": [], + "targets": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "refId": "A" + } + ], + "title": "2. 
Issue Lead Time", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Average number of days from story creation to resolution.", + "fieldConfig": { + "defaults": { + "decimals": 1, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 14 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 0, + "y": 17 + }, + "id": 7, + "links": [ + { + "targetBlank": true, + "title": "Requirement Lead Time", + "url": "https://devlake.apache.org/docs/Metrics/RequirementLeadTime" + } + ], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "mean" + ], + "fields": "/^value$/", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "select\r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere\r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Mean Lead Time in Days [Resolved in Selected Time Range]", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "80% of stories are resolved within this many days.", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 21 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 4, + "y": 17 + }, + "id": 8, + 
"links": [ + { + "targetBlank": true, + "title": "Requirement Lead Time", + "url": "https://devlake.apache.org/docs/Metrics/RequirementLeadTime" + } + ], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "with _ranks as(\r\n select\r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where\r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere\r\n ranks <= 0.8", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "80% Stories' Lead Time ≤ # Days [Resolved in Selected Time Range]", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Mean lead time per month for resolved stories.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Lead Time (days)", + "axisPlacement": "auto", + "axisSoftMin": 0, + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineWidth": 1, + "scaleDistribution": { + "type": "linear" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + 
"color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 16, + "x": 8, + "y": 17 + }, + "id": 9, + "links": [ + { + "targetBlank": true, + "title": "Requirement Lead Time", + "url": "https://devlake.apache.org/docs/Metrics/RequirementLeadTime" + } + ], + "options": { + "barRadius": 0, + "barWidth": 0.5, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "orientation": "auto", + "showValue": "auto", + "stacking": "none", + "tooltip": { + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": 0, + "xTickLabelSpacing": 0 + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_lead_time\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where\r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect\r\n date_format(time,'%M %Y') as month,\r\n mean_lead_time\r\nfrom _requirements\r\norder by time asc", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Mean Lead Time by Month [Resolved in Selected Time Range]", + "type": "barchart" + }, + { + "datasource": "mysql", + "description": "The cumulative distribution of story lead time. 
Each point shows the percent rank of a given lead time value.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineWidth": 1, + "scaleDistribution": { + "type": "linear" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 24, + "x": 0, + "y": 23 + }, + "id": 10, + "links": [ + { + "targetBlank": true, + "title": "Requirement Lead Time", + "url": "https://devlake.apache.org/docs/Metrics/RequirementLeadTime" + } + ], + "options": { + "barRadius": 0, + "barWidth": 0.51, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "orientation": "auto", + "showValue": "auto", + "stacking": "none", + "tooltip": { + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": 0, + "xTickLabelSpacing": 100 + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "time_series", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "with _ranks as(\r\n select\r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where\r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n order by lead_time_day asc\r\n)\r\n\r\nselect\r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom 
_ranks\r\norder by lead_time_day asc", +        "refId": "A", +        "select": [ +          [ +            { +              "params": [ +                "value" +              ], +              "type": "column" +            } +          ] +        ], +        "sql": { +          "columns": [ +            { +              "parameters": [], +              "type": "function" +            } +          ], +          "groupBy": [ +            { +              "property": { +                "type": "string" +              }, +              "type": "groupBy" +            } +          ], +          "limit": 50 +        }, +        "timeColumn": "time", +        "where": [ +          { +            "name": "$__timeFilter", +            "params": [], +            "type": "macro" +          } +        ] +      } +    ], +    "title": "Cumulative Distribution of Lead Time [Resolved in Selected Time Range]", +    "transformations": [ +      { +        "id": "reduce", +        "options": { +          "reducers": [ +            "current" +          ] +        } +      } +    ], +    "type": "barchart" +  }, +  { +    "collapsed": false, +    "datasource": { +      "type": "datasource", +      "uid": "grafana" +    }, +    "gridPos": { +      "h": 1, +      "w": 24, +      "x": 0, +      "y": 29 +    }, +    "id": 102, +    "panels": [], +    "targets": [ +      { +        "datasource": { +          "type": "datasource", +          "uid": "grafana" +        }, +        "refId": "A" +      } +    ], +    "title": "3. Story Points", +    "type": "row" +  }, +  { +    "datasource": "mysql", +    "description": "Total story points for all user stories on the board (not limited to the selected time range).", +    "fieldConfig": { +      "defaults": { +        "color": { +          "mode": "thresholds" +        }, +        "mappings": [], +        "thresholds": { +          "mode": "absolute", +          "steps": [ +            { +              "color": "blue", +              "value": null +            } +          ] +        }, +        "unit": "short" +      }, +      "overrides": [] +    }, +    "gridPos": { +      "h": 6, +      "w": 4, +      "x": 0, +      "y": 30 +    }, +    "id": 11, +    "options": { +      "colorMode": "value", +      "graphMode": "area", +      "justifyMode": "auto", +      "orientation": "auto", +      "reduceOptions": { +        "calcs": [ +          "lastNotNull" +        ], +        "fields": "", +        "values": false +      }, +      "text": {}, +      "textMode": "auto" +    }, +    "pluginVersion": "9.5.15", +    "targets": [ +      { +        "datasource": "mysql", +        "editorMode": "code", +        "format": "table", +        "group": [], +        "metricColumn": "none", +        "rawQuery": true, +        "rawSql": "select\r\n  sum(ts.total_points) as value\r\nfrom _tool_taiga_user_stories ts\r\n  join board_issues bi on bi.issue_id = 
concat('taiga:TaigaUserStory:', ts.connection_id, ':', ts.user_story_id)\r\nwhere\r\n bi.board_id in (${board_id})", + "refId": "A", + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Total Story Points", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Total story points for closed/completed user stories.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 4, + "y": 30 + }, + "id": 12, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "select\r\n sum(ts.total_points) as value\r\nfrom _tool_taiga_user_stories ts\r\n join board_issues bi on bi.issue_id = concat('taiga:TaigaUserStory:', ts.connection_id, ':', ts.user_story_id)\r\n join issues i on i.id = bi.issue_id\r\nwhere\r\n bi.board_id in (${board_id})\r\n and i.status = 'DONE'", + "refId": "A", + "sql": { + "columns": [ + { + "parameters": [], + "type": "function" + } + ], + "groupBy": [ + { + "property": { + "type": "string" + }, + "type": "groupBy" + } + ], + "limit": 50 + }, + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Completed 
Story Points", + "type": "stat" + }, + { + "datasource": "mysql", + "description": "Story points allocated to each milestone/sprint. Requires milestone data from Taiga.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Story Points", + "axisPlacement": "auto", + "axisSoftMin": 0, + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineWidth": 1, + "scaleDistribution": { + "type": "linear" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 16, + "x": 8, + "y": 30 + }, + "id": 13, + "options": { + "barRadius": 0, + "barWidth": 0.6, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "orientation": "auto", + "showValue": "auto", + "stacking": "none", + "tooltip": { + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": -15, + "xTickLabelSpacing": 0 + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "select\r\n COALESCE(NULLIF(ts.milestone_name,''), 'No Milestone') as Milestone,\r\n sum(ts.total_points) as `Story Points`\r\nfrom _tool_taiga_user_stories ts\r\n join board_issues bi on bi.issue_id = concat('taiga:TaigaUserStory:', ts.connection_id, ':', ts.user_story_id)\r\nwhere\r\n bi.board_id in (${board_id})\r\ngroup by 1\r\norder by `Story Points` desc", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": 
"macro" + } + ] + } + ], + "title": "Story Points by Milestone", + "type": "barchart" + }, + { + "datasource": "mysql", + "description": "Story points distributed across assignees.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Story Points", + "axisPlacement": "auto", + "axisSoftMin": 0, + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineWidth": 1, + "scaleDistribution": { + "type": "linear" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 12, + "x": 0, + "y": 36 + }, + "id": 14, + "options": { + "barRadius": 0, + "barWidth": 0.6, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "orientation": "auto", + "showValue": "auto", + "stacking": "none", + "tooltip": { + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": -15, + "xTickLabelSpacing": 0 + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "select\r\n COALESCE(NULLIF(i.assignee_name,''), 'Unassigned') as Assignee,\r\n sum(i.story_point) as `Story Points`\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere\r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\ngroup by 1\r\norder by `Story Points` desc", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + 
], + "title": "Story Points by Assignee [Created in Selected Time Range]", + "type": "barchart" + }, + { + "datasource": "mysql", + "description": "Distribution of story points across open vs. closed stories.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 12, + "x": 12, + "y": 36 + }, + "id": 15, + "options": { + "displayLabels": [ + "name", + "value" + ], + "legend": { + "displayMode": "list", + "placement": "right", + "showLegend": true + }, + "pieType": "pie", + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "select\r\n sum(ts.total_points) as `Story Points`,\r\n CASE WHEN i.status = 'DONE' THEN 'Completed' ELSE 'In Progress' END as Status\r\nfrom _tool_taiga_user_stories ts\r\n join board_issues bi on bi.issue_id = concat('taiga:TaigaUserStory:', ts.connection_id, ':', ts.user_story_id)\r\n join issues i on i.id = bi.issue_id\r\nwhere\r\n bi.board_id in (${board_id})\r\ngroup by Status", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Story Points: Completed vs In Progress", + "type": "piechart" + }, + { + "collapsed": false, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 42 + }, + "id": 103, + "panels": [], + "targets": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "refId": "A" + } + 
], + "title": "4. Issues Overview", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Breakdown of all Taiga issues by their current status, across all issue types (User Story, Task, Issue/Requirement, Epic).", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 0, + "y": 43 + }, + "id": 16, + "options": { + "displayLabels": ["name", "value"], + "legend": { + "displayMode": "list", + "placement": "right", + "showLegend": true + }, + "pieType": "donut", + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n now() as time,\n count(distinct case when i.status = 'TODO' then i.id end) as 'To Do',\n count(distinct case when i.status = 'IN_PROGRESS' then i.id end) as 'In Progress',\n count(distinct case when i.status = 'DONE' then i.id end) as 'Done',\n count(distinct case when i.status not in ('TODO','IN_PROGRESS','DONE') then i.id end) as 'Other'\nFROM issues i\n join board_issues bi on i.id = bi.issue_id\nWHERE bi.board_id in (${board_id})", + "refId": "A" + } + ], + "title": "Issues by Status", + "type": "piechart" + }, + { + "datasource": "mysql", + "description": "Number of user stories assigned to each team member.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "User Stories", + "axisPlacement": "auto", + "axisSoftMin": 0, + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineWidth": 1, + 
"scaleDistribution": { + "type": "linear" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 8, + "y": 43 + }, + "id": 17, + "options": { + "barRadius": 0, + "barWidth": 0.6, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "orientation": "horizontal", + "showValue": "auto", + "stacking": "none", + "tooltip": { + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": 0, + "xTickLabelSpacing": 0 + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "select\r\n COALESCE(NULLIF(i.assignee_name,''), 'Unassigned') as Assignee,\r\n count(distinct i.id) as `User Stories`\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere\r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\ngroup by 1\r\norder by `User Stories` desc", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "Issues by Assignee [Created in Selected Time Range]", + "type": "barchart" + }, + { + "datasource": "mysql", + "description": "Number of user stories in each milestone/sprint.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "User Stories", + "axisPlacement": "auto", + "axisSoftMin": 0, + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineWidth": 1, + 
"scaleDistribution": { + "type": "linear" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 16, + "y": 43 + }, + "id": 18, + "options": { + "barRadius": 0, + "barWidth": 0.6, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "orientation": "horizontal", + "showValue": "auto", + "stacking": "none", + "tooltip": { + "mode": "single", + "sort": "none" + }, + "xTickLabelRotation": 0, + "xTickLabelSpacing": 0 + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "select\r\n COALESCE(NULLIF(ts.milestone_name,''), 'No Milestone') as Milestone,\r\n count(distinct ts.user_story_id) as `User Stories`\r\nfrom _tool_taiga_user_stories ts\r\n join board_issues bi on bi.issue_id = concat('taiga:TaigaUserStory:', ts.connection_id, ':', ts.user_story_id)\r\nwhere\r\n bi.board_id in (${board_id})\r\ngroup by 1\r\norder by `User Stories` desc", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + } + ] + } + ], + "title": "User Stories per Milestone", + "type": "barchart" + }, + { + "datasource": "mysql", + "description": "Milestone progress: open vs. 
closed user stories per sprint.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineWidth": 1, + "scaleDistribution": { + "type": "linear" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 24, + "x": 0, + "y": 51 + }, + "id": 19, + "options": { + "barRadius": 0, + "barWidth": 0.6, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "orientation": "auto", + "showValue": "auto", + "stacking": "normal", + "tooltip": { + "mode": "multi", + "sort": "none" + }, + "xTickLabelRotation": -15, + "xTickLabelSpacing": 0 + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "table", + "group": [], + "metricColumn": "none", + "rawQuery": true, + "rawSql": "select\r\n COALESCE(NULLIF(ts.milestone_name,''), 'No Milestone') as Milestone,\r\n count(distinct case when i.status != 'DONE' then ts.user_story_id else null end) as `Open Stories`,\r\n count(distinct case when i.status = 'DONE' then ts.user_story_id else null end) as `Closed Stories`\r\nfrom _tool_taiga_user_stories ts\r\n join board_issues bi on bi.issue_id = concat('taiga:TaigaUserStory:', ts.connection_id, ':', ts.user_story_id)\r\n join issues i on i.id = bi.issue_id\r\nwhere\r\n bi.board_id in (${board_id})\r\ngroup by 1\r\norder by `Closed Stories` desc", + "refId": "A", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "column" + } + ] + ], + "timeColumn": "time", + "where": [ + { + "name": "$__timeFilter", + "params": 
[], + "type": "macro" + } + ] + } + ], + "title": "Milestone Progress: Open vs. Closed Stories", + "type": "barchart" + }, + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { + "h": 2, + "w": 24, + "x": 0, + "y": 77 + }, + "id": 130, + "options": { + "code": { + "language": "plaintext", + "showLineNumbers": false, + "showMiniMap": false + }, + "content": "
\n\nThis dashboard is created based on this [data schema](https://devlake.apache.org/docs/DataModels/DevLakeDomainLayerSchema). Want to add more metrics? Please follow the [guide](https://devlake.apache.org/docs/Configuration/Dashboards/GrafanaUserGuide).", + "mode": "markdown" + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "queryType": "randomWalk", + "refId": "A" + } + ], + "type": "text" + }, + { + "collapsed": false, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 60 + }, + "id": 104, + "panels": [], + "targets": [ + { + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "refId": "A" + } + ], + "title": "5. Issue Type Breakdown", + "type": "row" + }, + { + "datasource": "mysql", + "description": "Distribution of all Taiga items across issue types (User Story, Task, Issue/Requirement, Epic).", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "hideFrom": { "legend": false, "tooltip": false, "viz": false } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [{ "color": "green", "value": null }] + } + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 6, "x": 0, "y": 61 }, + "id": 20, + "options": { + "displayLabels": ["name", "value"], + "legend": { "displayMode": "list", "placement": "right", "showLegend": true }, + "pieType": "pie", + "tooltip": { "mode": "single", "sort": "none" } + }, + "pluginVersion": "9.5.15", + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n now() as time,\n count(distinct case when i.type = 'USER_STORY' then i.id end) as 'User Story',\n count(distinct case when i.type = 'TASK' then i.id end) as 'Task',\n count(distinct case when i.type = 'REQUIREMENT' then i.id end) as 'Issue',\n count(distinct case when i.type = 'EPIC' then i.id end) as 'Epic'\nFROM 
issues i\n join board_issues bi on i.id = bi.issue_id\nWHERE bi.board_id in (${board_id})", + "refId": "A" + } + ], + "title": "Issues by Type", + "type": "piechart" + }, + { + "datasource": "mysql", + "description": "Open vs closed count for each Taiga issue type in the selected time range.", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Count", + "axisPlacement": "auto", + "axisSoftMin": 0, + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { "legend": false, "tooltip": false, "viz": false }, + "lineWidth": 1, + "scaleDistribution": { "type": "linear" }, + "thresholdsStyle": { "mode": "off" } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [{ "color": "green", "value": null }] + } + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 10, "x": 6, "y": 61 }, + "id": 21, + "options": { + "barRadius": 0, + "barWidth": 0.6, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { "calcs": [], "displayMode": "list", "placement": "bottom", "showLegend": true }, + "orientation": "auto", + "showValue": "auto", + "stacking": "normal", + "tooltip": { "mode": "multi", "sort": "none" }, + "xTickLabelRotation": 0, + "xTickLabelSpacing": 0 + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "select\n COALESCE(i.original_type, i.type) as `Issue Type`,\n count(distinct case when i.status != 'DONE' then i.id else null end) as `Open`,\n count(distinct case when i.status = 'DONE' then i.id else null end) as `Closed`\nfrom issues i\n join board_issues bi on i.id = bi.issue_id\nwhere\n $__timeFilter(i.created_date)\n and bi.board_id in (${board_id})\ngroup by 1\norder by 1", + "refId": "A" + } + ], + "title": "Open vs Closed by Type [Created in Selected Time Range]", + "type": "barchart" + }, + { + "datasource": "mysql", + 
"description": "Monthly count of items created, broken down by issue type (User Stories, Tasks, Issues, Epics).", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 1, + "drawStyle": "bars", + "fillOpacity": 12, + "gradientMode": "none", + "hideFrom": { "legend": false, "tooltip": false, "viz": false }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { "type": "linear" }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { "group": "A", "mode": "normal" }, + "thresholdsStyle": { "mode": "off" } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [{ "color": "green", "value": null }, { "color": "red", "value": 80 }] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 8, "x": 16, "y": 61 }, + "id": 22, + "options": { + "legend": { "calcs": ["sum"], "displayMode": "list", "placement": "bottom", "showLegend": true }, + "tooltip": { "mode": "multi", "sort": "none" } + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "format": "time_series", + "rawQuery": true, + "rawSql": "SELECT\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\n count(distinct case when i.type='USER_STORY' then i.id end) as 'User Stories',\n count(distinct case when i.type='TASK' then i.id end) as 'Tasks',\n count(distinct case when i.type='REQUIREMENT' then i.id end) as 'Issues',\n count(distinct case when i.type='EPIC' then i.id end) as 'Epics'\nFROM issues i\n join board_issues bi on i.id = bi.issue_id\nWHERE\n bi.board_id in (${board_id})\n AND $__timeFilter(i.created_date)\nGROUP BY 1\nORDER BY 1", + "refId": "A" + } + ], + "title": "Items Created per Month by Type", + "type": "timeseries" + }, + { + "datasource": "mysql", + "description": "Average lead time in days 
from creation to resolution, broken down by issue type (resolved issues only).", + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Avg Lead Time (days)", + "axisPlacement": "auto", + "axisSoftMin": 0, + "fillOpacity": 80, + "gradientMode": "none", + "hideFrom": { "legend": false, "tooltip": false, "viz": false }, + "lineWidth": 1, + "scaleDistribution": { "type": "linear" }, + "thresholdsStyle": { "mode": "off" } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [{ "color": "green", "value": null }, { "color": "red", "value": 80 }] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 24, "x": 0, "y": 69 }, + "id": 23, + "options": { + "barRadius": 0, + "barWidth": 0.5, + "fullHighlight": false, + "groupWidth": 0.7, + "legend": { "calcs": [], "displayMode": "list", "placement": "bottom", "showLegend": true }, + "orientation": "auto", + "showValue": "auto", + "stacking": "none", + "tooltip": { "mode": "single", "sort": "none" }, + "xTickLabelRotation": 0, + "xTickLabelSpacing": 0 + }, + "pluginVersion": "8.0.6", + "targets": [ + { + "datasource": "mysql", + "editorMode": "code", + "format": "table", + "rawQuery": true, + "rawSql": "select\n COALESCE(i.original_type, i.type) as `Issue Type`,\n round(avg(\n COALESCE(i.lead_time_minutes, TIMESTAMPDIFF(MINUTE, i.created_date, i.resolution_date))\n /1440), 1) as `Avg Lead Time (days)`\nfrom issues i\n join board_issues bi on i.id = bi.issue_id\nwhere\n i.status = 'DONE'\n and bi.board_id in (${board_id})\ngroup by 1\nhaving `Avg Lead Time (days)` is not null\norder by 2 desc", + "refId": "A" + } + ], + "title": "Average Lead Time by Type", + "type": "barchart" + } + ], + "refresh": "", + "schemaVersion": 38, + "style": "dark", + "tags": [ + "Data Source Dashboard" + ], + "templating": { + "list": [ + { + "current": { + "selected": true, + "text": [ + "All" + ], 
+ "value": [ + "$__all" + ] + }, + "datasource": "mysql", + "definition": "select concat(name, '--', id) from boards where id like 'taiga%'", + "hide": 0, + "includeAll": true, + "label": "Choose Project", + "multi": true, + "name": "board_id", + "options": [], + "query": "select concat(name, '--', id) from boards where id like 'taiga%'", + "refresh": 1, + "regex": "/^(?.*)--(?.*)$/", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": { + "selected": true, + "text": [ + "All" + ], + "value": [ + "$__all" + ] + }, + "datasource": "mysql", + "definition": "select distinct i.type from issues i join board_issues bi on i.id = bi.issue_id where bi.board_id like 'taiga%'", + "hide": 0, + "includeAll": true, + "label": "Issue Type", + "multi": true, + "name": "type", + "options": [], + "query": "select distinct i.type from issues i join board_issues bi on i.id = bi.issue_id where bi.board_id like 'taiga%'", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + } + ] + }, + "time": { + "from": "now-6M", + "to": "now" + }, + "timepicker": {}, + "timezone": "utc", + "title": "Taiga", + "uid": "taiga-dashboard", + "version": 2, + "weekStart": "" +} From 7dc9144496e1d933a886803bdb9d9d5cd648e299 Mon Sep 17 00:00:00 2001 From: "tamas.albert" Date: Wed, 4 Mar 2026 11:02:20 +0200 Subject: [PATCH 39/39] feat: Collect teams and display per team metrics --- .../models/migrationscripts/register.go | 1 + .../GithubCopilotAdoptionByTeam.json | 206 ++---------------- 2 files changed, 23 insertions(+), 184 deletions(-) diff --git a/backend/plugins/gh-copilot/models/migrationscripts/register.go b/backend/plugins/gh-copilot/models/migrationscripts/register.go index 28fb0caf152..37e6d4178a5 100644 --- a/backend/plugins/gh-copilot/models/migrationscripts/register.go +++ b/backend/plugins/gh-copilot/models/migrationscripts/register.go @@ -31,5 +31,6 @@ func All() []plugin.MigrationScript { new(addPRFieldsToEnterpriseMetrics), 
new(addTeamTables), new(addOrganizationIdToUserMetrics), + new(addTeamTables), } } diff --git a/grafana/dashboards/GithubCopilotAdoptionByTeam.json b/grafana/dashboards/GithubCopilotAdoptionByTeam.json index a5efc0df349..2b4c9f800ad 100644 --- a/grafana/dashboards/GithubCopilotAdoptionByTeam.json +++ b/grafana/dashboards/GithubCopilotAdoptionByTeam.json @@ -194,7 +194,7 @@ "datasource": "mysql", "format": "table", "rawQuery": true, - "rawSql": "SELECT SUM(m.loc_added_sum) as \"Lines Added\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )", + "rawSql": "SELECT SUM(m.loc_added_sum) as \"Lines Added\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", "refId": "A" } ], @@ -248,7 +248,7 @@ "datasource": "mysql", "format": "table", "rawQuery": true, - "rawSql": "SELECT SUM(m.user_initiated_interaction_count) as \"User-Initiated Interactions\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND 
tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )", + "rawSql": "SELECT SUM(m.user_initiated_interaction_count) as \"User-Initiated Interactions\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')", "refId": "A" } ], @@ -481,114 +481,6 @@ "title": "Seat Utilization", "type": "gauge" }, - { - "datasource": "mysql", - "description": "Users who have used Copilot agent mode in the selected team(s)", - "fieldConfig": { - "defaults": { - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "purple", - "value": null - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 0, - "y": 8 - }, - "id": 47, - "options": { - "colorMode": "value", - "graphMode": "none", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "showPercentChange": false, - "textMode": "auto", - "wideLayout": true - }, - "pluginVersion": "11.0.0", - "targets": [ - { - "datasource": "mysql", - "format": "table", - "rawQuery": true, - "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Agent Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND m.used_agent = 1\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )", - "refId": "A" - } - ], - 
"title": "Agent Mode Adopters", - "type": "stat" - }, - { - "datasource": "mysql", - "description": "Users who have used Copilot chat in the selected team(s)", - "fieldConfig": { - "defaults": { - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "orange", - "value": null - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 6, - "y": 8 - }, - "id": 48, - "options": { - "colorMode": "value", - "graphMode": "none", - "justifyMode": "auto", - "orientation": "auto", - "reduceOptions": { - "calcs": [ - "lastNotNull" - ], - "fields": "", - "values": false - }, - "showPercentChange": false, - "textMode": "auto", - "wideLayout": true - }, - "pluginVersion": "11.0.0", - "targets": [ - { - "datasource": "mysql", - "format": "table", - "rawQuery": true, - "rawSql": "SELECT COUNT(DISTINCT m.user_login) as \"Chat Users\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND m.used_chat = 1\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )", - "refId": "A" - } - ], - "title": "Chat Adopters", - "type": "stat" - }, { "datasource": "mysql", "description": "Active users over time for the selected team(s)", @@ -646,7 +538,7 @@ "h": 8, "w": 12, "x": 0, - "y": 12 + "y": 8 }, "id": 5, "options": { @@ -731,7 +623,7 @@ "h": 8, "w": 12, "x": 12, - "y": 12 + "y": 8 }, "id": 6, "options": { @@ -816,7 +708,7 @@ "h": 8, "w": 12, "x": 0, - "y": 20 + "y": 16 }, "id": 19, "options": { @@ -872,7 +764,7 @@ "h": 8, "w": 12, "x": 12, - "y": 20 + "y": 16 }, "id": 23, "options": { @@ -900,67 +792,13 @@ "title": "Overall Acceptance Rate Trend", "type": "timeseries" }, - { - "datasource": "mysql", - 
"description": "Daily LOC yield trend for team members (added LOC / suggested LOC)", - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "mappings": [], - "min": 0, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 28 - }, - "id": 49, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "11.0.0", - "targets": [ - { - "datasource": "mysql", - "format": "table", - "rawQuery": true, - "rawSql": "SELECT m.day as time,\n ROUND(SUM(m.loc_added_sum) / NULLIF(SUM(m.loc_suggested_to_add_sum), 0), 2) as \"LOC Yield\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )\nGROUP BY m.day\nORDER BY 1", - "refId": "A" - } - ], - "title": "LOC Yield Trend", - "type": "timeseries" - }, { "collapsed": false, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 36 + "y": 24 }, "id": 40, "panels": [], @@ -989,7 +827,7 @@ "h": 10, "w": 24, "x": 0, - "y": 37 + "y": 25 }, "id": 41, "options": { @@ -1074,7 +912,7 @@ "h": 8, "w": 24, "x": 0, - "y": 47 + "y": 35 }, "id": 42, "options": { @@ -1127,7 +965,7 @@ "h": 1, "w": 24, "x": 0, - "y": 55 + "y": 43 }, "id": 43, "panels": [], @@ -1156,7 +994,7 @@ "h": 8, "w": 12, "x": 0, - "y": 56 + "y": 44 }, "id": 9, "options": { @@ -1206,7 +1044,7 @@ "h": 8, "w": 12, "x": 12, - "y": 56 + "y": 44 }, "id": 10, "options": { @@ -1240,7 +1078,7 @@ "h": 1, "w": 24, "x": 
0, - "y": 64 + "y": 52 }, "id": 45, "panels": [], @@ -1269,7 +1107,7 @@ "h": 8, "w": 12, "x": 0, - "y": 65 + "y": 53 }, "id": 29, "options": { @@ -1290,7 +1128,7 @@ "datasource": "mysql", "format": "table", "rawQuery": true, - "rawSql": "SELECT m.user_login as \"user_login\",\n (SELECT GROUP_CONCAT(DISTINCT t.name ORDER BY t.name SEPARATOR ', ')\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n ) as \"teams\",\n SUM(m.code_generation_activity_count) as \"suggestions\",\n SUM(m.code_acceptance_activity_count) as \"acceptances\",\n ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"acceptance %\"\nFROM _tool_copilot_user_daily_metrics m\nWHERE $__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = m.connection_id\n AND tu.user_login = m.user_login\n AND t.slug = '${team:raw}'\n )\n )\nGROUP BY m.user_login\nHAVING SUM(m.code_generation_activity_count) > 0\nORDER BY SUM(m.code_generation_activity_count) DESC\nLIMIT 25", + "rawSql": "SELECT m.user_login as \"user_login\",\n t.name as \"team\",\n SUM(m.code_generation_activity_count) as \"suggestions\",\n SUM(m.code_acceptance_activity_count) as \"acceptances\",\n ROUND(SUM(m.code_acceptance_activity_count) * 100.0 / NULLIF(SUM(m.code_generation_activity_count), 0), 1) as \"acceptance %\"\nFROM _tool_copilot_user_daily_metrics m\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = m.connection_id AND tu.user_login = m.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE 
$__timeFilter(m.day)\n AND m.connection_id = ${connection_id}\n AND m.scope_id = '${scope_id}'\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\nGROUP BY m.user_login, t.name\nHAVING SUM(m.code_generation_activity_count) > 0\nORDER BY SUM(m.code_generation_activity_count) DESC\nLIMIT 25", "refId": "A" } ], @@ -1354,7 +1192,7 @@ "h": 8, "w": 12, "x": 12, - "y": 65 + "y": 53 }, "id": 30, "options": { @@ -1439,7 +1277,7 @@ "h": 8, "w": 24, "x": 0, - "y": 73 + "y": 61 }, "id": 31, "options": { @@ -1473,7 +1311,7 @@ "h": 1, "w": 24, "x": 0, - "y": 81 + "y": 69 }, "id": 46, "panels": [], @@ -1502,7 +1340,7 @@ "h": 8, "w": 8, "x": 0, - "y": 82 + "y": 70 }, "id": 33, "options": { @@ -1563,7 +1401,7 @@ "h": 8, "w": 8, "x": 8, - "y": 82 + "y": 70 }, "id": 34, "options": { @@ -1613,7 +1451,7 @@ "h": 8, "w": 8, "x": 16, - "y": 82 + "y": 70 }, "id": 35, "options": { @@ -1634,7 +1472,7 @@ "datasource": "mysql", "format": "table", "rawQuery": true, - "rawSql": "SELECT s.user_login as \"user_login\",\n (SELECT GROUP_CONCAT(DISTINCT t.name ORDER BY t.name SEPARATOR ', ')\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = s.connection_id\n AND tu.user_login = s.user_login\n ) as \"teams\",\n s.last_activity_at as \"last_activity_at\",\n s.plan_type as \"plan_type\"\nFROM _tool_copilot_seats s\nWHERE s.connection_id = ${connection_id}\n AND (\n '${team:raw}' = 'All'\n OR EXISTS (\n SELECT 1\n FROM _tool_copilot_team_users tu\n INNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\n WHERE tu.connection_id = s.connection_id\n AND tu.user_login = s.user_login\n AND t.slug = '${team:raw}'\n )\n )\n AND (s.last_activity_at IS NULL OR s.last_activity_at < DATE_SUB(UTC_TIMESTAMP(), INTERVAL 30 DAY))\nORDER BY s.last_activity_at IS NULL DESC, s.last_activity_at ASC\nLIMIT 100", + "rawSql": "SELECT s.user_login as \"user_login\",\n t.name 
as \"team\",\n s.last_activity_at as \"last_activity_at\",\n s.plan_type as \"plan_type\"\nFROM _tool_copilot_seats s\nINNER JOIN _tool_copilot_team_users tu\n ON tu.connection_id = s.connection_id AND tu.user_login = s.user_login\nINNER JOIN _tool_copilot_teams t\n ON t.connection_id = tu.connection_id AND t.id = tu.team_id\nWHERE s.connection_id = ${connection_id}\n AND ('${team:raw}' = 'All' OR t.slug = '${team:raw}')\n AND (s.last_activity_at IS NULL OR s.last_activity_at < DATE_SUB(UTC_TIMESTAMP(), INTERVAL 30 DAY))\nORDER BY s.last_activity_at IS NULL DESC, s.last_activity_at ASC\nLIMIT 100", "refId": "A" } ],