-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path11_github_health_check.py
More file actions
309 lines (263 loc) · 9.18 KB
/
11_github_health_check.py
File metadata and controls
309 lines (263 loc) · 9.18 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
"""GitHub Repository Health Check - py-std worker example.
This example demonstrates py-std worker capabilities:
- HTTP API calls with httpx
- JSON schema validation with pydantic
- Date parsing and manipulation with python-dateutil
- Fast JSON processing with orjson
- YAML configuration parsing with pyyaml
The workflow fetches GitHub repository data, validates the structure,
analyzes activity metrics, and generates a health score report.
"""
from kruxiaflow import ScriptActivity, Workflow
# Step 1: Fetch repository metadata from GitHub API
# Uses httpx for HTTP client functionality
@ScriptActivity.from_function(
    inputs={
        "owner": "anthropics",
        "repo": "anthropic-sdk-python",
    },
)
async def fetch_repo(owner, repo):
    """Fetch repository metadata for ``owner/repo`` from the GitHub REST API.

    Returns a dict with the raw API payload (``repo_data``) and an aware
    UTC ISO-8601 timestamp (``fetched_at``).

    Raises httpx.HTTPStatusError on non-2xx responses via
    ``raise_for_status``.
    """
    from datetime import datetime, timezone

    import httpx

    repo_url = f"https://api.github.com/repos/{owner}/{repo}"
    headers = {"Accept": "application/vnd.github.v3+json"}
    # Explicit timeout so a stalled GitHub API call cannot hang the worker.
    async with httpx.AsyncClient(timeout=30.0) as client:
        response = await client.get(repo_url, headers=headers)
        response.raise_for_status()
        repo_data = response.json()
    return {
        "repo_data": repo_data,
        # datetime.utcnow() is deprecated (since 3.12) and naive;
        # use a timezone-aware UTC timestamp instead.
        "fetched_at": datetime.now(timezone.utc).isoformat(),
    }
# Step 2: Validate repository data structure using Pydantic
# Ensures the API response has expected fields and types
@ScriptActivity.from_function(
    inputs={
        "repo_data": fetch_repo["repo_data"],
    },
    depends_on=["fetch_repo"],
)
async def validate_structure(repo_data):
    """Validate the GitHub repository payload against the expected schema.

    On success, returns ``{"valid": True, ...}`` with the key metrics the
    downstream steps consume; on failure, returns ``{"valid": False,
    "errors": [...]}`` with pydantic's error list.
    """
    from pydantic import BaseModel, ValidationError

    class GitHubRepoSchema(BaseModel):
        """Fields we require from the GitHub repository endpoint."""

        id: int
        name: str
        full_name: str
        description: str | None
        stargazers_count: int
        forks_count: int
        open_issues_count: int
        created_at: str
        updated_at: str
        pushed_at: str
        size: int
        language: str | None

        class Config:
            extra = "allow"  # Allow additional fields from GitHub API

    # Guard clause: surface validation failures immediately.
    try:
        model = GitHubRepoSchema(**repo_data)
    except ValidationError as exc:
        return {
            "valid": False,
            "errors": exc.errors(),
        }

    return {
        "valid": True,
        "repo_name": model.full_name,
        "stars": model.stargazers_count,
        "forks": model.forks_count,
        "open_issues": model.open_issues_count,
        "created_at": model.created_at,
        "updated_at": model.updated_at,
        "pushed_at": model.pushed_at,
        "size_kb": model.size,
        "language": model.language,
    }
# Step 3: Parse and analyze dates to calculate repository age and activity
# Uses python-dateutil for robust date parsing
@ScriptActivity.from_function(
    inputs={
        "created_at": validate_structure["created_at"],
        "updated_at": validate_structure["updated_at"],
        "pushed_at": validate_structure["pushed_at"],
    },
    depends_on=["validate_structure"],
)
async def parse_dates(created_at, updated_at, pushed_at):
    """Derive age and staleness metrics from the repo's ISO-8601 timestamps.

    Returns day counts since creation/update/push, an approximate age in
    years, and formatted creation/push dates.
    """
    from datetime import datetime, timezone

    from dateutil import parser

    # Single reference point so all deltas are computed consistently.
    reference = datetime.now(timezone.utc)

    created_dt = parser.isoparse(created_at)
    updated_dt = parser.isoparse(updated_at)
    pushed_dt = parser.isoparse(pushed_at)

    age = (reference - created_dt).days
    return {
        "age_days": age,
        # 365.25 accounts for leap years in the rough年-count conversion.
        "age_years": round(age / 365.25, 1),
        "days_since_update": (reference - updated_dt).days,
        "days_since_push": (reference - pushed_dt).days,
        "created_date": created_dt.strftime("%Y-%m-%d"),
        "last_push_date": pushed_dt.strftime("%Y-%m-%d"),
    }
# Step 4: Fetch recent commit activity
@ScriptActivity.from_function(
    inputs={
        "owner": "anthropics",
        "repo": "anthropic-sdk-python",
    },
    depends_on=["validate_structure"],
)
async def fetch_commits(owner, repo):
    """Fetch commits from the last 30 days and summarize activity.

    Returns the commit count, the number of unique commit authors, and the
    average commits per day over the window.
    """
    from datetime import datetime, timedelta, timezone

    import httpx

    # Fetch commits from the last 30 days (first page only, up to 100).
    since_date = (datetime.now(timezone.utc) - timedelta(days=30)).isoformat()
    commits_url = f"https://api.github.com/repos/{owner}/{repo}/commits"
    headers = {"Accept": "application/vnd.github.v3+json"}
    params = {"since": since_date, "per_page": 100}
    # Explicit timeout so a stalled GitHub API call cannot hang the worker.
    async with httpx.AsyncClient(timeout=30.0) as client:
        response = await client.get(commits_url, headers=headers, params=params)
        response.raise_for_status()
        commits = response.json()
    # Calculate commit statistics. GitHub can return "author": null inside
    # the commit object, so a plain c["commit"]["author"]["name"] chain
    # would raise TypeError — use a defensive .get() chain instead.
    commit_count = len(commits)
    unique_authors = len(
        {
            author["name"]
            for c in commits
            if (author := c.get("commit", {}).get("author")) and "name" in author
        }
    )
    return {
        "commit_count_30d": commit_count,
        "unique_authors_30d": unique_authors,
        "commits_per_day": round(commit_count / 30, 2),
    }
# Step 5: Calculate repository health score
# Combines multiple metrics into a single health score
@ScriptActivity.from_function(
    inputs={
        "stars": validate_structure["stars"],
        "forks": validate_structure["forks"],
        "open_issues": validate_structure["open_issues"],
        "days_since_push": parse_dates["days_since_push"],
        "commit_count_30d": fetch_commits["commit_count_30d"],
        "unique_authors_30d": fetch_commits["unique_authors_30d"],
    },
    depends_on=["parse_dates", "fetch_commits"],
)
async def calculate_health(
    stars, forks, open_issues, days_since_push, commit_count_30d, unique_authors_30d
):
    """Combine activity, engagement, maintenance, and issue metrics into a
    0-100 health score plus a coarse status label and per-factor breakdown.
    """
    # Activity (0-30 points): recency of the last push.
    if days_since_push <= 7:
        activity = 30
    elif days_since_push <= 30:
        activity = 20
    elif days_since_push <= 90:
        activity = 10
    else:
        activity = 0

    # Engagement (0-30 points): community interest via stars and forks.
    engagement = min(30, stars // 100 + forks // 20)
    # Maintenance (0-20 points): recent commit volume and contributor count.
    maintenance = min(20, commit_count_30d // 5 + unique_authors_30d * 2)
    # Issue management (0-20 points): fewer open issues scores higher.
    issue_mgmt = max(0, 20 - open_issues // 10)

    total = activity + engagement + maintenance + issue_mgmt

    # Map the numeric score to a human-readable status bucket.
    if total >= 80:
        label = "excellent"
    elif total >= 60:
        label = "good"
    elif total >= 40:
        label = "fair"
    else:
        label = "needs_attention"

    return {
        "health_score": total,
        "status": label,
        "breakdown": {
            "activity": activity,
            "engagement": engagement,
            "maintenance": maintenance,
            "issue_management": issue_mgmt,
        },
    }
# Step 6: Format final report using orjson for fast JSON serialization
@ScriptActivity.from_function(
    inputs={
        "repo_name": validate_structure["repo_name"],
        "language": validate_structure["language"],
        "stars": validate_structure["stars"],
        "forks": validate_structure["forks"],
        "age_years": parse_dates["age_years"],
        "days_since_push": parse_dates["days_since_push"],
        "commit_count_30d": fetch_commits["commit_count_30d"],
        "health_score": calculate_health["health_score"],
        "status": calculate_health["status"],
        "breakdown": calculate_health["breakdown"],
    },
    depends_on=["calculate_health"],
)
async def format_report(
    repo_name,
    language,
    stars,
    forks,
    age_years,
    days_since_push,
    commit_count_30d,
    health_score,
    status,
    breakdown,
):
    """Assemble the final health report as a dict, a pretty-printed JSON
    string, and a one-line summary.
    """
    from datetime import datetime, timezone

    import orjson

    # Build comprehensive health report
    report = {
        "repository": repo_name,
        "language": language,
        "metrics": {
            "stars": stars,
            "forks": forks,
            "age_years": age_years,
            "days_since_last_push": days_since_push,
            "commits_last_30_days": commit_count_30d,
        },
        "health": {
            "score": health_score,
            "status": status,
            "breakdown": breakdown,
        },
        # datetime.utcnow() is deprecated (since 3.12) and naive;
        # use a timezone-aware UTC timestamp instead.
        "generated_at": datetime.now(timezone.utc).isoformat(),
    }
    # Serialize using orjson (faster than standard json library).
    # orjson returns bytes, so decode to string for return.
    report_json = orjson.dumps(report, option=orjson.OPT_INDENT_2).decode()
    return {
        "report_json": report_json,
        "report": report,
        "summary": f"{repo_name}: Health Score {health_score}/100 ({status})",
    }
# Build the workflow.
# Activities are listed in dependency order:
# fetch -> validate -> analyze dates/commits -> score -> report.
_ACTIVITIES = [
    fetch_repo,
    validate_structure,
    parse_dates,
    fetch_commits,
    calculate_health,
    format_report,
]

github_health_workflow = Workflow(
    name="github_health_check",
    activities=_ACTIVITIES,
)

if __name__ == "__main__":
    # Print the compiled YAML to verify the workflow definition.
    print(github_health_workflow)