-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgithub_checker.py
More file actions
168 lines (136 loc) · 5.41 KB
/
github_checker.py
File metadata and controls
168 lines (136 loc) · 5.41 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
#!/usr/bin/env python3
"""
GitHub Repository Batch Checker
Checks if GitHub repositories exist and are accessible.
"""
import os
import re
import sys
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
import requests
class GitHubRepoChecker:
    """Checks whether GitHub repositories exist via the GitHub REST API."""

    # Tried in order: the anchored form strips ".git" / a trailing slash;
    # the permissive form is a fallback for URLs with extra path segments.
    _URL_PATTERNS = (
        r'github\.com/([^/]+)/([^/]+?)(?:\.git)?/?$',
        r'github\.com/([^/]+)/([^/]+)',
    )

    def __init__(self, token=None, max_workers=10):
        """Create a checker with an optional API token and worker-pool size."""
        self.session = requests.Session()
        self.max_workers = max_workers
        # A token raises GitHub's rate limit for authenticated requests.
        if token:
            self.session.headers.update({'Authorization': f'token {token}'})

    def extract_repo_info(self, url):
        """Return (owner, repo) parsed from a GitHub URL, or (None, None)."""
        for pattern in self._URL_PATTERNS:
            found = re.search(pattern, url)
            if not found:
                continue
            owner, name = found.groups()
            # The permissive pattern may keep a ".git" suffix — drop it.
            return owner, name.removesuffix('.git')
        return None, None

    def check_repo(self, url):
        """Query the GitHub API for one repository URL and classify the result.

        Returns a dict with at least 'url', 'status', and 'message' keys;
        existing repos additionally carry 'private', 'archived', and 'stars'.
        """
        owner, repo = self.extract_repo_info(url)
        if not (owner and repo):
            return {
                'url': url,
                'status': 'INVALID_URL',
                'message': 'Could not parse GitHub URL',
            }

        api_url = f'https://api.github.com/repos/{owner}/{repo}'
        # Small per-request delay to respect GitHub API rate limits.
        time.sleep(0.1)

        try:
            response = self.session.get(api_url, timeout=10)
            code = response.status_code
            if code == 200:
                repo_data = response.json()
                return {
                    'url': url,
                    'status': 'EXISTS',
                    'message': f"✓ {owner}/{repo}",
                    'private': repo_data.get('private', False),
                    'archived': repo_data.get('archived', False),
                    'stars': repo_data.get('stargazers_count', 0),
                }
            if code == 404:
                return {
                    'url': url,
                    'status': 'NOT_FOUND',
                    'message': f"✗ {owner}/{repo} - Repository not found",
                }
            if code == 403:
                return {
                    'url': url,
                    'status': 'FORBIDDEN',
                    'message': f"⚠ {owner}/{repo} - Access denied (private or rate limited)",
                }
            return {
                'url': url,
                'status': 'ERROR',
                'message': f"? {owner}/{repo} - HTTP {code}",
            }
        except requests.exceptions.RequestException as e:
            return {
                'url': url,
                'status': 'ERROR',
                'message': f"✗ {owner}/{repo} - Network error: {str(e)}",
            }

    def check_repos_batch(self, urls, progress_callback=None):
        """Check every URL concurrently; results come back in completion order."""
        total = len(urls)
        results = []
        with ThreadPoolExecutor(max_workers=self.max_workers) as pool:
            # Fan out one task per URL, then drain them as they finish.
            pending = {pool.submit(self.check_repo, u): u for u in urls}
            for done_count, future in enumerate(as_completed(pending), start=1):
                outcome = future.result()
                results.append(outcome)
                if progress_callback:
                    progress_callback(done_count, total, outcome)
        return results
def progress_callback(completed, total, result):
    """Report one finished check as '[completed/total] <message>' on stdout."""
    message = result['message']
    print(f"[{completed}/{total}] {message}")
def main():
    """Command-line entry point.

    Usage: python github_checker.py <file_with_urls.txt> [github_token]

    Reads GitHub URLs (one per line) from the given file, checks each one
    via the API, prints per-URL progress, then a status summary and a list
    of dead/problematic links. Exits with status 1 on usage/input errors.
    """
    if len(sys.argv) < 2:
        print("Usage: python github_checker.py <file_with_urls.txt> [github_token]")
        print("\nFile should contain one GitHub URL per line")
        print("GitHub token is optional but recommended for higher rate limits")
        sys.exit(1)

    filename = sys.argv[1]
    # A token on the command line takes precedence over GITHUB_TOKEN in the env.
    token = sys.argv[2] if len(sys.argv) > 2 else os.environ.get('GITHUB_TOKEN')

    try:
        with open(filename, 'r') as f:
            # Keep only non-blank lines that mention github.com.
            urls = [line.strip() for line in f if line.strip() and 'github.com' in line]
    except FileNotFoundError:
        # Fix: the original printed the literal placeholder "(unknown)"
        # instead of interpolating the actual filename.
        print(f"Error: File '{filename}' not found")
        sys.exit(1)

    if not urls:
        print("No GitHub URLs found in the file")
        sys.exit(1)

    print(f"Found {len(urls)} GitHub URLs to check...")
    checker = GitHubRepoChecker(token=token)
    results = checker.check_repos_batch(urls, progress_callback)

    # Summary
    print("\n" + "=" * 50)
    print("SUMMARY")
    print("=" * 50)

    status_counts = {}
    dead_links = []
    for result in results:
        status = result['status']
        status_counts[status] = status_counts.get(status, 0) + 1
        # NOT_FOUND and ERROR are the actionable "dead link" cases;
        # FORBIDDEN may just be a private repo, so it is not flagged.
        if status in ('NOT_FOUND', 'ERROR'):
            dead_links.append(result['url'])

    for status, count in status_counts.items():
        print(f"{status}: {count}")

    if dead_links:
        print(f"\nDead/Problematic Links ({len(dead_links)}):")
        for url in dead_links:
            print(f"  {url}")


if __name__ == "__main__":
    main()