-
Notifications
You must be signed in to change notification settings - Fork 61
Expand file tree
/
Copy pathconftest.py
More file actions
338 lines (260 loc) · 10.9 KB
/
conftest.py
File metadata and controls
338 lines (260 loc) · 10.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
# -*- coding: UTF-8 -*-
"""Root conftest.py for shared pytest fixtures and configuration.
This file provides fixtures and helpers used across all test sets.
"""
import importlib.util as importUtils
import json
import os
import re
import sys
from pathlib import Path
from typing import Any, Optional

import pytest
from colorama import Fore, Style, init as colorama_init
# Initialize colorama for cross-platform colored output
colorama_init()
# Add parent directory to path for imports
sys.path.append(os.path.dirname(__file__))
# Import treats for celebration messages
from treats import deadpool, grumpy, nyan_cat, pikachu, pokeball, squirtle
# Import shared helpers
from code_helpers import lab_book_entry_completed
# Colors for output
EM = Fore.YELLOW
NORM = Fore.WHITE
# ============================================================================
# Fixtures for loading student exercise files
# ============================================================================
@pytest.fixture
def repo_path():
    """Absolute path to the student's 'me' repository (a sibling of this dir)."""
    tests_dir = os.path.dirname(__file__)
    return os.path.abspath(os.path.join(tests_dir, "..", "me"))
@pytest.fixture
def load_exercise():
    """Fixture that returns a function to load exercise modules dynamically.

    Usage:
        def test_something(load_exercise):
            exercise1 = load_exercise(set_number=2, exercise_number=1)
            result = exercise1.some_function()
    """
    def _load_exercise(set_number: int, exercise_number: int, path: Optional[str] = None):
        """Load an exercise file as a module.

        Args:
            set_number: The set number (1, 2, 3, etc.)
            exercise_number: The exercise number (0, 1, 2, etc.)
            path: Optional custom path to the repo (defaults to ../me)

        Returns:
            The loaded module

        Raises:
            FileNotFoundError: If the exercise file does not exist.
        """
        if path is None:
            path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "me"))
        # Add path to sys.path so package imports inside the exercise work.
        if path not in sys.path:
            sys.path.insert(0, path)
        exercise_path = os.path.join(
            path, f"set{set_number}", f"exercise{exercise_number}.py"
        )
        # Fail early with a clear message instead of a cryptic error from
        # exec_module (same exception type either way).
        if not os.path.isfile(exercise_path):
            raise FileNotFoundError(f"Exercise file not found: {exercise_path}")
        spec = importUtils.spec_from_file_location(
            f"exercise{exercise_number}", exercise_path
        )
        module = importUtils.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module

    return _load_exercise
@pytest.fixture
def check_exercise_runs(repo_path):
    """Fixture to check if an exercise file runs without syntax errors.

    Usage:
        def test_something(check_exercise_runs):
            if check_exercise_runs(set_number=2, exercise_number=1):
                # exercise runs, proceed with tests
    """
    def _check_runs(set_number: int, exercise_number: int) -> bool:
        """Return True when the exercise module executes without raising."""
        exercise_file = os.path.abspath(
            os.path.join(repo_path, f"set{set_number}", f"exercise{exercise_number}.py")
        )
        try:
            spec = importUtils.spec_from_file_location("exercise", exercise_file)
            candidate = importUtils.module_from_spec(spec)
            spec.loader.exec_module(candidate)
        # Any failure at all (syntax error, missing file, runtime error)
        # simply means "does not run" — callers only want a boolean.
        except Exception:
            return False
        return True

    return _check_runs
# ============================================================================
# Fixtures for common test scenarios
# ============================================================================
@pytest.fixture
def lab_book_completed(repo_path):
    """Check if a lab book (readme.md) has been filled out.

    Usage:
        def test_lab_book(lab_book_completed):
            assert lab_book_completed(set_number=2)
    """
    def _check_lab_book(set_number: int) -> bool:
        """Return True if the set's lab book has real content."""
        # All the actual checking lives in code_helpers.lab_book_entry_completed.
        return lab_book_entry_completed(set_number, repo_path)

    return _check_lab_book
# ============================================================================
# Custom assertion helpers
# ============================================================================
def check_return_none_message(result, context: str = "") -> str:
    """Generate helpful message when student returns None.

    This is a common mistake where students print instead of return.
    Returns an empty string when *result* is anything other than None.
    """
    # Guard clause: nothing to report for a real return value.
    if result is not None:
        return ""
    hint = (
        f"\n{EM}You returned None!{NORM}\n"
        f"Common causes:\n"
        f" • Did you forget to {EM}return{NORM} the result?\n"
        f" • Are you using {EM}return print(something){NORM}? "
        f"That returns None!\n"
        f" • Did you assign the value but forget to return it?\n"
        f"\n{EM}Remember:{NORM} You need to return the computed value, "
        f"not print it.\n"
    )
    if context:
        return f"\n{context}\n" + hint
    return hint
def check_print_instead_of_return(func, args, expected_value, capsys):
    """Check if a function printed the expected value instead of returning it.

    This is a VERY common student mistake: they print() the result but forget
    to return it. This helper detects this pattern and provides a helpful
    error message.

    Args:
        func: The function to test
        args: Arguments to pass (single value or tuple)
        expected_value: The value we expect to be returned
        capsys: The pytest capsys fixture for capturing stdout

    Returns:
        Tuple of (result, helpful_message)
        - result is what the function actually returned
        - helpful_message is None if OK, or a string explaining the mistake

    Example usage in a test:
        def test_add_5(exercise0, capsys):
            result, error_msg = check_print_instead_of_return(
                exercise0.add_5, (55,), 60, capsys
            )
            if error_msg:
                pytest.fail(error_msg)
            assert result == 60, "55 + 5 should equal 60"
    """
    # Call the function and capture any output. A tuple is splatted so
    # multi-argument functions work; any other value is passed as-is.
    if isinstance(args, tuple):
        result = func(*args)
    else:
        result = func(args)
    captured = capsys.readouterr()
    # Check if they printed the expected value but returned None
    if result is None and captured.out:
        # Check if the expected value appears in output (compared as strings,
        # so printing 60 matches expected_value 60)
        if str(expected_value) in captured.out.strip():
            message = (
                f"\n{EM}⚠ It looks like you printed the result instead of returning it!{NORM}\n"
                f"\n"
                f" Your function returned: {EM}None{NORM}\n"
                f" But we found this in your output: {EM}{captured.out.strip()}{NORM}\n"
                f"\n"
                f" {EM}Common mistake:{NORM}\n"
                f" def my_function(x):\n"
                f" answer = x + 5\n"
                f" print(answer) {EM}← This is wrong!{NORM}\n"
                f"\n"
                f" {EM}Correct version:{NORM}\n"
                f" def my_function(x):\n"
                f" answer = x + 5\n"
                f" return answer {EM}← Use return, not print{NORM}\n"
                f"\n"
                f" Remember: {EM}return{NORM} gives the value back to the caller.\n"
                f" {EM}print(){NORM} just shows it on screen.\n"
            )
            return result, message
    # No mistake detected (or a real value was returned): no message.
    return result, None
# ============================================================================
# Progress tracking hooks
# ============================================================================
class ProgressTracker:
    """Accumulate per-test pass/fail results for generating trace.json."""

    def __init__(self):
        # Each entry is {"name": <test name>, "value": 1 (pass) or 0 (fail)}.
        self.results = []
        # Set currently being tracked; kept for API compatibility.
        self.current_set = None

    def add_result(self, name: str, passed: bool):
        """Record one test outcome under *name*."""
        entry = {"name": name, "value": 1 if passed else 0}
        self.results.append(entry)

    def get_summary(self) -> dict[str, Any]:
        """Return the mark (tests passed), total count, and raw results."""
        mark = sum(entry["value"] for entry in self.results)
        return {
            "mark": mark,
            "of_total": len(self.results),
            "results": self.results,
        }
# Global progress tracker instance.
# Module-level so the pytest hooks below and the fixture share the same
# accumulated results across the whole session.
_progress_tracker = ProgressTracker()


@pytest.fixture(scope="session")
def progress_tracker():
    """Provide access to the progress tracker.

    Session-scoped: every test receives the single module-level instance.
    """
    return _progress_tracker
# ============================================================================
# Pytest hooks for custom behavior
# ============================================================================
def pytest_collection_modifyitems(config, items):
    """Modify collected test items.

    Adds a ``setN`` marker to each test based on the path of the file it was
    collected from, so e.g. ``pytest -m set2`` selects only set 2's tests.
    """
    for item in items:
        path = str(item.fspath)
        # Loop replaces the original 7-branch if/elif chain; iteration order
        # preserves its first-match semantics. Note: there is deliberately no
        # set7 marker, matching the original chain.
        for n in (1, 2, 3, 4, 5, 6, 8):
            if f"set{n}" in path:
                item.add_marker(getattr(pytest.mark, f"set{n}"))
                break
def pytest_runtest_makereport(item, call):
    """Hook to track test results for progress tracking.

    Only the "call" phase is recorded; setup/teardown phases are ignored.
    """
    if call.when != "call":
        return
    # No exception info means the test body passed.
    _progress_tracker.add_result(item.name, call.excinfo is None)
def pytest_sessionfinish(session, exitstatus):
    """Hook called after test session finishes.

    Prints a pass/fail summary banner and, when every collected test passed,
    a celebration message.
    """
    # Derive the counts from the session totals.
    total = session.testscollected
    passed = total - session.testsfailed
    banner = "=" * 60
    # Show summary
    print(f"\n\n{banner}")
    print(f"Results: {passed}/{total} tests passed")
    # Show celebration if all passed
    if total > 0 and passed == total:
        print(
            f"\n{Fore.GREEN}🎉 Perfect score! All tests passed! 🎉{Style.RESET_ALL}\n"
        )
        print(nyan_cat())
    elif passed > 0:
        print(
            f"\n{Fore.YELLOW}Keep going! You've got {passed} passing!{Style.RESET_ALL}"
        )
        print("✨🌟✨ I believe in you! ✨🌟✨\n")
    print(f"{banner}\n")
# ============================================================================
# Helper functions that can be imported by test files
# ============================================================================
def get_set_number_from_path(test_file_path: str) -> int:
    """Extract set number from a test file path.

    Example: /path/to/set2/test_exercise1.py -> 2

    Returns 0 when no path component looks like ``set<digits>``.
    """
    for component in Path(test_file_path).parts:
        match = re.fullmatch(r"set(\d+)", component)
        if match:
            return int(match.group(1))
    return 0