-
Notifications
You must be signed in to change notification settings - Fork 9
Expand file tree
/
Copy pathgrade.py
More file actions
executable file
·295 lines (248 loc) · 10.4 KB
/
grade.py
File metadata and controls
executable file
·295 lines (248 loc) · 10.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
# grade.py
#
# usage:
# python grade.py MainClass TestDir [--outpre outfilePrefix] [--inext infileExt] [--gradescope]
#
# usage examples:
# python grade.py Section2Binary PublicTestCases --outpre binary
#
# This grade.py is attempting to be more general for
# programs that accept command line input in addition
# to or other than an input file name. It assumes that
# the output files in the given test directory end in .out.
# It will then look for all outfiles with the given prefix.
# After the prefix each command line option should be provided
# in the outfile name separated by dashes. The input file
# if there is one will be provided without an extension. The
# optional argument to the grade script will be the infile extension
# if the first command line option is a file.
#
# examples:
# binary-5.out
# The output prefix is "binary". The program will be run with
# the command line "5".
#
# python grade.py PA2Main PublicTestCases --outpre pa2 --inext csv
# outfile: pa2-miniRoutes-MAX.out
# If I were going to redo the PA2 testing, this is what to do.
# In this one the output prefix is "pa2".
# Command line args: miniRoutes.csv MAX
#
# If the gradescope options is specified then the source
# code will be found in "/autograder/submission/".
# The script always generates a TestingTemp/results.json
# file that could be used by gradescope.
#
# Another assumption is that all of the source files are in the
# default package and are in the src/ subdirectory.
# And the actual testing will happen in the testdir.
# Also assuming the correctness grade is out of 50 points.
import argparse
import glob
import json
import os
import re
import shutil
import subprocess
import sys
from functools import reduce
#######################################
# global variables
# Directory (relative to where the script is launched) in which
# compilation and test execution happen; recreated fresh each run.
tempdir = "TestingTemp/"
# Total correctness points, split evenly across all test cases found.
totalpoints = 50.0
# Name of the gradescope-format results file written inside tempdir.
gradescope_outfile = "results.json"
def getSubmissionSource(gradescope_flag):
    #######################################
    # Return the directory holding the student source code.
    # Under gradescope the submission lives at a fixed absolute path;
    # locally the code is expected in a src/ subdirectory.
    return "/autograder/submission/" if gradescope_flag else "src/"
def cmdLineParse(argv):
    #######################################
    # Parse the grading-script command line.
    # argv: the full argument vector (i.e. sys.argv); argv[0] (the
    #   program name) is skipped.  Previously argv was accepted but
    #   ignored and argparse always read sys.argv directly, which made
    #   this function impossible to call with explicit arguments.
    # Returns an argparse.Namespace with mainclass, testdir, outpre,
    # inext, and gradescope attributes.
    parser = argparse.ArgumentParser(description="grading script for CS 210")
    parser.add_argument('mainclass')
    parser.add_argument('testdir')
    parser.add_argument('--outpre', help='Prefix for expected outfiles.')
    parser.add_argument('--inext', help='Extension for input files.')
    parser.add_argument('--gradescope',
                        help='Copy submission from gradescope location.',
                        action="store_true")
    args = parser.parse_args(argv[1:])
    return args
def copySrcToTempAndCDThere(srcdir, tempdir):
    #######################################
    # Copy all .java source files into a clean testing directory and
    # then chdir into it.
    # srcdir: directory containing the .java files.
    # tempdir: temporary testing directory, recreated from scratch so
    #   artifacts from a previous run cannot leak into this one.
    if os.path.exists(tempdir):
        shutil.rmtree(tempdir)
    os.makedirs(tempdir)
    # Copy with shutil/glob instead of os.system("cp ..."): the shell
    # string broke on paths containing spaces or metacharacters and
    # silently ignored copy errors.
    for src_file in glob.glob(os.path.join(srcdir, "*.java")):
        shutil.copy(src_file, tempdir)
    os.chdir(tempdir)
def execCommand(commandstr):
    #######################################
    # Run the given unix command line through the shell.
    # Returns (0, stdout) on success, (exitcode, combined output) on
    # failure; stderr is merged into stdout in both cases.
    # commandstr should be something like 'javac PA2Main.java'
    completed = subprocess.run(commandstr,
                               shell=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT,
                               universal_newlines=True)
    return (completed.returncode, completed.stdout)
def truncateFloats(infile, outfile):
    #######################################
    # Rewrite infile into outfile with every numeric literal formatted
    # to 2 decimal places (e.g. "3.14159" -> "3.14", "2" -> "2.00").
    # NOTE: like the perl one-liner this replaces, the formatting uses
    # sprintf-style rounding, not true truncation.
    # Implemented with re.sub instead of shelling out to perl, so the
    # script no longer requires perl to be installed and no longer
    # embeds regex backslashes in a non-raw Python string (which raises
    # SyntaxWarning on modern Pythons).
    number_pat = re.compile(r'[-+]?\d*(?:\.?\d|\d\.)\d*(?:[eE][-+]?\d+)?')
    with open(infile) as fin, open(outfile, "w") as fout:
        for line in fin:
            fout.write(number_pat.sub(
                lambda m: "%.2f" % float(m.group(0)), line))
def formatFloat(inval):
    #######################################
    # Normalize a numeric score so its printed form has exactly
    # 2 decimal places.
    return float(format(inval, ".2f"))
def createTestRecord(mainclassname, expected_output_file,
                     cmd_str, max_grade_per_test, gradescope=None):
    #######################################
    # Run the student program once and build a gradescope-style test
    # record dict with score, max_score, name, and output keys.
    # mainclassname: java main class (run with classpath ../lib/*:.)
    # expected_output_file: path to the expected .out file
    # cmd_str: command line arguments passed to the java program
    # max_grade_per_test: points awarded when the diff is clean
    # gradescope: True when running under gradescope; defaults to the
    #   module-level args.gradescope so existing callers keep working
    #   (resolves the old FIXME about reading the global directly).
    if gradescope is None:
        gradescope = args.gradescope
    # run the program, redirecting its stdout to scratch file "out"
    run_cmd = 'java -cp "../lib/*:." '+mainclassname+ " "+ cmd_str+" > out"
    (run_retcode,run_output) = execCommand(run_cmd)
    # diff generated vs expected output, ignoring blank-line (-B) and
    # whitespace (-w) differences
    diff_cmd = "diff -B -w out "+expected_output_file
    (diff_retcode,diff_output) = execCommand(diff_cmd)
    # put together all the information in the test record
    if diff_retcode!=0:
        score = 0.0
        mesg = "Failed " + os.path.basename(expected_output_file) \
             + " test.\n"
        # On gradescope only the pass/fail line goes to stdout (the
        # diff stays in the record); locally the full diff is shown.
        if gradescope:
            print(mesg)
        mesg=mesg+"******** DIFF OUTPUT: Actual output followed by expected.\n"
        if not gradescope:
            print(mesg+diff_output)
    else:
        score = max_grade_per_test
        mesg = "Passed " + os.path.basename(expected_output_file) + " test.\n"
        print(mesg)
    return { "score" : formatFloat(score),
             "max_score" : formatFloat(max_grade_per_test),
             "name" : os.path.basename(expected_output_file),
             "output" : mesg + diff_output }
def compileProgram(mainclassname):
    #######################################
    # Compile <mainclassname>.java with javac.
    # Returns (succeeded_flag, human-readable status message); the
    # message is also echoed to stdout.
    compile_cmd = 'javac '+mainclassname+'.java'
    (retcode, output) = execCommand(compile_cmd)
    mesg_prefix = 'Compilation (' + compile_cmd + ')'
    if retcode != 0:
        mesg = mesg_prefix + ' FAILED:\n' + output
        print(mesg)
        return (False, mesg)
    mesg = mesg_prefix + ' SUCCEEDED!\n'
    print(mesg)
    return (True, mesg)
def parseOutFileName(outfile, outpre, infile_path, infile_ext):
    #######################################
    # Derive the program's command line arguments from an expected
    # output file name of the form outpre-arg1-arg2.out, or
    # outpre-infilebase-args....out when infile_ext is given (then the
    # first argument becomes infile_path + infilebase + "." + infile_ext).
    # outfile: the .out file's base name.
    # outpre: required outfile prefix; mismatch aborts the run.
    # infile_ext: None when there is no input file.
    # Returns the argument string (with a leading space, as callers expect).
    # strip the trailing ".out" extension
    outfile = outfile[0:-4]
    # outpre is followed by the dash-separated cmd line args
    cmd_line_parts = outfile.split('-')
    if cmd_line_parts[0] != outpre:
        # Exit non-zero so a badly named test file fails the grading
        # run; the old code called sys.exit() which exits with status 0
        # ("success") and its message named a nonexistent variable.
        print("grade.py ERROR: outfile name does not start with outpre")
        sys.exit(1)
    cmd_line_parts.pop(0)
    # If there are infiles they HAVE to have an extension; expand the
    # first part into a full input-file path.
    if infile_ext is not None:
        cmd_line_parts[0] = infile_path + cmd_line_parts[0] + "." + infile_ext
    # concat all the command line arguments with spaces between,
    # preserving the original leading space
    cmd_str = "".join(" " + part for part in cmd_line_parts)
    return cmd_str
def runTests(mainclassname, testdir, outpre, inext):
    #######################################
    # Run every test whose expected output file matches outpre*.out in
    # ../testdir, scoring each equally out of totalpoints.
    # Returns (list of test records, total_score, failed_at_least_once).
    expected_files = glob.glob("../" + testdir + "/" + outpre + "*.out")
    per_test_points = totalpoints / float(len(expected_files))
    records = []
    total = 0.0
    any_failure = False
    for expected in expected_files:
        arg_str = parseOutFileName(os.path.basename(expected), outpre,
                                   "../" + testdir + "/", inext)
        record = createTestRecord(mainclassname, expected, arg_str,
                                  per_test_points)
        if record["score"] == 0:
            any_failure = True
        total += record["score"]
        records.append(record)
    return (records, total, any_failure)
#######################################
# main python routine
# set everything up and cd into temporary directory
args = cmdLineParse(sys.argv)
srcdir = getSubmissionSource(args.gradescope)
copySrcToTempAndCDThere(srcdir, tempdir)
# see https://gradescope-autograders.readthedocs.io/en/latest/specs/
# for format of json file that will come from results_dict
results_dict = {}
results_dict["visibility"] = "after_due_date"
# because this will be similar to what they see in Travis
results_dict["stdout_visibility"] = "visible"
#### try to compile the program
(compile_succeeded, compile_msg) = compileProgram(args.mainclass)
results_dict["output"] = compile_msg
#### If compilation failed then done, else do testing
if compile_succeeded:
    (test_records, total_score, failed_at_least_once) \
        = runTests(args.mainclass, args.testdir, args.outpre, args.inext)
    results_dict["score"] = round(formatFloat(total_score))
    results_dict["tests"] = test_records
else:
    results_dict["score"] = 0.0
    failed_at_least_once = True
# Send testing output to the results.json file for gradescope.
# NOT printing score to stdout on purpose.
# Use a context manager so the file is always closed, and keep the
# dumped string in its own name (the old code rebound "json", shadowing
# the json module).
results_json = json.dumps(results_dict, sort_keys=True, indent=4,
                          separators=(',', ': '))
with open(gradescope_outfile, "w") as results_file:
    results_file.write(results_json)
# Exit non-zero when any test failed so CI can detect the failure.
if failed_at_least_once:
    sys.exit(1)
else:
    sys.exit(0)