Skip to content
This repository was archived by the owner on Sep 12, 2022. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions submit/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from .models import configure_sql, create_schema, populate_database
from .security import get_user, group_finder

__version__ = '1.2.0'
__version__ = '1.0.3'


class Root(object):
Expand Down Expand Up @@ -79,7 +79,7 @@ def main(global_config, **settings):
engine = engine_from_config(settings, 'sqlalchemy.')
configure_sql(engine)

secure_cookies = settings.get('secure_cookies') != 'false'
secure_cookies = True
if 'pyramid_debugtoolbar' in settings['pyramid.includes']:
create_schema(global_config['__file__'])
populate_database()
Expand Down
16 changes: 5 additions & 11 deletions submit/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,17 +71,16 @@ class UmailAddress(EmailAddress):

"""A validator to verify that a umail address is correct."""

def run(self, value, errors, request, *args):
retval = super(UmailAddress, self).run(value.lower(), errors, request,
*args)
def run(self, value, errors, *args):
retval = super(UmailAddress, self).run(value.lower(), errors, *args)
if errors:
return retval
if not retval.endswith('@umail.ucsb.edu'):
self.add_error(errors, 'must end with @umail.ucsb.edu')
return retval
# Fetch name
try:
name = fetch_name_by_umail(retval, request)
name = fetch_name_by_umail(retval)
except Exception as exc:
self.add_error(errors, exc.message)
return retval
Expand Down Expand Up @@ -274,21 +273,16 @@ def fetch_request_ids(item_ids, cls, attr_name, verification_list=None):
return items


def fetch_name_by_umail(umail, request):
def fetch_name_by_umail(umail):
def extract(item):
if len(data[item]) == 1:
return data[item][0]
raise Exception('Multiple values returned: {}'.format(data))

uid = umail.split('@')[0]

# Return the portion before @umail.ucsb.edu if ldap_uri is not provided
ldap_uri = request.registry.settings.get('ldap_uri')
if not ldap_uri:
return uid

# connect to ldap
ldap_conn = ldap.initialize(ldap_uri)
ldap_conn = ldap.initialize('ldaps://directory.ucsb.edu')
ldap_conn.protocol_version = ldap.VERSION3
results = ldap_conn.search_s(
'o=ucsb', ldap.SCOPE_ONELEVEL, filterstr='uid={}'.format(uid),
Expand Down
2 changes: 1 addition & 1 deletion submit/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -407,7 +407,7 @@ class Project(BasicBase, Base):
makefile = relationship(File, backref='makefile_for_projects')
makefile_id = Column(Integer, ForeignKey('file.id'), nullable=True)
name = Column(Unicode, nullable=False)
status = Column(Enum('locked', 'notready', 'ready', name='proj_status'),
status = Column(Enum('locked', 'notready', 'ready', name='status'),
nullable=False, server_default='notready')
submissions = relationship('Submission', backref='project',
cascade='all, delete-orphan')
Expand Down
4 changes: 4 additions & 0 deletions submit/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -1071,6 +1071,10 @@ def submission_view(request, submission, as_user):
else:
prev_group = next_group = None


if not submission_admin and submission.project.name[0:4]=="EXAM":
files = {}

return {'diff_table': diff_table,
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This change was the hack to allow for lab exams.

'extra_files': extra_files,
'files': files,
Expand Down
59 changes: 53 additions & 6 deletions submit/workers/proxy.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import time
import amqp_worker
import json
import os
Expand Down Expand Up @@ -57,6 +58,11 @@ def __init__(self):

self.base_file_path = settings['file_directory']
self.private_key_file = settings['ssh_priv_key']
self.sleep_short_min = int(settings['sleep_short_min'])
self.sleep_short_max = int(settings['sleep_short_max'])
self.sleep_long_min = int(settings['sleep_long_min'])
self.sleep_long_max = int(settings['sleep_long_max'])

self.account = args.worker_account
machines = settings['worker_machines']
if isinstance(machines, basestring):
Expand All @@ -78,6 +84,15 @@ def __init__(self):

worker.handle_command(args.command)


def get_short_sleep(self):
    """Return a random short pause (seconds), uniform over the configured range."""
    lo, hi = self.sleep_short_min, self.sleep_short_max
    return random.uniform(lo, hi)

def get_long_sleep(self):
    """Return a random long pause (seconds), uniform over the configured range."""
    lo, hi = self.sleep_long_min, self.sleep_long_max
    return random.uniform(lo, hi)



@workers.wrapper
def do_work(self, submission_id, testable_id, update_project=False):
# Verify job
Expand All @@ -100,15 +115,19 @@ def do_work(self, submission_id, testable_id, update_project=False):
# Fetch the best machine
priority, machine = heappop(self.machines)
# Log the start of the job
workers.log_msg('{}.{} begin ({})'
.format(submission_id, testable_id, machine))
start_time = time.time()
workers.log_msg('{}.{} begin ({}) {} {} {} {}'
.format(submission_id, testable_id, machine,
submission.project.class_.name,submission.project.name,submission.created_by.name,testable.name ))

log_type = 'unhandled'
try:
# Kill any processes on the worker
priority = self.kill_processes(machine)
# Copy the files to the worker (and remove existing files)
self.push_files(machine, submission, testable)
# Run the remote worker

self.ssh(machine, 'python worker.py')
# Fetch and generate the results
self.fetch_results(machine, submission, testable,
Expand All @@ -119,21 +138,35 @@ def do_work(self, submission_id, testable_id, update_project=False):
attempt += 1
log_type = 'timeout'
priority += 10
except Exception: # Increase priority and rereaise the exception
except Exception: # Increase priority and re-raise the exception
log_type = 'exception'
priority += 5
sleep_time = self.get_long_sleep()
workers.log_msg('{}.{} {} ({}) ({} sleep) {} {} {} {}'.format(submission_id,
testable_id, 'exception_sleep', sleep_time,
machine,
submission.project.class_.name,submission.project.name,submission.created_by.name,testable.name ))
time.sleep(sleep_time)
raise
finally:
# Add the machine back to the queue
heappush(self.machines, (priority, machine))
# Log the end of the job
workers.log_msg('{}.{} {} ({})'.format(submission_id,
testable_id, log_type,
machine))
elapsed_time = time.time() - start_time
workers.log_msg('{}.{} {} ({}) ({} sec) {} {} {} {}'.format(submission_id,
testable_id, log_type,
machine, elapsed_time,
submission.project.class_.name,submission.project.name,submission.created_by.name,testable.name ))
raise Exception('{}.{} timed out 16 times.'
.format(submission_id, testable_id))

def fetch_results(self, machine, submission, testable, update_project):

workers.log_msg('{}.{} {} ({}) {} {} {} {}'.format(submission.id,
testable.id, 'fetch_results',
machine,
submission.project.class_.name,submission.project.name,submission.created_by.name,testable.name ))

# Rsync to retrieve results
self.rsync(machine)
os.chdir('results')
Expand Down Expand Up @@ -193,6 +226,9 @@ def kill_processes(self, machine):
raise Exception('killall did not work as expected')
except subprocess.CalledProcessError as exc:
if exc.returncode != 255 or exc.output.strip() != expected:
sleep_time = self.get_short_sleep()
workers.log_msg('kill_processes exc.returncode={} sleep={} output={}'.format(exc.returncode,sleep_time,exc.output.strip()))
time.sleep(sleep_time)
raise Exception('killall status: {} ({})'
.format(exc.returncode, exc.output.strip()))
return time.time() - start
Expand Down Expand Up @@ -265,6 +301,12 @@ def push_files(self, machine, submission, testable):
self.rsync(machine, from_local=True)

def rsync(self, machine, from_local=False):

sleep_time = self.get_short_sleep()
time.sleep(sleep_time)
workers.log_msg('rsync machine={} sleep={}'.format(machine,sleep_time))


src = '{}@{}:working/'.format(self.account, machine)
dst = '.'
if from_local:
Expand All @@ -274,6 +316,11 @@ def rsync(self, machine, from_local=False):
subprocess.check_call(cmd, stdout=open(os.devnull, 'w'), shell=True)

def ssh(self, machine, command, timeout=None):

sleep_time = self.get_short_sleep()
time.sleep(sleep_time)
workers.log_msg('ssh machine={} command={} sleep={}'.format(machine,command,sleep_time))

options = '-o ConnectTimeout={}'.format(timeout) if timeout else ''
cmd = 'ssh -i {key} {options} {user}@{host} {command}'.format(
key=self.private_key_file, user=self.account, host=machine,
Expand Down
Loading