author    | Prabhu Ramachandran | 2017-03-20 18:51:57 +0530
committer | GitHub | 2017-03-20 18:51:57 +0530
commit    | a522432ae637c029368f22ccb407089ed34e231e (patch)
tree      | b7646d505ad79d4dc80d9c49521995c84fe50dc1 /yaksh
parent    | 6a09816d66f9f24c0dba275d0dd3aaf7289eb73b (diff)
parent    | e0beba1dacb0d5de5ca8b59298345eb9d841d879 (diff)
download  | online_test-a522432ae637c029368f22ccb407089ed34e231e.tar.gz, online_test-a522432ae637c029368f22ccb407089ed34e231e.tar.bz2, online_test-a522432ae637c029368f22ccb407089ed34e231e.zip
Merge pull request #252 from adityacp/assignment_upload_check
Assignment upload Evaluation
Diffstat (limited to 'yaksh')
-rw-r--r-- | yaksh/evaluator_tests/test_python_evaluation.py | 56
-rw-r--r-- | yaksh/hook_evaluator.py | 8
-rw-r--r-- | yaksh/migrations/0001_initial.py | 7
-rw-r--r-- | yaksh/models.py | 21
-rw-r--r-- | yaksh/static/yaksh/js/add_question.js | 36
-rw-r--r-- | yaksh/templates/exam.html | 2
-rw-r--r-- | yaksh/templates/yaksh/add_question.html | 1
-rw-r--r-- | yaksh/templates/yaksh/question.html | 2
-rw-r--r-- | yaksh/test_models.py | 34
-rw-r--r-- | yaksh/views.py | 39
10 files changed, 155 insertions, 51 deletions
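
Reviewer note: the feature under review lets a hook test case grade an "upload" question by inspecting the files a student submitted, which are copied into the evaluator's working directory before the hook runs. A minimal grading hook of the kind exercised by the new test_assignment_upload test could look like the sketch below; the test.txt file name and the expected '2' content are simply the fixture used in the tests, not part of the evaluator API.

    def check_answer(user_answer):
        """ Grade an uploaded assignment. For 'upload' questions there is no
        textual user answer; the uploaded files are copied into the working
        directory, so the hook opens them by name. """
        success = False
        err = "Incorrect Answer"
        mark_fraction = 0.0
        with open("test.txt") as f:   # fixture file name used in the tests
            data = f.read()
        if data == '2':
            success, err, mark_fraction = True, "", 1.0
        return success, err, mark_fraction
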
diff --git a/yaksh/evaluator_tests/test_python_evaluation.py b/yaksh/evaluator_tests/test_python_evaluation.py
index 51f9bea..a751c40 100644
--- a/yaksh/evaluator_tests/test_python_evaluation.py
+++ b/yaksh/evaluator_tests/test_python_evaluation.py
@@ -19,7 +19,8 @@ class EvaluatorBaseTest(unittest.TestCase):
 
 class PythonAssertionEvaluationTestCases(EvaluatorBaseTest):
     def setUp(self):
-        with open('/tmp/test.txt', 'wb') as f:
+        self.tmp_file = os.path.join(tempfile.gettempdir(), "test.txt")
+        with open(self.tmp_file, 'wb') as f:
             f.write('2'.encode('ascii'))
         tmp_in_dir_path = tempfile.mkdtemp()
         self.in_dir = tmp_in_dir_path
@@ -33,7 +34,7 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest):
         self.file_paths = None
 
     def tearDown(self):
-        os.remove('/tmp/test.txt')
+        os.remove(self.tmp_file)
         shutil.rmtree(self.in_dir)
 
     def test_correct_answer(self):
@@ -343,7 +344,7 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest):
     def test_file_based_assert(self):
        # Given
        self.test_case_data = [{"test_case_type": "standardtestcase", "test_case": "assert(ans()=='2')", "weight": 0.0}]
-        self.file_paths = [('/tmp/test.txt', False)]
+        self.file_paths = [(self.tmp_file, False)]
        user_answer = dedent("""
                             def ans():
                                 with open("test.txt") as f:
@@ -479,12 +480,17 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest):
 
 class PythonStdIOEvaluationTestCases(EvaluatorBaseTest):
     def setUp(self):
-        with open('/tmp/test.txt', 'wb') as f:
+        self.tmp_file = os.path.join(tempfile.gettempdir(), "test.txt")
+        with open(self.tmp_file, 'wb') as f:
             f.write('2'.encode('ascii'))
         self.file_paths = None
         tmp_in_dir_path = tempfile.mkdtemp()
         self.in_dir = tmp_in_dir_path
 
+    def teardown(self):
+        os.remove(self.tmp_file)
+        shutil.rmtree(self.in_dir)
+
     def test_correct_answer_integer(self):
         # Given
         self.test_case_data = [{"test_case_type": "stdiobasedtestcase",
@@ -618,7 +624,7 @@ class PythonStdIOEvaluationTestCases(EvaluatorBaseTest):
                                "expected_output": "2",
                                "weight": 0.0
                                }]
-        self.file_paths = [('/tmp/test.txt', False)]
+        self.file_paths = [(self.tmp_file, False)]
 
        user_answer = dedent("""
                             with open("test.txt") as f:
@@ -702,7 +708,8 @@ class PythonStdIOEvaluationTestCases(EvaluatorBaseTest):
 
 class PythonHookEvaluationTestCases(EvaluatorBaseTest):
     def setUp(self):
-        with open('/tmp/test.txt', 'wb') as f:
+        self.tmp_file = os.path.join(tempfile.gettempdir(), "test.txt")
+        with open(self.tmp_file, 'wb') as f:
             f.write('2'.encode('ascii'))
         tmp_in_dir_path = tempfile.mkdtemp()
         self.in_dir = tmp_in_dir_path
@@ -712,7 +719,7 @@ class PythonHookEvaluationTestCases(EvaluatorBaseTest):
         self.file_paths = None
 
     def tearDown(self):
-        os.remove('/tmp/test.txt')
+        os.remove(self.tmp_file)
         shutil.rmtree(self.in_dir)
 
     def test_correct_answer(self):
@@ -910,6 +917,41 @@ class PythonHookEvaluationTestCases(EvaluatorBaseTest):
         self.assertFalse(result.get('success'))
         self.assert_correct_output(self.timeout_msg, result.get('error'))
 
+    def test_assignment_upload(self):
+        # Given
+        user_answer = "Assignment Upload"
+        hook_code = dedent("""\
+            def check_answer(user_answer):
+                success = False
+                err = "Incorrect Answer"
+                mark_fraction = 0.0
+                with open("test.txt") as f:
+                    data = f.read()
+                    if data == '2':
+                        success, err, mark_fraction = True, "", 1.0
+                return success, err, mark_fraction
+            """
+        )
+        test_case_data = [{"test_case_type": "hooktestcase",
+                           "hook_code": hook_code,"weight": 1.0
+                           }]
+        kwargs = {
+            'metadata': {
+                'user_answer': user_answer,
+                'file_paths': self.file_paths,
+                'assign_files': [(self.tmp_file, False)],
+                'partial_grading': False,
+                'language': 'python'
+            },
+            'test_case_data': test_case_data,
+        }
+
+        # When
+        grader = Grader(self.in_dir)
+        result = grader.evaluate(kwargs)
+
+        # Then
+        self.assertTrue(result.get('success'))
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/yaksh/hook_evaluator.py b/yaksh/hook_evaluator.py
index 2cc4578..0819ec9 100644
--- a/yaksh/hook_evaluator.py
+++ b/yaksh/hook_evaluator.py
@@ -12,11 +12,13 @@ from .grader import TimeoutException
 class HookEvaluator(BaseEvaluator):
     def __init__(self, metadata, test_case_data):
         self.files = []
+        self.assign_files = []
 
         # Set metadata values
         self.user_answer = metadata.get('user_answer')
         self.file_paths = metadata.get('file_paths')
         self.partial_grading = metadata.get('partial_grading')
+        self.assignment_files = metadata.get('assign_files')
 
         # Set test case data values
         self.hook_code = test_case_data.get('hook_code')
@@ -26,6 +28,8 @@ class HookEvaluator(BaseEvaluator):
         # Delete the created file.
         if self.files:
             delete_files(self.files)
+        if self.assign_files:
+            delete_files(self.assign_files)
 
     def check_code(self):
         """ Function evaluates user answer by running a python based hook code
@@ -47,6 +51,10 @@ class HookEvaluator(BaseEvaluator):
           Returns (False, error_msg, 0.0): If mandatory arguments are not files
             or if the required permissions are not given to the file(s).
         """
+        if self.file_paths:
+            self.files = copy_files(self.file_paths)
+        if self.assignment_files:
+            self.assign_files = copy_files(self.assignment_files)
         success = False
         mark_fraction = 0.0
         try:
diff --git a/yaksh/migrations/0001_initial.py b/yaksh/migrations/0001_initial.py
index 8ee8c6a..8770a72 100644
--- a/yaksh/migrations/0001_initial.py
+++ b/yaksh/migrations/0001_initial.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Generated by Django 1.9.5 on 2017-03-14 08:33
+# Generated by Django 1.9.5 on 2017-03-17 16:42
 from __future__ import unicode_literals
 
 import datetime
@@ -114,6 +114,7 @@ class Migration(migrations.Migration):
                 ('active', models.BooleanField(default=True)),
                 ('snippet', models.CharField(blank=True, max_length=256)),
                 ('partial_grading', models.BooleanField(default=False)),
+                ('grade_assignment_upload', models.BooleanField(default=False)),
                 ('tags', taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags')),
                 ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL)),
             ],
@@ -171,7 +172,7 @@ class Migration(migrations.Migration):
             name='HookTestCase',
             fields=[
                 ('testcase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='yaksh.TestCase')),
-                ('hook_code', models.TextField(default='def check_answer(user_answer):\n \'\'\' Evaluates user answer to return -\n success - Boolean, indicating if code was executed correctly\n mark_fraction - Float, indicating fraction of the\n weight to a test case\n error - String, error message if success is false\'\'\'\n success = False\n err = "Incorrect Answer" # Please make this more specific\n mark_fraction = 0.0\n\n # write your code here\n\n return success, err, mark_fraction\n\n')),
+                ('hook_code', models.TextField(default='def check_answer(user_answer):\n \'\'\' Evaluates user answer to return -\n success - Boolean, indicating if code was executed correctly\n mark_fraction - Float, indicating fraction of the\n weight to a test case\n error - String, error message if success is false\n In case of assignment upload there will be no user answer \'\'\'\n success = False\n err = "Incorrect Answer" # Please make this more specific\n mark_fraction = 0.0\n\n # write your code here\n\n return success, err, mark_fraction\n\n')),
                 ('weight', models.FloatField(default=1.0)),
             ],
             bases=('yaksh.testcase',),
@@ -233,7 +234,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='assignmentupload',
             name='user',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='yaksh.Profile'),
+            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
         ),
         migrations.AddField(
             model_name='answerpaper',
diff --git a/yaksh/models.py b/yaksh/models.py
index 42e8714..b14fcf6 100644
--- a/yaksh/models.py
+++ b/yaksh/models.py
@@ -68,7 +68,7 @@ test_status = (
 
 def get_assignment_dir(instance, filename):
     return os.sep.join((
-        instance.user.user.username, str(instance.assignmentQuestion.id), filename
+        instance.user.username, str(instance.assignmentQuestion.id), filename
     ))
 
 
@@ -266,7 +266,10 @@ class Question(models.Model):
     # Does this question allow partial grading
     partial_grading = models.BooleanField(default=False)
 
-    def consolidate_answer_data(self, user_answer):
+    # Check assignment upload based question
+    grade_assignment_upload = models.BooleanField(default=False)
+
+    def consolidate_answer_data(self, user_answer, user=None):
         question_data = {}
         metadata = {}
         test_case_data = []
@@ -285,6 +288,13 @@ class Question(models.Model):
         if files:
             metadata['file_paths'] = [(file.file.path, file.extract)
                                       for file in files]
+        if self.type == "upload":
+            assignment_files = AssignmentUpload.objects.filter(
+                assignmentQuestion=self, user=user
+            )
+            if assignment_files:
+                metadata['assign_files'] = [(file.assignmentFile.path, False)
+                                            for file in assignment_files]
         question_data['metadata'] = metadata
         return json.dumps(question_data)
 
@@ -1168,7 +1178,7 @@ class AnswerPaper(models.Model):
             if set(user_answer) == set(expected_answers):
                 result['success'] = True
                 result['error'] = ['Correct answer']
-        elif question.type == 'code':
+        elif question.type == 'code' or question.type == "upload":
             user_dir = self.user.profile.get_user_dir()
             json_result = code_server.run_code(
                 question.language, json_data, user_dir
@@ -1231,7 +1241,7 @@ class AnswerPaper(models.Model):
 
 ###############################################################################
 class AssignmentUpload(models.Model):
-    user = models.ForeignKey(Profile)
+    user = models.ForeignKey(User)
     assignmentQuestion = models.ForeignKey(Question)
     assignmentFile = models.FileField(upload_to=get_assignment_dir)
 
@@ -1295,7 +1305,8 @@ class HookTestCase(TestCase):
             success - Boolean, indicating if code was executed correctly
             mark_fraction - Float, indicating fraction of the
                             weight to a test case
-            error - String, error message if success is false'''
+            error - String, error message if success is false
+            In case of assignment upload there will be no user answer '''
             success = False
             err = "Incorrect Answer" # Please make this more specific
             mark_fraction = 0.0
diff --git a/yaksh/static/yaksh/js/add_question.js b/yaksh/static/yaksh/js/add_question.js
index 8ca22eb..05752b4 100644
--- a/yaksh/static/yaksh/js/add_question.js
+++ b/yaksh/static/yaksh/js/add_question.js
@@ -111,16 +111,34 @@ function textareaformat()
     });
 
-    $('#id_type').bind('focus', function(event){
-        var type = document.getElementById('id_type');
-        type.style.border = '1px solid #ccc';
-    });
+    $('#id_type').bind('focus', function(event){
+        var type = document.getElementById('id_type');
+        type.style.border = '1px solid #ccc';
+    });
+
+    $('#id_language').bind('focus', function(event){
+        var language = document.getElementById('id_language');
+        language.style.border = '1px solid #ccc';
+    });
+
+    document.getElementById('my').innerHTML = document.getElementById('id_description').value ;
 
-    $('#id_language').bind('focus', function(event){
-        var language = document.getElementById('id_language');
-        language.style.border = '1px solid #ccc';
-    });
-    document.getElementById('my').innerHTML = document.getElementById('id_description').value ;
+    if (document.getElementById('id_grade_assignment_upload').checked ||
+        document.getElementById('id_type').val() == 'upload'){
+        $("#id_grade_assignment_upload").prop("disabled", false);
+    }
+    else{
+        $("#id_grade_assignment_upload").prop("disabled", true);
+    }
+
+    $('#id_type').change(function() {
+        if ($(this).val() == "upload"){
+            $("#id_grade_assignment_upload").prop("disabled", false);
+        }
+        else{
+            $("#id_grade_assignment_upload").prop("disabled", true);
+        }
+    });
 
 }
 
 function autosubmit()
diff --git a/yaksh/templates/exam.html b/yaksh/templates/exam.html
index 02ff70a..a18a962 100644
--- a/yaksh/templates/exam.html
+++ b/yaksh/templates/exam.html
@@ -73,7 +73,7 @@
         {% block main %}
         {% endblock %}
     </div>
-    {% if question.type == 'code' %}
+    {% if question.type == 'code' or question.type == 'upload' %}
     {% if error_message %}
         <div class="row" id="error_panel">
         {% for error in error_message %}
diff --git a/yaksh/templates/yaksh/add_question.html b/yaksh/templates/yaksh/add_question.html
index b01ddc3..0d54ef7 100644
--- a/yaksh/templates/yaksh/add_question.html
+++ b/yaksh/templates/yaksh/add_question.html
@@ -26,6 +26,7 @@
         <tr><td>Tags: <td>{{ qform.tags }}
         <tr><td>Snippet: <td>{{ qform.snippet }}
         <tr><td>Partial Grading: <td>{{ qform.partial_grading }}
+        <tr><td>Grade Assignment Upload:<td> {{ qform.grade_assignment_upload }}
         <tr><td> File: <td> {{ fileform.file_field }}{{ fileform.file_field.errors }}
         {% if uploaded_files %}<br><b>Uploaded files:</b><br>Check on delete to delete files,
         extract to extract files and hide to hide files from student(if required)<br>
diff --git a/yaksh/templates/yaksh/question.html b/yaksh/templates/yaksh/question.html
index eaaacbf..dc8a165 100644
--- a/yaksh/templates/yaksh/question.html
+++ b/yaksh/templates/yaksh/question.html
@@ -189,7 +189,7 @@ function call_skip(url)
         {% endif %}
         {% if question.type == "upload" %}
             <p>Upload assignment file for the said question<p>
-            <input type=file id="assignment" name="assignment">
+            <input type=file id="assignment" name="assignment" multiple="">
             <hr>
         {% endif %}
         {% if question.type == "code" %}
diff --git a/yaksh/test_models.py b/yaksh/test_models.py
index 6812212..c732e58 100644
--- a/yaksh/test_models.py
+++ b/yaksh/test_models.py
@@ -57,20 +57,15 @@ def setUpModule():
                        description='demo quiz', pass_criteria=40,
                        language='Python', prerequisite=quiz,
                        course=course, instructions="Demo Instructions")
-
-    with open('/tmp/test.txt', 'wb') as f:
+    tmp_file1 = os.path.join(tempfile.gettempdir(), "test.txt")
+    with open(tmp_file1, 'wb') as f:
         f.write('2'.encode('ascii'))
 
+
 def tearDownModule():
     User.objects.all().delete()
     Question.objects.all().delete()
     Quiz.objects.all().delete()
-
-    que_id_list = ["25", "22", "24", "27"]
-    for que_id in que_id_list:
-        dir_path = os.path.join(os.getcwd(), "yaksh", "data","question_{0}".format(que_id))
-        if os.path.exists(dir_path):
-            shutil.rmtree(dir_path)
 
 ###############################################################################
 class ProfileTestCases(unittest.TestCase):
@@ -117,7 +112,7 @@ class QuestionTestCases(unittest.TestCase):
         self.question2.save()
 
         # create a temp directory and add files for loading questions test
-        file_path = "/tmp/test.txt"
+        file_path = os.path.join(tempfile.gettempdir(), "test.txt")
         self.load_tmp_path = tempfile.mkdtemp()
         shutil.copy(file_path, self.load_tmp_path)
         file1 = os.path.join(self.load_tmp_path, "test.txt")
@@ -126,9 +121,11 @@ class QuestionTestCases(unittest.TestCase):
         self.dump_tmp_path = tempfile.mkdtemp()
         shutil.copy(file_path, self.dump_tmp_path)
         file2 = os.path.join(self.dump_tmp_path, "test.txt")
-        file = open(file2, "r")
-        django_file = File(file)
-        file = FileUpload.objects.create(file=django_file, question=self.question2)
+        upload_file = open(file2, "r")
+        django_file = File(upload_file)
+        file = FileUpload.objects.create(file=django_file,
+                                         question=self.question2
+                                         )
 
         self.question1.tags.add('python', 'function')
         self.assertion_testcase = StandardTestCase(question=self.question1,
@@ -158,6 +155,15 @@ class QuestionTestCases(unittest.TestCase):
     def tearDown(self):
         shutil.rmtree(self.load_tmp_path)
         shutil.rmtree(self.dump_tmp_path)
+        uploaded_files = FileUpload.objects.all()
+        que_id_list = [file.question.id for file in uploaded_files]
+        for que_id in que_id_list:
+            dir_path = os.path.join(os.getcwd(), "yaksh", "data",
+                                    "question_{0}".format(que_id)
+                                    )
+            if os.path.exists(dir_path):
+                shutil.rmtree(dir_path)
+        uploaded_files.delete()
 
     def test_question(self):
         """ Test question """
@@ -214,7 +220,9 @@ class QuestionTestCases(unittest.TestCase):
         self.assertTrue(question_data.active)
         self.assertEqual(question_data.snippet, 'def fact()')
         self.assertEqual(os.path.basename(file.file.path), "test.txt")
-        self.assertEqual([case.get_field_value() for case in test_case], self.test_case_upload_data)
+        self.assertEqual([case.get_field_value() for case in test_case],
+                         self.test_case_upload_data
+                         )
 
 
###############################################################################
diff --git a/yaksh/views.py b/yaksh/views.py
index a752ec2..83749c1 100644
--- a/yaksh/views.py
+++ b/yaksh/views.py
@@ -468,17 +468,26 @@ def check(request, q_id, attempt_num=None, questionpaper_id=None):
         elif current_question.type == 'mcc':
             user_answer = request.POST.getlist('answer')
         elif current_question.type == 'upload':
-            assign = AssignmentUpload()
-            assign.user = user.profile
-            assign.assignmentQuestion = current_question
             # if time-up at upload question then the form is submitted without
             # validation
             if 'assignment' in request.FILES:
-                assign.assignmentFile = request.FILES['assignment']
-                assign.save()
+                assignment_filename = request.FILES.getlist('assignment')
+                for fname in assignment_filename:
+                    if AssignmentUpload.objects.filter(
+                            assignmentQuestion=current_question,
+                            assignmentFile__icontains=fname, user=user).exists():
+                        assign_file = AssignmentUpload.objects.get(
+                            assignmentQuestion=current_question,
+                            assignmentFile__icontains=fname, user=user)
+                        os.remove(assign_file.assignmentFile.path)
+                        assign_file.delete()
+                    AssignmentUpload.objects.create(user=user,
+                        assignmentQuestion=current_question, assignmentFile=fname
+                        )
             user_answer = 'ASSIGNMENT UPLOADED'
-            next_q = paper.add_completed_question(current_question.id)
-            return show_question(request, next_q, paper)
+            if not current_question.grade_assignment_upload:
+                next_q = paper.add_completed_question(current_question.id)
+                return show_question(request, next_q, paper)
         else:
             user_code = request.POST.get('answer')
             user_answer = snippet_code + "\n" + user_code if snippet_code else user_code
@@ -492,13 +501,16 @@ def check(request, q_id, attempt_num=None, questionpaper_id=None):
        # If we were not skipped, we were asked to check. For any non-mcq
        # questions, we obtain the results via XML-RPC with the code executed
        # safely in a separate process (the code_server.py) running as nobody.
-        json_data = current_question.consolidate_answer_data(user_answer) \
-            if current_question.type == 'code' else None
+        json_data = current_question.consolidate_answer_data(user_answer, user) \
+            if current_question.type == 'code' or \
+            current_question.type == 'upload' else None
        result = paper.validate_answer(user_answer, current_question, json_data)
        if result.get('success'):
            new_answer.marks = (current_question.points * result['weight'] /
                current_question.get_maximum_test_case_weight()) \
-                if current_question.partial_grading and current_question.type == 'code' else current_question.points
+                if current_question.partial_grading and \
+                current_question.type == 'code' or current_question.type == 'upload' \
+                else current_question.points
            new_answer.correct = result.get('success')
            error_message = None
            new_answer.error = json.dumps(result.get('error'))
@@ -506,11 +518,14 @@ def check(request, q_id, attempt_num=None, questionpaper_id=None):
        else:
            new_answer.marks = (current_question.points * result['weight'] /
                current_question.get_maximum_test_case_weight()) \
-                if current_question.partial_grading and current_question.type == 'code' else 0
+                if current_question.partial_grading and \
+                current_question.type == 'code' or current_question.type == 'upload' \
+                else 0
            error_message = result.get('error') if current_question.type == 'code' \
-                else None
+                or current_question.type == 'upload' else None
            new_answer.error = json.dumps(result.get('error'))
        next_question = current_question if current_question.type == 'code' \
+            or current_question.type == 'upload' \
            else paper.add_completed_question(current_question.id)
        new_answer.save()
        paper.update_marks('inprogress')
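
For context on how the pieces fit together: when an "upload" question has grade_assignment_upload enabled, views.check now calls consolidate_answer_data(user_answer, user), which packs the student's AssignmentUpload paths into an 'assign_files' metadata entry, and HookEvaluator copies those files into the sandbox before running the hook. The sketch below, modelled on the new test, shows roughly the request the grader ends up evaluating; the upload path is purely illustrative, and running it requires the yaksh package and a file at that path.

    import tempfile
    from textwrap import dedent

    from yaksh.grader import Grader

    # Hook that grades the uploaded file rather than a textual answer.
    hook_code = dedent("""\
        def check_answer(user_answer):
            with open("test.txt") as f:
                data = f.read()
            if data == '2':
                return True, "", 1.0
            return False, "Incorrect Answer", 0.0
        """)

    kwargs = {
        'metadata': {
            'user_answer': 'ASSIGNMENT UPLOADED',   # placeholder answer used for upload questions
            'file_paths': None,                     # ordinary question data files, if any
            # (path, extract) pairs for the uploaded assignment files; path is illustrative
            'assign_files': [('/tmp/student_uploads/test.txt', False)],
            'partial_grading': False,
            'language': 'python'
        },
        'test_case_data': [{'test_case_type': 'hooktestcase',
                            'hook_code': hook_code, 'weight': 1.0}],
    }

    result = Grader(tempfile.mkdtemp()).evaluate(kwargs)
    print(result.get('success'), result.get('error'))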