diff options
29 files changed, 457 insertions, 214 deletions
diff --git a/CHANGELOG.txt b/CHANGELOG.txt index 56c8910..a227164 100644 --- a/CHANGELOG.txt +++ b/CHANGELOG.txt @@ -16,6 +16,7 @@ * CSV download for quiz attempts enhanced. * Updated Courses Page to show Active Courses on top. * Added a Datetime Picker to Add Course Page +* Added invoke script for quickstart and docker deployment === 0.6.0 (11-05-2017) === @@ -1,13 +1,11 @@ -FROM debian:8.2 +FROM ubuntu:16.04 MAINTAINER FOSSEE <pythonsupport@fossee.in> # Update Packages and Install Python & net-tools -RUN apt-get update && apt-get install -y python net-tools python-pip && pip install tornado - -# Copy the project folder from host into container -COPY ./yaksh /src/yaksh - -WORKDIR /src - -# Run Yaksh code server -CMD ["python", "-m", "yaksh.code_server"] +RUN apt-get update && \ +apt-get install -y software-properties-common && \ +echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \ +add-apt-repository -y ppa:webupd8team/java && \ +apt-get update && \ +apt-get install -y oracle-java8-installer && \ +apt-get install -y sudo python net-tools git python3-pip vim libmysqlclient-dev scilab build-essential python3-numpy python3-scipy ipython3 ipython3-notebook python3-pandas python3-nose @@ -37,56 +37,52 @@ Quick Start #### Installation 1. 
Install yaksh - - For latest stable release + - Clone the repository - $ pip install yaksh + $ git clone https://github.com/FOSSEE/online_test.git - - For the development version + - Go to the online_test directory - $ pip install git+https://github.com/FOSSEE/online_test.git + $ cd ./online_test -#### Short instructions + - Install the dependencies + + - For Python 2 use: + + $ pip install -r ./requirements/requirements-py2.txt -To see a quick demo after installing yaksh do the following: + - For Python 3 (recommended) use: - $ yaksh create_demo yaksh_demo - $ yaksh run yaksh_demo + $ pip install -r ./requirements/requirements-py3.txt + +#### Short instructions -On another terminal start up the code server that executes the user code safely: +1. To run the application do the following: - $ sudo yaksh run_code_server + $ invoke serve -Now point your browser to ```http://localhost:8000/exam```. + - *Note:* The serve command will run the django application server on the 8000 port + and hence this port will be unavailable to other processes. -#### More detailed instructions +1. On another terminal start up the code server that executes the user code safely: -1. On the terminal run: + - To run the code server in a sandboxed docker environment, run the command: - $ yaksh create_demo [project_path] + $ invoke start - - `project_path` is the desired directory of the django project the - basename of which is also the Django project name. This can be a - relative directory. + - Make sure that you have Docker installed on your system beforehand. + [Docker Installation](https://docs.docker.com/engine/installation/#desktop) - - In case a `project_path` is not specified, the project is created - in a `yaksh_demo` subdirectory of the current directory. -1. The script does the following; - 1. Creates a new django project with name as the basename of the specified - `project_path` - 1. Creates a new demo database. - 1. Creates two users, teacher and student. - 1. 
Loads demo questions. - 1. Loads demo quiz. + - To run the code server without docker, locally use: -1. To run the server, run: + $ invoke start --unsafe - $ yaksh run relpath/or/abspath/to/demo + - Note this command will run the yaksh code server locally on your machine + and is susceptible to malicious code. You will have to install the code + server requirements in sudo mode. -1. In a new terminal run the following command which executes user submitted - code safely: - $ sudo yaksh run_code_server 1. Open your browser and open the URL ```http://localhost:8000/exam``` diff --git a/README_production.md b/README_production.md index ed19523..8b79785 100644 --- a/README_production.md +++ b/README_production.md @@ -167,19 +167,18 @@ To install this app follow the steps below: #### Using Dockerized Code Server - 1. Install [Docker] (https://github.com/FOSSEE/online_test/blob/master/README.md) + 1. Install [Docker](https://github.com/FOSSEE/online_test/blob/master/README.md) 1. Got to the directory where the project is located cd /path/to/online_test 1. Create a docker image. This may take a few minutes - docker build -t yaksha:v1 . + docker build -t yaksh_code_server . 1. Check if the image has been created using the output of, docker images - 1. Run a container using the newly created image. - sudo docker run -d -p 53579:53579 -p 8001:8001 yaksha:v1 + 1. Run the invoke script using the command ```invoke start``` The command will create and run a new docker container (that is running the code_server.py within it), it will also bind the ports of the host with those of the container #### Additional commands available diff --git a/online_test/settings.py b/online_test/settings.py index 90cce9d..790083e 100644 --- a/online_test/settings.py +++ b/online_test/settings.py @@ -15,7 +15,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # The directory where user data can be saved. 
This directory will be # world-writable and all user code will be written and saved here by the # code server with each user having their own sub-directory. -OUTPUT_DIR = os.path.join(BASE_DIR, 'output') +OUTPUT_DIR = os.path.join(BASE_DIR, "yaksh_data", "output") # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/ @@ -33,8 +33,6 @@ URL_ROOT = '' # Application definition -FIXTURE_DIRS = os.path.join(BASE_DIR, "yaksh", "fixtures") - INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', @@ -100,7 +98,7 @@ SOCIAL_AUTH_LOGIN_ERROR_URL = '/exam/login/' MEDIA_URL = "/data/" -MEDIA_ROOT = os.path.join(BASE_DIR, "yaksh", "data") +MEDIA_ROOT = os.path.join(BASE_DIR, "yaksh_data", "data") # Set this varable to <True> if smtp-server is not allowing to send email. EMAIL_USE_TLS = False diff --git a/requirements/requirements-codeserver.txt b/requirements/requirements-codeserver.txt new file mode 100644 index 0000000..a4f419c --- /dev/null +++ b/requirements/requirements-codeserver.txt @@ -0,0 +1,5 @@ +pytest +six +requests +tornado +psutil diff --git a/requirements/requirements-common.txt b/requirements/requirements-common.txt index 100d693..b4d2e5b 100644 --- a/requirements/requirements-common.txt +++ b/requirements/requirements-common.txt @@ -1,9 +1,9 @@ +-r requirements-codeserver.txt +invoke==0.21.0 django==1.9.5 django-taggit==0.18.1 pytz==2016.4 python-social-auth==0.2.19 -tornado selenium==2.53.6 coverage -psutil -ruamel.yaml==0.15.23 +ruamel.yaml==0.15.23
\ No newline at end of file diff --git a/tasks.py b/tasks.py new file mode 100644 index 0000000..eabf8fb --- /dev/null +++ b/tasks.py @@ -0,0 +1,95 @@ +import invoke +from invoke import task +import os +from yaksh.settings import SERVER_POOL_PORT + +SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__)) +TARGET_CONTAINER_NAME = 'yaksh_code_server' +SRC_IMAGE_NAME = 'fossee/yaksh_codeserver' + +def create_dir(path): + if not os.path.exists(path): + os.makedirs(path) + +@task +def setupdb(ctx): + print("** Setting up & migrating database **") + ctx.run("python manage.py makemigrations") + ctx.run("python manage.py migrate") + ctx.run("python manage.py loaddata demo_fixtures.json") + +@task(setupdb) +def serve(ctx): + print("** Running the Django web server. Press Ctrl-C to Exit **") + ctx.run("python manage.py runserver") + +@task +def clean(ctx): + print("** Discarding database **") + ctx.run("rm -rf {0}".format(os.path.join(SCRIPT_DIR, 'db.sqlite3'))) + +@task +def getimage(ctx, image=SRC_IMAGE_NAME): + try: + result = ctx.run("sudo docker inspect {0}".format(image), hide=True) + except invoke.exceptions.Failure: + print("The docker image {0} does not exist locally".format(image)) + print("\n** Pulling latest image <{0}> from docker hub **".format(image)) + ctx.run("sudo docker pull {0}".format(image)) + +@task +def start(ctx, ports=SERVER_POOL_PORT, image=SRC_IMAGE_NAME, unsafe=False, + version=3): + if unsafe: + with ctx.cd(SCRIPT_DIR): + print("** Initializing local code server **") + ctx.run("sudo python{0} -m yaksh.code_server".format( + version + ) + ) + else: + cmd_params = {'ports': ports, + 'image': SRC_IMAGE_NAME, + 'name': TARGET_CONTAINER_NAME, + 'vol_mount': os.path.join(SCRIPT_DIR, 'yaksh_data'), + 'command': 'sh {0}'.format( + os.path.join(SCRIPT_DIR, + 'yaksh_data', 'yaksh', 'scripts', 'yaksh_script.sh') + ) + } + + getimage(ctx, image=SRC_IMAGE_NAME) + + print("** Preparing code server **") + create_dir(os.path.join(SCRIPT_DIR, 'yaksh_data', 
'data')) + create_dir(os.path.join(SCRIPT_DIR, 'yaksh_data', 'output')) + + ctx.run('cp -r {0} {1}'.format( + os.path.join(SCRIPT_DIR, 'yaksh'), + os.path.join(SCRIPT_DIR, 'yaksh_data') + ) + ) + ctx.run('cp {0} {1}'.format( + os.path.join(SCRIPT_DIR, 'requirements', 'requirements-codeserver.txt'), + os.path.join(SCRIPT_DIR, 'yaksh_data') + ) + ) + + print("** Initializing code server within docker container **") + ctx.run( + "sudo docker run \ + -dp {ports}:{ports} --name={name} \ + -v {vol_mount}:{vol_mount} \ + -w {vol_mount} \ + {image} {command}".format(**cmd_params) + ) + +@task +def stop(ctx, container=TARGET_CONTAINER_NAME, hide=True): + result = ctx.run("sudo docker ps -q --filter='name={0}'".format(container)) + if result.stdout: + print ("** Discarding the docker container <{0}>".format(container)) + ctx.run("sudo docker stop {0}".format(container)) + ctx.run("sudo docker rm {0}".format(container)) + else: + print("** Docker container <{0}> not found **".format(container)) diff --git a/yaksh/compare_stdio.py b/yaksh/error_messages.py index c4076de..7ea8618 100644 --- a/yaksh/compare_stdio.py +++ b/yaksh/error_messages.py @@ -3,7 +3,24 @@ try: except ImportError: from itertools import izip_longest as zip_longest - +def prettify_exceptions(exception, message, traceback=None, testcase=None): + err = {"type": "assertion", + "exception": exception, + "traceback": traceback, + "message": message + } + if exception == 'RuntimeError' or exception == 'RecursionError': + err["traceback"] = None + + if exception == 'AssertionError': + value = ("Expected answer from the" + + " test case did not match the output") + err["message"] = value + err["traceback"] = None + if testcase: + err["test_case"] = testcase + return err + def _get_incorrect_user_lines(exp_lines, user_lines): err_line_numbers = [] for line_no, (expected_line, user_line) in \ @@ -16,17 +33,19 @@ def _get_incorrect_user_lines(exp_lines, user_lines): def compare_outputs(expected_output, user_output, 
given_input=None): given_lines = user_output.splitlines() exp_lines = expected_output.splitlines() - msg = {"given_input":given_input, + msg = {"type": "stdio", + "given_input": given_input, "expected_output": exp_lines, - "user_output":given_lines - } + "user_output": given_lines + } ng = len(given_lines) ne = len(exp_lines) err_line_numbers = _get_incorrect_user_lines(exp_lines, given_lines) msg["error_line_numbers"] = err_line_numbers if ng != ne: msg["error_msg"] = ("Incorrect Answer: " - + "We had expected {} number of lines. ".format(ne) + + "We had expected {} number of lines. "\ + .format(ne) + "We got {} number of lines.".format(ng) ) return False, msg diff --git a/yaksh/evaluator_tests/test_bash_evaluation.py b/yaksh/evaluator_tests/test_bash_evaluation.py index 2faa7bf..5542710 100644 --- a/yaksh/evaluator_tests/test_bash_evaluation.py +++ b/yaksh/evaluator_tests/test_bash_evaluation.py @@ -104,7 +104,9 @@ class BashAssertionEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get("success")) - self.assert_correct_output(self.timeout_msg, result.get("error")) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) parent_proc = Process(os.getpid()).children() if parent_proc: children_procs = Process(parent_proc[0].pid) @@ -533,7 +535,9 @@ class BashHookEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get('success')) - self.assert_correct_output(self.timeout_msg, result.get('error')) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) parent_proc = Process(os.getpid()).children() if parent_proc: children_procs = Process(parent_proc[0].pid) diff --git a/yaksh/evaluator_tests/test_c_cpp_evaluation.py b/yaksh/evaluator_tests/test_c_cpp_evaluation.py index 0898b3f..162d90c 100644 --- a/yaksh/evaluator_tests/test_c_cpp_evaluation.py +++ b/yaksh/evaluator_tests/test_c_cpp_evaluation.py @@ -151,7 +151,9 @@ class 
CAssertionEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get("success")) - self.assert_correct_output(self.timeout_msg, result.get("error")) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) parent_proc = Process(os.getpid()).children() if parent_proc: children_procs = Process(parent_proc[0].pid) @@ -406,7 +408,9 @@ class CppStdIOEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get("success")) - self.assert_correct_output(self.timeout_msg, result.get("error")) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) parent_proc = Process(os.getpid()).children() if parent_proc: children_procs = Process(parent_proc[0].pid) @@ -616,7 +620,9 @@ class CppStdIOEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get("success")) - self.assert_correct_output(self.timeout_msg, result.get("error")) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) def test_cpp_only_stdout(self): # Given @@ -976,7 +982,9 @@ class CppHookEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get('success')) - self.assert_correct_output(self.timeout_msg, result.get('error')) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) parent_proc = Process(os.getpid()).children() if parent_proc: children_procs = Process(parent_proc[0].pid) diff --git a/yaksh/evaluator_tests/test_java_evaluation.py b/yaksh/evaluator_tests/test_java_evaluation.py index 5ddf8cd..35b64d0 100644 --- a/yaksh/evaluator_tests/test_java_evaluation.py +++ b/yaksh/evaluator_tests/test_java_evaluation.py @@ -160,7 +160,9 @@ class JavaAssertionEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get("success")) - self.assert_correct_output(self.timeout_msg, result.get("error")) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) parent_proc = 
Process(os.getpid()).children() if parent_proc: children_procs = Process(parent_proc[0].pid) @@ -405,7 +407,9 @@ class JavaStdIOEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get("success")) - self.assert_correct_output(self.timeout_msg, result.get("error")) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) parent_proc = Process(os.getpid()).children() if parent_proc: children_procs = Process(parent_proc[0].pid) @@ -845,7 +849,9 @@ class JavaHookEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get('success')) - self.assert_correct_output(self.timeout_msg, result.get('error')) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) parent_proc = Process(os.getpid()).children() if parent_proc: children_procs = Process(parent_proc[0].pid) diff --git a/yaksh/evaluator_tests/test_python_evaluation.py b/yaksh/evaluator_tests/test_python_evaluation.py index a2faf77..71d7732 100644 --- a/yaksh/evaluator_tests/test_python_evaluation.py +++ b/yaksh/evaluator_tests/test_python_evaluation.py @@ -24,9 +24,15 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): f.write('2'.encode('ascii')) tmp_in_dir_path = tempfile.mkdtemp() self.in_dir = tmp_in_dir_path - self.test_case_data = [{"test_case_type": "standardtestcase", "test_case": 'assert(add(1,2)==3)', 'weight': 0.0}, - {"test_case_type": "standardtestcase", "test_case": 'assert(add(-1,2)==1)', 'weight': 0.0}, - {"test_case_type": "standardtestcase", "test_case": 'assert(add(-1,-2)==-3)', 'weight': 0.0}, + self.test_case_data = [{"test_case_type": "standardtestcase", + "test_case": 'assert(add(1,2)==3)', + 'weight': 0.0}, + {"test_case_type": "standardtestcase", + "test_case": 'assert(add(-1,2)==1)', + 'weight': 0.0}, + {"test_case_type": "standardtestcase", + "test_case": 'assert(add(-1,-2)==-3)', + 'weight': 0.0}, ] self.timeout_msg = ("Code took more than {0} seconds to run. 
" "You probably have an infinite loop in" @@ -76,23 +82,29 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get('success')) - self.assert_correct_output('AssertionError in:\n assert(add(1,2)==3)', - result.get('error') - ) - self.assert_correct_output('AssertionError in:\n assert(add(-1,2)==1)', - result.get('error') - ) - self.assert_correct_output('AssertionError in:\n assert(add(-1,-2)==-3)', - result.get('error') - ) + given_test_case_list = [tc["test_case"] for tc in self.test_case_data] + for error in result.get("error"): + self.assertEqual(error['exception'], 'AssertionError') + self.assertEqual(error['message'], + "Expected answer from the test case did not match the output" + ) + error_testcase_list = [tc['test_case'] for tc in result.get('error')] + self.assertEqual(error_testcase_list, given_test_case_list) + def test_partial_incorrect_answer(self): # Given user_answer = "def add(a,b):\n\treturn abs(a) + abs(b)" - test_case_data = [{"test_case_type": "standardtestcase", "test_case": 'assert(add(-1,2)==1)', 'weight': 1.0}, - {"test_case_type": "standardtestcase", "test_case": 'assert(add(-1,-2)==-3)', 'weight': 1.0}, - {"test_case_type": "standardtestcase", "test_case": 'assert(add(1,2)==3)', 'weight': 2.0} - ] + test_case_data = [{"test_case_type": "standardtestcase", + "test_case": 'assert(add(-1,2)==1)', + 'weight': 1.0}, + {"test_case_type": "standardtestcase", + "test_case": 'assert(add(-1,-2)==-3)', + 'weight': 1.0}, + {"test_case_type": "standardtestcase", + "test_case": 'assert(add(1,2)==3)', + 'weight': 2.0} + ] kwargs = { 'metadata': { 'user_answer': user_answer, @@ -110,13 +122,15 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get('success')) self.assertEqual(result.get('weight'), 2.0) - self.assert_correct_output('AssertionError in:\n assert(add(-1,2)==1)', - result.get('error') - ) - self.assert_correct_output('AssertionError in:\n 
assert(add(-1,-2)==-3)', - result.get('error') - ) - + given_test_case_list = [tc["test_case"] for tc in self.test_case_data] + given_test_case_list.remove('assert(add(1,2)==3)') + for error in result.get("error"): + self.assertEqual(error['exception'], 'AssertionError') + self.assertEqual(error['message'], + "Expected answer from the test case did not match the output" + ) + error_testcase_list = [tc['test_case'] for tc in result.get('error')] + self.assertEqual(error_testcase_list, given_test_case_list) def test_infinite_loop(self): # Given user_answer = "def add(a, b):\n\twhile True:\n\t\tpass" @@ -136,7 +150,9 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get('success')) - self.assert_correct_output(self.timeout_msg, result.get('error')) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) def test_syntax_error(self): # Given @@ -165,14 +181,12 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # When grader = Grader(self.in_dir) result = grader.evaluate(kwargs) - error_as_str = ''.join(result.get("error")) - err = error_as_str.splitlines() + err = result.get("error")[0]['traceback'] # Then self.assertFalse(result.get("success")) - self.assertEqual(5, len(err)) for msg in syntax_error_msg: - self.assert_correct_output(msg, result.get("error")) + self.assert_correct_output(msg, err) def test_indent_error(self): # Given @@ -200,13 +214,15 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # When grader = Grader(self.in_dir) result = grader.evaluate(kwargs) - err = result.get("error")[0].splitlines() + err = result.get("error")[0]["traceback"].splitlines() # Then self.assertFalse(result.get("success")) self.assertEqual(5, len(err)) for msg in indent_error_msg: - self.assert_correct_output(msg, result.get("error")) + self.assert_correct_output(msg, + result.get("error")[0]['traceback'] + ) def test_name_error(self): # Given @@ -231,14 +247,9 @@ class 
PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # When grader = Grader(self.in_dir) result = grader.evaluate(kwargs) - error_as_str = ''.join(result.get("error")) - err = error_as_str.splitlines() - - # Then - self.assertFalse(result.get("success")) - self.assertEqual(25, len(err)) + err = result.get("error")[0]["traceback"] for msg in name_error_msg: - self.assert_correct_output(msg, result.get("error")) + self.assertIn(msg, err) def test_recursion_error(self): # Given @@ -246,10 +257,7 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): def add(a, b): return add(3, 3) """) - recursion_error_msg = ["Traceback", - "maximum recursion depth exceeded" - ] - + recursion_error_msg = "maximum recursion depth exceeded" kwargs = { 'metadata': { 'user_answer': user_answer, @@ -263,13 +271,11 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # When grader = Grader(self.in_dir) result = grader.evaluate(kwargs) - error_as_str = ''.join(result.get("error")) - err = error_as_str.splitlines() + err = result.get("error")[0]['message'] # Then self.assertFalse(result.get("success")) - for msg in recursion_error_msg: - self.assert_correct_output(msg, result.get("error")) + self.assert_correct_output(recursion_error_msg, err) def test_type_error(self): # Given @@ -296,14 +302,12 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # When grader = Grader(self.in_dir) result = grader.evaluate(kwargs) - error_as_str = ''.join(result.get("error")) - err = error_as_str.splitlines() + err = result.get("error")[0]['traceback'] # Then self.assertFalse(result.get("success")) - self.assertEqual(25, len(err)) for msg in type_error_msg: - self.assert_correct_output(msg, result.get("error")) + self.assert_correct_output(msg, err) def test_value_error(self): # Given @@ -332,18 +336,19 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # When grader = Grader(self.in_dir) result = grader.evaluate(kwargs) - error_as_str = 
''.join(result.get("error")) - err = error_as_str.splitlines() + err = result.get("error")[0]['traceback'] # Then self.assertFalse(result.get("success")) - self.assertEqual(28, len(err)) for msg in value_error_msg: - self.assert_correct_output(msg, result.get("error")) + self.assert_correct_output(msg, err) def test_file_based_assert(self): # Given - self.test_case_data = [{"test_case_type": "standardtestcase", "test_case": "assert(ans()=='2')", "weight": 0.0}] + self.test_case_data = [{"test_case_type": "standardtestcase", + "test_case": "assert(ans()=='2')", + "weight": 0.0} + ] self.file_paths = [(self.tmp_file, False)] user_answer = dedent(""" def ans(): @@ -369,20 +374,17 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): self.assertTrue(result.get('success')) def test_single_testcase_error(self): - # Given """ Tests the user answer with just an incorrect test case """ + # Given user_answer = "def palindrome(a):\n\treturn a == a[::-1]" test_case_data = [{"test_case_type": "standardtestcase", - "test_case": 's="abbb"\nasert palindrome(s)==False', - "weight": 0.0 - } + "test_case": 's="abbb"\nasert palindrome(s)==False', + "weight": 0.0 + } ] syntax_error_msg = ["Traceback", "call", - "File", - "line", - "<string>", "SyntaxError", "invalid syntax" ] @@ -399,14 +401,12 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # When grader = Grader(self.in_dir) result = grader.evaluate(kwargs) - error_as_str = ''.join(result.get("error")) - err = error_as_str.splitlines() + err = result.get("error")[0]['traceback'] # Then self.assertFalse(result.get("success")) - self.assertEqual(13, len(err)) for msg in syntax_error_msg: - self.assert_correct_output(msg, result.get("error")) + self.assert_correct_output(msg, err) def test_multiple_testcase_error(self): @@ -415,13 +415,11 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # Given user_answer = "def palindrome(a):\n\treturn a == a[::-1]" test_case_data = [{"test_case_type": 
"standardtestcase", - "test_case": 'assert(palindrome("abba")==True)', - "weight": 0.0 - }, + "test_case": 'assert(palindrome("abba")==True)', + "weight": 0.0}, {"test_case_type": "standardtestcase", - "test_case": 's="abbb"\nassert palindrome(S)==False', - "weight": 0.0 - } + "test_case": 's="abbb"\nassert palindrome(S)==False', + "weight": 0.0} ] name_error_msg = ["Traceback", "call", @@ -441,14 +439,12 @@ class PythonAssertionEvaluationTestCases(EvaluatorBaseTest): # When grader = Grader(self.in_dir) result = grader.evaluate(kwargs) - error_as_str = ''.join(result.get("error")) - err = error_as_str.splitlines() + err = result.get("error")[0]['traceback'] # Then self.assertFalse(result.get("success")) - self.assertEqual(11, len(err)) for msg in name_error_msg: - self.assert_correct_output(msg, result.get("error")) + self.assertIn(msg, err) def test_unicode_literal_bug(self): # Given @@ -674,7 +670,9 @@ class PythonStdIOEvaluationTestCases(EvaluatorBaseTest): result = grader.evaluate(kwargs) # Then - self.assert_correct_output(timeout_msg, result.get('error')) + self.assert_correct_output(timeout_msg, + result.get("error")[0]["message"] + ) self.assertFalse(result.get('success')) @@ -915,7 +913,9 @@ class PythonHookEvaluationTestCases(EvaluatorBaseTest): # Then self.assertFalse(result.get('success')) - self.assert_correct_output(self.timeout_msg, result.get('error')) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) def test_assignment_upload(self): # Given diff --git a/yaksh/evaluator_tests/test_python_stdio_evaluator.py b/yaksh/evaluator_tests/test_python_stdio_evaluator.py index 8877544..9b8d702 100644 --- a/yaksh/evaluator_tests/test_python_stdio_evaluator.py +++ b/yaksh/evaluator_tests/test_python_stdio_evaluator.py @@ -1,4 +1,4 @@ -from yaksh.compare_stdio import compare_outputs +from yaksh.error_messages import compare_outputs def test_compare_outputs(): exp = "5\n5\n" diff --git 
a/yaksh/evaluator_tests/test_scilab_evaluation.py b/yaksh/evaluator_tests/test_scilab_evaluation.py index c3a1c83..f7a9925 100644 --- a/yaksh/evaluator_tests/test_scilab_evaluation.py +++ b/yaksh/evaluator_tests/test_scilab_evaluation.py @@ -137,7 +137,9 @@ class ScilabEvaluationTestCases(EvaluatorBaseTest): result = grader.evaluate(kwargs) self.assertFalse(result.get("success")) - self.assert_correct_output(self.timeout_msg, result.get("error")) + self.assert_correct_output(self.timeout_msg, + result.get("error")[0]["message"] + ) parent_proc = Process(os.getpid()).children() if parent_proc: children_procs = Process(parent_proc[0].pid) diff --git a/yaksh/grader.py b/yaksh/grader.py index a9a3738..38cce8d 100644 --- a/yaksh/grader.py +++ b/yaksh/grader.py @@ -21,7 +21,7 @@ except ImportError: # Local imports from .settings import SERVER_TIMEOUT from .language_registry import create_evaluator_instance - +from .error_messages import prettify_exceptions MY_DIR = abspath(dirname(__file__)) registry = None @@ -141,7 +141,8 @@ class Grader(object): for idx, test_case_instance in enumerate(test_case_instances): test_case_success = False test_case_instance.compile_code() - test_case_success, err, mark_fraction = test_case_instance.check_code() + eval_result = test_case_instance.check_code() + test_case_success, err, mark_fraction = eval_result if test_case_success: weight += mark_fraction * test_case_instance.weight else: @@ -154,16 +155,20 @@ class Grader(object): test_case_instance.teardown() except TimeoutException: - error.append(self.timeout_msg) - except OSError: - msg = traceback.format_exc(limit=0) - error.append("Error: {0}".format(msg)) + error.append(prettify_exceptions("TimeoutException", + self.timeout_msg + ) + ) except Exception: exc_type, exc_value, exc_tb = sys.exc_info() tb_list = traceback.format_exception(exc_type, exc_value, exc_tb) if len(tb_list) > 2: del tb_list[1:3] - error.append("Error: {0}".format("".join(tb_list))) + 
error.append(prettify_exceptions(exc_type.__name__, + str(exc_value), + "".join(tb_list), + ) + ) finally: # Set back any original signal handler. set_original_signal_handler(prev_handler) diff --git a/yaksh/hook_evaluator.py b/yaksh/hook_evaluator.py index f5364d6..41ef6e4 100644 --- a/yaksh/hook_evaluator.py +++ b/yaksh/hook_evaluator.py @@ -2,13 +2,13 @@ import sys import traceback import os -import signal import psutil # Local imports from .file_utils import copy_files, delete_files from .base_evaluator import BaseEvaluator from .grader import TimeoutException +from .error_messages import prettify_exceptions class HookEvaluator(BaseEvaluator): @@ -60,19 +60,32 @@ class HookEvaluator(BaseEvaluator): success = False mark_fraction = 0.0 try: - tb = None _tests = compile(self.hook_code, '<string>', mode='exec') hook_scope = {} exec(_tests, hook_scope) check = hook_scope["check_answer"] - success, err, mark_fraction = check(self.user_answer) + try: + success, err, mark_fraction = check(self.user_answer) + except Exception: + raise + except TimeoutException: processes = psutil.Process(os.getpid()).children(recursive=True) for process in processes: process.kill() raise except Exception: - msg = traceback.format_exc(limit=0) - err = "Error in Hook code: {0}".format(msg) - del tb + exc_type, exc_value, exc_tb = sys.exc_info() + tb_list = traceback.format_exception(exc_type, + exc_value, + exc_tb + ) + if len(tb_list) > 2: + del tb_list[1:3] + err = prettify_exceptions(exc_type.__name__, + str(exc_value), + "Error in Hook Code:\n" + + "".join(tb_list) + ) + return success, err, mark_fraction diff --git a/yaksh/models.py b/yaksh/models.py index cb9b481..d698232 100644 --- a/yaksh/models.py +++ b/yaksh/models.py @@ -82,6 +82,8 @@ test_status = ( ('completed', 'Completed'), ) +FIXTURES_DIR_PATH = os.path.join(settings.BASE_DIR, 'yaksh', 'fixtures') + def get_assignment_dir(instance, filename): upload_dir = instance.question_paper.quiz.description.replace(" ", "_") @@ -544,7 
+546,7 @@ class Question(models.Model): def create_demo_questions(self, user): zip_file_path = os.path.join( - settings.FIXTURE_DIRS, 'demo_questions.zip' + FIXTURES_DIR_PATH, 'demo_questions.zip' ) files, extract_path = extract_files(zip_file_path) self.read_yaml(extract_path, user, files) diff --git a/yaksh/python_assertion_evaluator.py b/yaksh/python_assertion_evaluator.py index c8f2dd0..440f422 100644 --- a/yaksh/python_assertion_evaluator.py +++ b/yaksh/python_assertion_evaluator.py @@ -10,6 +10,7 @@ import importlib from .file_utils import copy_files, delete_files from .base_evaluator import BaseEvaluator from .grader import TimeoutException +from .error_messages import prettify_exceptions class PythonAssertionEvaluator(BaseEvaluator): @@ -68,39 +69,22 @@ class PythonAssertionEvaluator(BaseEvaluator): success = False mark_fraction = 0.0 try: - tb = None _tests = compile(self.test_case, '<string>', mode='exec') exec(_tests, self.exec_scope) except TimeoutException: raise except Exception: - type, value, tb = sys.exc_info() - info = traceback.extract_tb(tb) - fname, lineno, func, text = info[-1] - text = str(self.test_case) - - # Get truncated traceback - err_tb_lines = traceback.format_exc().splitlines() - stripped_tb_lines = [] - for line in err_tb_lines: - line = re.sub(r'File\s+".*?",\s+line', - 'File <file>, line', - line - ) - stripped_tb_lines.append(line) - stripped_tb = '\n'.join(stripped_tb_lines[-10::]) - - err = "Expected Test Case:\n{0}\n" \ - "Error Traceback - {1} {2} in:\n {3}\n{4}".format( - self.test_case, - type.__name__, - str(value), - text, - stripped_tb - ) + exc_type, exc_value, exc_tb = sys.exc_info() + tb_list = traceback.format_exception(exc_type, exc_value, exc_tb) + if len(tb_list) > 2: + del tb_list[1:3] + err = prettify_exceptions(exc_type.__name__, + str(exc_value), + "".join(tb_list), + self.test_case + ) else: success = True err = None mark_fraction = 1.0 if self.partial_grading else 0.0 - del tb return success, err, 
mark_fraction diff --git a/yaksh/python_stdio_evaluator.py b/yaksh/python_stdio_evaluator.py index 2b443a7..b08103a 100644 --- a/yaksh/python_stdio_evaluator.py +++ b/yaksh/python_stdio_evaluator.py @@ -9,7 +9,7 @@ except ImportError: # Local imports from .file_utils import copy_files, delete_files from .base_evaluator import BaseEvaluator -from .compare_stdio import compare_outputs +from .error_messages import compare_outputs @contextmanager diff --git a/yaksh/scripts/yaksh_script.sh b/yaksh/scripts/yaksh_script.sh new file mode 100644 index 0000000..f39153e --- /dev/null +++ b/yaksh/scripts/yaksh_script.sh @@ -0,0 +1,11 @@ +#!/bin/bash +# Basic script to install pip packages and run the yaksh code server command + +chown -R nobody output +chmod -R a+rwX output +chmod -R a+rX data yaksh +chmod -R o-w data yaksh +echo "** Installing python dependencies **" +pip3 install -r ./requirements-codeserver.txt +echo "** Running code server **" +/usr/bin/sudo -su nobody python3 -m yaksh.code_server diff --git a/yaksh/static/yaksh/css/exam.css b/yaksh/static/yaksh/css/exam.css index fff904e..ec48a14 100644 --- a/yaksh/static/yaksh/css/exam.css +++ b/yaksh/static/yaksh/css/exam.css @@ -2,6 +2,6 @@ table td, table th { border: black solid 1px !important; word-wrap: break-word !important; white-space: pre-wrap !important; } -output{ +#stdio, #assertion { table-layout: fixed }
\ No newline at end of file diff --git a/yaksh/stdio_evaluator.py b/yaksh/stdio_evaluator.py index 5e4ce18..55adb5c 100644 --- a/yaksh/stdio_evaluator.py +++ b/yaksh/stdio_evaluator.py @@ -5,7 +5,7 @@ import signal # Local imports from .base_evaluator import BaseEvaluator from .grader import TimeoutException -from .compare_stdio import compare_outputs +from .error_messages import compare_outputs class StdIOEvaluator(BaseEvaluator): diff --git a/yaksh/templates/exam.html b/yaksh/templates/exam.html index 9596c1c..a1f0df4 100644 --- a/yaksh/templates/exam.html +++ b/yaksh/templates/exam.html @@ -80,27 +80,50 @@ {% block main %} {% endblock %} </div> + <br/> {% if question.type == 'code' or question.type == 'upload' %} {% if error_message %} <div class="row" id="error_panel"> {% for error in error_message %} <div class="panel panel-danger"> - <div class="panel-heading">Testcase No. {{ forloop.counter }}</div> + <div class="panel-heading">Error No. {{ forloop.counter }}</div> <div class="panel-body"> <div class="well well-sm"> - {% if not error.expected_output %} + {% if not error.type %} <pre><code> {{error|safe}} </code></pre> - {% else %} - {% if error.given_input %} - <table class="table table-bordered"> - <col width="30%"> - <tr class = "active"> - <td> For given Input value(s):</td> - <td>{{error.given_input}}</td> + {% elif error.type == 'assertion' %} + {% if error.test_case %} + <strong> We tried your code with the following test case:</strong><br/><br/> + <pre><code><strong style="color:#d9534f">{{error.test_case}}</strong></code></pre> + {% endif %} + <p> <b>The following error took place: </b></p> + <table class="table table-bordered" width="100%" id='assertion'> + <col width="30%"> + <tr class = "active"> + <td><b>Exception Name: </b></td> + <td><span style="color: #d9534f">{{error.exception}}</span></td> + </tr> + <tr> + <td><b>Exception Message: </b></td><td>{{error.message}}</td> </tr> - </table> + <tr> + {% if error.traceback %} + <td><b>Full
Traceback: </b></td> + <td><pre>{{error.traceback}}</pre></td> + {% endif %} + </tr> + </table> + {% elif error.type == 'stdio' %} + {% if error.given_input %} + <table class="table table-bordered"> + <col width="30%"> + <tr class = "active"> + <td> For given Input value(s):</td> + <td>{{error.given_input}}</td> + </tr> + </table> {% endif %} - <table class="table table-bordered" width="100%" id="output"> + <table class="table table-bordered" width="100%" id="stdio"> <col width="10%"> <col width="40%"> <col width="40%"> diff --git a/yaksh/templates/yaksh/grade_user.html b/yaksh/templates/yaksh/grade_user.html index 37bc788..3339177 100644 --- a/yaksh/templates/yaksh/grade_user.html +++ b/yaksh/templates/yaksh/grade_user.html @@ -218,9 +218,8 @@ Status : <b style="color: green;"> Passed </b><br/> {% endif %} {% with ans.error_list as err %} {% for error in err %} - {% if not error.expected_output %} - <pre><code> {{error|safe}} </code></pre> - {% else %} + + {% if error.type == 'stdio' %} <div class = "well well-sm"> {% if error.given_input %} <table class="table table-bordered"> @@ -262,6 +261,32 @@ Status : <b style="color: green;"> Passed </b><br/> </tr> </table> </div> + {% elif error.type == 'assertion' %} + {% if error.test_case %} + <strong> We tried your code with the following test case:</strong><br/><br/> + <pre><code><strong style="color:#d9534f">{{error.test_case}}</strong></code></pre> + {% endif %} + <p> <b>The following error took place: </b></p> + <div class="well well-sm"> + <table class="table table-bordered" width="100%"> + <col width="30%"> + <tr class = "active"> + <td><b>Exception Name: </b></td> + <td><span style="color: #d9534f">{{error.exception}}</span></td> + </tr> + <tr> + <td><b>Exception Message: </b></td><td>{{error.message}}</td> + </tr> + <tr> + {% if error.traceback %} + <td><b>Full Traceback: </b></td> + <td><pre>{{error.traceback}}</pre></td> + {% endif %} + </tr> + </table> + </div> <!-- Closes well --> + {% else %} + <pre><code>
{{error|safe}} </code></pre> {% endif %} {% endfor %} {% endwith %} diff --git a/yaksh/templates/yaksh/user_data.html b/yaksh/templates/yaksh/user_data.html index 6dfaac3..a0219dd 100644 --- a/yaksh/templates/yaksh/user_data.html +++ b/yaksh/templates/yaksh/user_data.html @@ -136,12 +136,10 @@ User IP address: {{ paper.user_ip }} <div class="panel-heading">Correct answer {% else %} <div class="panel panel-danger"> - <div class="panel-heading">Error + <div class="panel-heading">Error<br/> {% with answer.error_list as err %} {% for error in err %} - {% if not error.expected_output %} - <pre><code> {{error|safe}} </code></pre> - {% else %} + {% if error.type == 'stdio' %} <div class = "well well-sm"> {% if error.given_input %} <table class="table table-bordered"> @@ -183,6 +181,32 @@ User IP address: {{ paper.user_ip }} </tr> </table> </div> + {% elif error.type == 'assertion' %} + {% if error.test_case %} + <strong> We tried your code with the following test case:</strong><br/><br/> + <pre><code><strong style="color:#d9534f">{{error.test_case}}</strong></code></pre> + {% endif %} + <p> <b>The following error took place: </b></p> + <div class="well well-sm"> + <table class="table table-bordered" width="100%"> + <col width="30%"> + <tr class = "active"> + <td><b>Exception Name: </b></td> + <td><span style="color: #d9534f">{{error.exception}}</span></td> + </tr> + <tr> + <td><b>Exception Message: </b></td><td>{{error.message}}</td> + </tr> + <tr> + {% if error.traceback %} + <td><b>Full Traceback: </b></td> + <td><pre>{{error.traceback}}</pre></td> + {% endif %} + </tr> + </table> + </div> <!-- Closes well --> + {% else %} + <pre><code> {{error|safe}} </code></pre> {% endif %} {% endfor %} {% endwith %} diff --git a/yaksh/templates/yaksh/view_answerpaper.html b/yaksh/templates/yaksh/view_answerpaper.html index 79987b1..fa16a08 100644 --- a/yaksh/templates/yaksh/view_answerpaper.html +++ b/yaksh/templates/yaksh/view_answerpaper.html @@ -131,9 +131,8 @@ {% with
answer.error_list as err %} {% for error in err %} - {% if not error.expected_output %} - <pre><code> {{error|safe}} </code></pre> - {% else %} + + {% if error.type == 'stdio' %} <div class = "well well-sm"> {% if error.given_input %} <table class="table table-bordered"> @@ -175,6 +174,32 @@ </tr> </table> </div> + {% elif error.type == 'assertion' %} + {% if error.test_case %} + <strong> We tried your code with the following test case:</strong><br/><br/> + <pre><code><strong style="color:#d9534f">{{error.test_case}}</strong></code></pre> + {% endif %} + <p> <b>The following error took place: </b></p> + <div class="well well-sm"> + <table class="table table-bordered" width="100%"> + <col width="30%"> + <tr class = "active"> + <td><b>Exception Name: </b></td> + <td><span style="color: #d9534f">{{error.exception}}</span></td> + </tr> + <tr> + <td><b>Exception Message: </b></td><td>{{error.message}}</td> + </tr> + <tr> + {% if error.traceback %} + <td><b>Full Traceback: </b></td> + <td><pre>{{error.traceback}}</pre></td> + {% endif %} + </tr> + </table> + </div> <!-- Closes well --> + {% else %} + <pre><code> {{error|safe}} </code></pre> {% endif %} {% endfor %} {% endwith %} diff --git a/yaksh/test_views.py b/yaksh/test_views.py index e3b0168..652f44c 100644 --- a/yaksh/test_views.py +++ b/yaksh/test_views.py @@ -23,7 +23,7 @@ from django.core.files.uploadedfile import SimpleUploadedFile from yaksh.models import User, Profile, Question, Quiz, QuestionPaper,\ QuestionSet, AnswerPaper, Answer, Course, StandardTestCase,\ AssignmentUpload, FileUpload, McqTestCase, IntegerTestCase, StringTestCase,\ - FloatTestCase + FloatTestCase, FIXTURES_DIR_PATH from yaksh.decorators import user_has_profile @@ -3201,7 +3201,7 @@ class TestShowQuestions(TestCase): username=self.user.username, password=self.user_plaintext_pass ) - ques_file = os.path.join(settings.FIXTURE_DIRS, "demo_questions.zip") + ques_file = os.path.join(FIXTURES_DIR_PATH, "demo_questions.zip") f = open(ques_file,
'rb') questions_file = SimpleUploadedFile(ques_file, f.read(), content_type="application/zip") diff --git a/yaksh/tests/test_code_server.py b/yaksh/tests/test_code_server.py index 5f80f2d..1309624 100644 --- a/yaksh/tests/test_code_server.py +++ b/yaksh/tests/test_code_server.py @@ -61,7 +61,7 @@ class TestCodeServer(unittest.TestCase): # Then data = json.loads(result.get('result')) self.assertFalse(data['success']) - self.assertTrue('infinite loop' in data['error'][0]) + self.assertTrue('infinite loop' in data['error'][0]['message']) def test_correct_answer(self): # Given @@ -104,7 +104,7 @@ class TestCodeServer(unittest.TestCase): # Then data = json.loads(result.get('result')) self.assertFalse(data['success']) - self.assertTrue('AssertionError' in data['error'][0]) + self.assertTrue('AssertionError' in data['error'][0]['exception']) def test_multiple_simultaneous_hits(self): # Given @@ -143,7 +143,7 @@ class TestCodeServer(unittest.TestCase): for i in range(N): data = results.get() self.assertFalse(data['success']) - self.assertTrue('infinite loop' in data['error'][0]) + self.assertTrue('infinite loop' in data['error'][0]['message']) def test_server_pool_status(self): # Given |