├── python ├── head.tex ├── index.md ├── src │ ├── tests │ │ ├── __init__.py │ │ ├── test_partial.py │ │ ├── test_unknown.py │ │ ├── test_files.py │ │ ├── test_leaderboard.py │ │ ├── test_complex.py │ │ ├── test_simple.py │ │ └── test_integration.py │ ├── requirements.txt │ ├── autograder.zip │ ├── make_autograder.sh │ ├── setup.sh │ ├── text_tests.py │ ├── run_autograder │ ├── Pipfile │ ├── run_tests.py │ ├── .gitignore │ ├── incorrect_2 │ │ └── calculator.py │ ├── framework.py │ ├── Pipfile.lock │ ├── incomplete_1 │ │ └── calculator.py │ ├── incorrect_1 │ │ └── calculator.py │ ├── solution │ │ └── calculator.py │ └── incorrect_3 │ │ └── calculator.py ├── Makefile └── README.md ├── README.md ├── diff ├── index.md ├── reference │ └── 10 ├── requirements.txt ├── setup.sh ├── run_autograder ├── run_tests.py ├── fib.c ├── tests │ ├── test_from_file.py │ └── test_subprocess.py ├── .gitignore └── README.md ├── docs ├── index.md ├── c_sharp.md ├── diff.md ├── java.md ├── mysql.md ├── python.md ├── java-mvn.md ├── requirements.txt ├── diff_general.md ├── results.png ├── assignments.png ├── build_status.png ├── leaderboard.png ├── ssh_session.png ├── test_status.png ├── add_public_key.png ├── assignment_form.png ├── debug_via_ssh.png ├── test_autograder.png ├── zipping_files.png ├── ssh_instructions.png ├── assignment_outline.png ├── autograder_settings.png ├── base_image_selector.png ├── configure_autograder.png ├── leaderboard_setting.png ├── manual_grading_form.png ├── manual_grading_view.png ├── regrade_all_submissions.png ├── debug_via_ssh_no_public_key.png ├── update_and_test_autograder.png ├── submit_form_with_leaderboard.png ├── submission_with_leaderboard_enabled.png ├── gradescope.css ├── manual_grading.md ├── tech.md ├── python_other_language.md ├── git_pull.md ├── leaderboards.md ├── ssh.md ├── base_images.md ├── python3_issues.md ├── submission_metadata.md ├── getting_started.md ├── resources.md ├── updates.md ├── troubleshooting.md ├── README.md └── 
manual_docker.md ├── java-mvn ├── src ├── index.md ├── .gitignore ├── run_autograder ├── setup.sh ├── pom.xml └── README.md ├── java ├── classes │ └── .gitkeep ├── index.md ├── autograder.zip ├── run.sh ├── lib │ ├── junit-4.12.jar │ └── hamcrest-core-1.3.jar ├── compile.sh ├── .gitignore ├── setup.sh ├── run_autograder ├── src │ └── main │ │ └── java │ │ └── com │ │ └── gradescope │ │ ├── intlist │ │ ├── tests │ │ │ ├── RunTests.java │ │ │ ├── IntListPredicates.java │ │ │ └── IntListTest.java │ │ ├── IntList.java │ │ ├── AbstractIntList.java │ │ └── RefIntList.java │ │ └── jh61b │ │ ├── grader │ │ ├── GradedTestRunnerJSON.java │ │ ├── GradedTest.java │ │ ├── TestResult.java │ │ └── GradedTestListenerHumanReadable.java │ │ ├── LICENSE.txt │ │ └── junit │ │ ├── TestRunner.java │ │ ├── RunListenerWithCapture.java │ │ ├── JUnitUtilities.java │ │ ├── TestRunnerPrintAll.java │ │ └── TestRunnerPrintFailuresOnly.java ├── solution │ └── IntList.java └── README.md ├── mysql ├── index.md ├── tests │ ├── __init__.py │ └── test_simple_queries.py ├── requirements.txt ├── autograder.zip ├── make_autograder.sh ├── text_tests.py ├── run_autograder ├── run_tests.py ├── setup.sh ├── framework │ └── queries.py ├── .gitignore ├── solution │ └── queries.py ├── database.sql └── README.md ├── diff_general ├── index.md ├── run.sh ├── test_data │ ├── 1 │ │ ├── input │ │ ├── output │ │ ├── run.sh │ │ └── settings.yml │ ├── 2 │ │ ├── input │ │ ├── output │ │ └── run.sh │ ├── 3 │ │ ├── input │ │ ├── output │ │ └── run.sh │ ├── 4 │ │ ├── input │ │ ├── output │ │ └── run.sh │ ├── 5 │ │ ├── input │ │ ├── output │ │ └── run.sh │ └── err │ │ ├── input │ │ ├── output │ │ └── err ├── requirements.txt ├── run_autograder ├── setup.sh ├── compile.sh ├── fib.c ├── run_tests.py ├── .gitignore ├── README.md └── test_generator.py ├── .github ├── CODEOWNERS └── ISSUE_TEMPLATE │ ├── support-request.md │ ├── bug_report.md │ └── feature_request.md ├── manual_docker ├── source │ ├── setup.sh │ └── 
run_autograder └── Dockerfile ├── sigcse16.pdf ├── c# ├── src │ ├── autograder.zip │ ├── Framework.cs │ ├── solution │ │ └── HelloWorld.cs │ ├── run_autograder │ ├── incorrect │ │ └── HelloWorld.cs │ ├── setup.sh │ ├── test.cs │ └── nunit_to_gs.py └── README.md ├── deploy_keys ├── ssh_config ├── run_autograder └── setup.sh ├── java_template ├── lib │ ├── junit-4.12.jar │ └── hamcrest-core-1.3.jar ├── run.sh ├── compile.sh ├── setup.sh ├── run_autograder └── src │ ├── com │ └── gradescope │ │ └── jh61b │ │ ├── grader │ │ ├── GradedTestRunnerJSON.java │ │ ├── GradedTest.java │ │ ├── TestResult.java │ │ └── GradedTestListenerHumanReadable.java │ │ ├── LICENSE.txt │ │ └── junit │ │ ├── TestRunner.java │ │ ├── RunListenerWithCapture.java │ │ ├── JUnitUtilities.java │ │ ├── TestRunnerPrintAll.java │ │ └── TestRunnerPrintFailuresOnly.java │ └── edu │ └── youruniversity │ └── hw1 │ ├── MyTestClass.java │ └── RunTests.java ├── .readthedocs.yml └── mkdocs.yml /python/head.tex: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | docs/README.md -------------------------------------------------------------------------------- /diff/index.md: -------------------------------------------------------------------------------- 1 | README.md -------------------------------------------------------------------------------- /diff/reference/10: -------------------------------------------------------------------------------- 1 | 55 -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | README.md -------------------------------------------------------------------------------- /java-mvn/src: 
-------------------------------------------------------------------------------- 1 | ../java/src -------------------------------------------------------------------------------- /java/classes/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /java/index.md: -------------------------------------------------------------------------------- 1 | README.md -------------------------------------------------------------------------------- /mysql/index.md: -------------------------------------------------------------------------------- 1 | README.md -------------------------------------------------------------------------------- /mysql/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /java-mvn/index.md: -------------------------------------------------------------------------------- 1 | README.md -------------------------------------------------------------------------------- /python/index.md: -------------------------------------------------------------------------------- 1 | README.md -------------------------------------------------------------------------------- /python/src/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /diff_general/index.md: -------------------------------------------------------------------------------- 1 | README.md -------------------------------------------------------------------------------- /diff_general/run.sh: -------------------------------------------------------------------------------- 1 | ./fib 2 | -------------------------------------------------------------------------------- /diff_general/test_data/1/input: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /diff_general/test_data/2/input: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /diff_general/test_data/3/input: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /diff_general/test_data/4/input: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /diff_general/test_data/5/input: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /diff_general/test_data/err/input: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /diff_general/test_data/err/output: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/c_sharp.md: -------------------------------------------------------------------------------- 1 | ../c#/README.md -------------------------------------------------------------------------------- /docs/diff.md: -------------------------------------------------------------------------------- 1 | ../diff/README.md -------------------------------------------------------------------------------- /docs/java.md: -------------------------------------------------------------------------------- 1 | ../java/README.md -------------------------------------------------------------------------------- /docs/mysql.md: 
-------------------------------------------------------------------------------- 1 | ../mysql/README.md -------------------------------------------------------------------------------- /docs/python.md: -------------------------------------------------------------------------------- 1 | ../python/README.md -------------------------------------------------------------------------------- /diff_general/test_data/1/output: -------------------------------------------------------------------------------- 1 | 1 2 | -------------------------------------------------------------------------------- /diff_general/test_data/2/output: -------------------------------------------------------------------------------- 1 | 1 2 | -------------------------------------------------------------------------------- /diff_general/test_data/3/output: -------------------------------------------------------------------------------- 1 | 2 2 | -------------------------------------------------------------------------------- /diff_general/test_data/4/output: -------------------------------------------------------------------------------- 1 | 3 2 | -------------------------------------------------------------------------------- /diff_general/test_data/5/output: -------------------------------------------------------------------------------- 1 | 5 2 | -------------------------------------------------------------------------------- /docs/java-mvn.md: -------------------------------------------------------------------------------- 1 | ../java-mvn/README.md -------------------------------------------------------------------------------- /diff_general/test_data/1/run.sh: -------------------------------------------------------------------------------- 1 | ./fib 1 2 | -------------------------------------------------------------------------------- /diff_general/test_data/2/run.sh: -------------------------------------------------------------------------------- 1 | ./fib 2 2 | 
-------------------------------------------------------------------------------- /diff_general/test_data/3/run.sh: -------------------------------------------------------------------------------- 1 | ./fib 3 2 | -------------------------------------------------------------------------------- /diff_general/test_data/4/run.sh: -------------------------------------------------------------------------------- 1 | ./fib 4 2 | -------------------------------------------------------------------------------- /diff_general/test_data/5/run.sh: -------------------------------------------------------------------------------- 1 | ./fib 5 2 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | mkdocs==1.3.0 2 | -------------------------------------------------------------------------------- /docs/diff_general.md: -------------------------------------------------------------------------------- 1 | ../diff_general/README.md -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @gradescope/programming-support 2 | -------------------------------------------------------------------------------- /python/src/requirements.txt: -------------------------------------------------------------------------------- 1 | gradescope-utils>=0.3.1 2 | -------------------------------------------------------------------------------- /manual_docker/source/setup.sh: -------------------------------------------------------------------------------- 1 | apt-get install -y cowsay 2 | -------------------------------------------------------------------------------- /diff/requirements.txt: -------------------------------------------------------------------------------- 1 | gradescope-utils>=0.3.1 2 | subprocess32 3 | 
-------------------------------------------------------------------------------- /diff_general/test_data/err/err: -------------------------------------------------------------------------------- 1 | Error: Insufficient arguments. 2 | -------------------------------------------------------------------------------- /diff_general/requirements.txt: -------------------------------------------------------------------------------- 1 | gradescope-utils>=0.3.1 2 | subprocess32 3 | pyyaml 4 | -------------------------------------------------------------------------------- /sigcse16.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/sigcse16.pdf -------------------------------------------------------------------------------- /docs/results.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/results.png -------------------------------------------------------------------------------- /mysql/requirements.txt: -------------------------------------------------------------------------------- 1 | gradescope-utils>=0.3.1 2 | subprocess32 3 | mysql-connector-python 4 | -------------------------------------------------------------------------------- /java/autograder.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/java/autograder.zip -------------------------------------------------------------------------------- /c#/src/autograder.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/c#/src/autograder.zip -------------------------------------------------------------------------------- /deploy_keys/ssh_config: 
-------------------------------------------------------------------------------- 1 | Host github.com 2 | IdentityFile ~/.ssh/deploy_key 3 | IdentitiesOnly yes 4 | -------------------------------------------------------------------------------- /docs/assignments.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/assignments.png -------------------------------------------------------------------------------- /docs/build_status.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/build_status.png -------------------------------------------------------------------------------- /docs/leaderboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/leaderboard.png -------------------------------------------------------------------------------- /docs/ssh_session.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/ssh_session.png -------------------------------------------------------------------------------- /docs/test_status.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/test_status.png -------------------------------------------------------------------------------- /java/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | java -cp classes/:lib/* com.gradescope.intlist.tests.RunTests 4 | -------------------------------------------------------------------------------- /mysql/autograder.zip: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/mysql/autograder.zip -------------------------------------------------------------------------------- /docs/add_public_key.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/add_public_key.png -------------------------------------------------------------------------------- /docs/assignment_form.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/assignment_form.png -------------------------------------------------------------------------------- /docs/debug_via_ssh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/debug_via_ssh.png -------------------------------------------------------------------------------- /docs/test_autograder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/test_autograder.png -------------------------------------------------------------------------------- /docs/zipping_files.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/zipping_files.png -------------------------------------------------------------------------------- /java/lib/junit-4.12.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/java/lib/junit-4.12.jar -------------------------------------------------------------------------------- /diff_general/test_data/1/settings.yml: -------------------------------------------------------------------------------- 1 | weight: 2.0 2 | 
msg: Failed to correctly compute the first Fibonacci number. 3 | -------------------------------------------------------------------------------- /docs/ssh_instructions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/ssh_instructions.png -------------------------------------------------------------------------------- /python/src/autograder.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/python/src/autograder.zip -------------------------------------------------------------------------------- /docs/assignment_outline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/assignment_outline.png -------------------------------------------------------------------------------- /docs/autograder_settings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/autograder_settings.png -------------------------------------------------------------------------------- /docs/base_image_selector.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/base_image_selector.png -------------------------------------------------------------------------------- /docs/configure_autograder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/configure_autograder.png -------------------------------------------------------------------------------- /docs/leaderboard_setting.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/leaderboard_setting.png -------------------------------------------------------------------------------- /docs/manual_grading_form.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/manual_grading_form.png -------------------------------------------------------------------------------- /docs/manual_grading_view.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/manual_grading_view.png -------------------------------------------------------------------------------- /java/lib/hamcrest-core-1.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/java/lib/hamcrest-core-1.3.jar -------------------------------------------------------------------------------- /docs/regrade_all_submissions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/regrade_all_submissions.png -------------------------------------------------------------------------------- /java_template/lib/junit-4.12.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/java_template/lib/junit-4.12.jar -------------------------------------------------------------------------------- /docs/debug_via_ssh_no_public_key.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/debug_via_ssh_no_public_key.png -------------------------------------------------------------------------------- /docs/update_and_test_autograder.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/update_and_test_autograder.png -------------------------------------------------------------------------------- /diff_general/run_autograder: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | cd /autograder/source 4 | 5 | bash ./compile.sh 6 | 7 | python3 run_tests.py 8 | -------------------------------------------------------------------------------- /docs/submit_form_with_leaderboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/submit_form_with_leaderboard.png -------------------------------------------------------------------------------- /java_template/lib/hamcrest-core-1.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/java_template/lib/hamcrest-core-1.3.jar -------------------------------------------------------------------------------- /docs/submission_with_leaderboard_enabled.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gradescope/autograder_samples/HEAD/docs/submission_with_leaderboard_enabled.png -------------------------------------------------------------------------------- /java_template/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Replace the main class with your own 4 | java -cp classes/:lib/* edu.youruniversity.hw1.RunTests 5 | -------------------------------------------------------------------------------- /python/src/make_autograder.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | zip -r 
autograder.zip setup.sh run_autograder run_tests.py requirements.txt tests/*.py 4 | -------------------------------------------------------------------------------- /diff/setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | apt-get install -y python3 python3-pip python3-dev 4 | 5 | pip3 install -r /autograder/source/requirements.txt 6 | -------------------------------------------------------------------------------- /mysql/make_autograder.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | zip -r autograder.zip setup.sh run_autograder run_tests.py requirements.txt database.sql tests/*.py 4 | -------------------------------------------------------------------------------- /diff_general/setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | apt-get install -y python3 python3-pip python3-dev 4 | 5 | pip3 install -r /autograder/source/requirements.txt 6 | -------------------------------------------------------------------------------- /python/src/setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | apt-get install -y python3 python3-pip python3-dev 4 | 5 | pip3 install -r /autograder/source/requirements.txt 6 | -------------------------------------------------------------------------------- /mysql/text_tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | if __name__ == '__main__': 4 | suite = unittest.defaultTestLoader.discover('tests') 5 | unittest.TextTestRunner().run(suite) 6 | -------------------------------------------------------------------------------- /diff_general/compile.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Prepare submission 4 | cp 
/autograder/submission/fib.c /autograder/source/fib.c 5 | 6 | cd /autograder/source 7 | 8 | make fib 9 | -------------------------------------------------------------------------------- /python/src/text_tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | if __name__ == '__main__': 4 | suite = unittest.defaultTestLoader.discover('tests') 5 | unittest.TextTestRunner().run(suite) 6 | -------------------------------------------------------------------------------- /java/compile.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | mkdir -p classes 4 | # Find all java files in src directory 5 | java_files=$(find src -name "*.java") 6 | javac -cp lib/*:. -d classes $java_files 7 | -------------------------------------------------------------------------------- /java_template/compile.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | mkdir -p classes 4 | # Find all java files in src directory 5 | java_files=$(find src -name "*.java") 6 | javac -cp lib/*:. 
-d classes $java_files 7 | -------------------------------------------------------------------------------- /docs/gradescope.css: -------------------------------------------------------------------------------- 1 | .wy-side-nav-search, .wy-nav-top { 2 | background-color: #003c46; 3 | } 4 | 5 | /* Hack around overlapping padding issue */ 6 | .rst-versions .rst-current-version { 7 | padding: 0 12px !important; 8 | } 9 | -------------------------------------------------------------------------------- /diff/run_autograder: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Set up autograder files 4 | 5 | cp /autograder/submission/fib.c /autograder/source/fib.c 6 | 7 | cd /autograder/source 8 | 9 | make fib 10 | 11 | python3 run_tests.py 12 | -------------------------------------------------------------------------------- /python/src/run_autograder: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Set up autograder files 4 | 5 | cp /autograder/submission/calculator.py /autograder/source/calculator.py 6 | 7 | cd /autograder/source 8 | 9 | python3 run_tests.py 10 | -------------------------------------------------------------------------------- /c#/src/Framework.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | public class HelloWorld 4 | { 5 | public static string Hello() 6 | { 7 | return ""; 8 | } 9 | 10 | public static string Bye() 11 | { 12 | return ""; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /java-mvn/.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | 3 | # Mobile Tools for Java (J2ME) 4 | .mtj.tmp/ 5 | 6 | # Package Files # 7 | *.jar 8 | *.war 9 | *.ear 10 | 11 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 12 | 
hs_err_pid* 13 | -------------------------------------------------------------------------------- /python/src/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [dev-packages] 7 | 8 | [packages] 9 | gradescope-utils = ">=0.3.1" 10 | "subprocess32" = "*" 11 | 12 | [requires] 13 | python_version = "2.7" 14 | -------------------------------------------------------------------------------- /c#/src/solution/HelloWorld.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | public class HelloWorld 4 | { 5 | public static string Hello() 6 | { 7 | return "Hello"; 8 | } 9 | 10 | public static string Bye() 11 | { 12 | return "Bye"; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /mysql/run_autograder: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Start MySQL server 4 | service mysql start 5 | 6 | # Set up autograder files 7 | cp /autograder/submission/queries.py /autograder/source/queries.py 8 | 9 | cd /autograder/source 10 | 11 | python3 run_tests.py 12 | -------------------------------------------------------------------------------- /c#/src/run_autograder: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | cd /autograder/source 4 | cp /autograder/submission/HelloWorld.cs . 
5 | mcs -target:library -pkg:nunit -out:test.dll test.cs HelloWorld.cs 6 | nunit-console test.dll -nodots -labels &> stdout_and_stderr 7 | python3 nunit_to_gs.py > /autograder/results/results.json 8 | -------------------------------------------------------------------------------- /java/.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | 3 | # Mobile Tools for Java (J2ME) 4 | .mtj.tmp/ 5 | 6 | # Package Files # 7 | *.jar 8 | *.war 9 | *.ear 10 | 11 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 12 | hs_err_pid* 13 | 14 | # Allow lib files in lib/ 15 | !lib/*.jar -------------------------------------------------------------------------------- /python/Makefile: -------------------------------------------------------------------------------- 1 | %.pdf : %.md src/*.py head.tex 2 | gpp -T $< | pandoc -o $@ -H head.tex --number-sections 3 | 4 | %.tex : %.md head.tex 5 | gpp -T $< -o $*.tmp.md 6 | pandoc $*.tmp.md -s -R -o $@ -H head.tex --number-sections 7 | 8 | all: README.pdf 9 | 10 | tex: README.tex 11 | 12 | clean: 13 | rm README.pdf 14 | -------------------------------------------------------------------------------- /java-mvn/run_autograder: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | cd /autograder/source 4 | 5 | # Make sure student has the right package name in their file 6 | cp /autograder/submission/IntList.java /autograder/source/src/main/java/com/gradescope/intlist/ 7 | 8 | mvn -q compile 9 | mvn -q exec:java > /autograder/results/results.json 10 | -------------------------------------------------------------------------------- /diff/run_tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from gradescope_utils.autograder_utils.json_test_runner import JSONTestRunner 3 | 4 | if __name__ == '__main__': 5 | suite = 
unittest.defaultTestLoader.discover('tests') 6 | with open('/autograder/results/results.json', 'w') as f: 7 | JSONTestRunner(visibility='visible', stream=f).run(suite) 8 | -------------------------------------------------------------------------------- /mysql/run_tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from gradescope_utils.autograder_utils.json_test_runner import JSONTestRunner 3 | 4 | if __name__ == '__main__': 5 | suite = unittest.defaultTestLoader.discover('tests') 6 | with open('/autograder/results/results.json', 'w') as f: 7 | JSONTestRunner(visibility='visible', stream=f).run(suite) 8 | -------------------------------------------------------------------------------- /python/src/run_tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from gradescope_utils.autograder_utils.json_test_runner import JSONTestRunner 3 | 4 | if __name__ == '__main__': 5 | suite = unittest.defaultTestLoader.discover('tests') 6 | with open('/autograder/results/results.json', 'w') as f: 7 | JSONTestRunner(visibility='visible', stream=f).run(suite) 8 | -------------------------------------------------------------------------------- /c#/src/incorrect/HelloWorld.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | public class HelloWorld 4 | { 5 | public static string Hello() 6 | { 7 | Console.WriteLine("Output"); 8 | Console.Error.WriteLine("Error"); 9 | return "Hello"; 10 | } 11 | 12 | public static string Bye() 13 | { 14 | return "Hello"; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /mysql/setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | apt-get install -y python3 python3-pip python3-dev mysql-server 4 | 5 | pip3 install -r /autograder/source/requirements.txt 6 | 7 | 
# Start MySQL server 8 | # will need to start again in run_autograder (this is just for importing data) 9 | service mysql start 10 | # Import test data 11 | mysql < /autograder/source/database.sql 12 | -------------------------------------------------------------------------------- /java/setup.sh: -------------------------------------------------------------------------------- 1 | apt-get -y install openjdk-11-jdk 2 | 3 | # The following lines are to make sure we can install packages from maven 4 | # See https://bugs.launchpad.net/ubuntu/+source/ca-certificates-java/+bug/1396760 5 | # and https://github.com/docker-library/openjdk/issues/19#issuecomment-70546872 6 | # apt-get install --reinstall ca-certificates-java 7 | # update-ca-certificates -f 8 | -------------------------------------------------------------------------------- /java-mvn/setup.sh: -------------------------------------------------------------------------------- 1 | apt-get -y install openjdk-8-jdk maven bash 2 | 3 | # The following lines are to make sure we can install packages from maven 4 | # See https://bugs.launchpad.net/ubuntu/+source/ca-certificates-java/+bug/1396760 5 | # and https://github.com/docker-library/openjdk/issues/19#issuecomment-70546872 6 | apt-get install --reinstall ca-certificates-java 7 | update-ca-certificates -f 8 | -------------------------------------------------------------------------------- /java_template/setup.sh: -------------------------------------------------------------------------------- 1 | apt-get -y install openjdk-8-jdk 2 | 3 | # The following lines are to make sure we can install packages from maven 4 | # See https://bugs.launchpad.net/ubuntu/+source/ca-certificates-java/+bug/1396760 5 | # and https://github.com/docker-library/openjdk/issues/19#issuecomment-70546872 6 | # apt-get install --reinstall ca-certificates-java 7 | # update-ca-certificates -f 8 | -------------------------------------------------------------------------------- 
/.github/ISSUE_TEMPLATE/support-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Support request 3 | about: Please send support requests to help@gradescope.com 4 | title: "[Please don't use the issue tracker for support requests]" 5 | labels: support 6 | assignees: '' 7 | 8 | --- 9 | 10 | If you need help with this package, please contact us at help@gradescope.com. Please don't use the issue tracker for support requests. Thanks! 11 | -------------------------------------------------------------------------------- /deploy_keys/run_autograder: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | BASE_DIR=/autograder/autograder_samples/python/src 4 | 5 | # Update autograder files 6 | cd /autograder/autograder_samples 7 | git pull 8 | 9 | cp /autograder/submission/calculator.py $BASE_DIR/calculator.py 10 | 11 | pip install -r $BASE_DIR/requirements.txt 12 | 13 | cd $BASE_DIR 14 | 15 | python run_tests.py > /autograder/results/results.json 16 | -------------------------------------------------------------------------------- /python/src/tests/test_partial.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import random 3 | from gradescope_utils.autograder_utils.decorators import partial_credit 4 | 5 | 6 | class TestPartialCredit(unittest.TestCase): 7 | def setUp(self): 8 | pass 9 | 10 | @partial_credit(10.0) 11 | def test_partial(self, set_score=None): 12 | """Sets partial credit""" 13 | set_score(random.randint(0, 100) *1.0/10.0) 14 | -------------------------------------------------------------------------------- /manual_docker/source/run_autograder: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | /usr/games/cowsay "Hello, world!" 
4 | 5 | files_submitted=$(ls /autograder/submission) 6 | num_files_submitted=$(ls /autograder/submission/ | wc -l) 7 | 8 | cat > /autograder/results/results.json < 2 | #include 3 | 4 | int fib(int n) { 5 | if (n <= 2){ 6 | return 1; 7 | } 8 | return fib(n-1) + fib(n-2); 9 | } 10 | 11 | int main(int argc, char** argv) { 12 | if (argc < 2) { 13 | fprintf(stderr, "Error: Insufficient arguments.\n"); 14 | return -1; 15 | } 16 | int arg = atoi(argv[1]); 17 | printf("%d\n", fib(arg)); 18 | return 0; 19 | } 20 | -------------------------------------------------------------------------------- /diff_general/fib.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | int fib(int n) { 5 | if (n <= 2){ 6 | return 1; 7 | } 8 | return fib(n-1) + fib(n-2); 9 | } 10 | 11 | int main(int argc, char** argv) { 12 | if (argc < 2) { 13 | fprintf(stderr, "Error: Insufficient arguments.\n"); 14 | return -1; 15 | } 16 | int arg = atoi(argv[1]); 17 | printf("%d\n", fib(arg)); 18 | return 0; 19 | } 20 | -------------------------------------------------------------------------------- /mysql/framework/queries.py: -------------------------------------------------------------------------------- 1 | import mysql.connector 2 | 3 | 4 | class Queries(object): 5 | """Database queries""" 6 | 7 | def __init__(self, cursor): 8 | self.cursor = cursor 9 | 10 | def department_budget(self, department_name): 11 | """Get department budget by name""" 12 | pass 13 | 14 | def department_expenses(self, department_name): 15 | """Get department expenses by name""" 16 | pass 17 | -------------------------------------------------------------------------------- /java_template/run_autograder: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | cd /autograder/source 4 | 5 | # Copy the student's code into the autograder directory. 
Make sure 6 | # student has the right package name in their file and that it's in 7 | # the right place 8 | cp /autograder/submission/StudentFile.java /autograder/source/src/edu/youruniversity/hw1/ 9 | 10 | # Compile the student's code with autograder code 11 | ./compile.sh 12 | 13 | # Run the autograder code 14 | ./run.sh > /autograder/results/results.json 15 | -------------------------------------------------------------------------------- /java/run_autograder: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | cd /autograder/source 4 | 5 | # Copy the student's code into the autograder directory. Make sure 6 | # student has the right package name in their file and that it's in 7 | # the right place 8 | cp /autograder/submission/IntList.java /autograder/source/src/main/java/com/gradescope/intlist/ 9 | 10 | # Compile the student's code with autograder code 11 | bash ./compile.sh 12 | 13 | # Run the autograder code 14 | bash ./run.sh > /autograder/results/results.json 15 | -------------------------------------------------------------------------------- /python/src/tests/test_unknown.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from gradescope_utils.autograder_utils.decorators import weight, visibility 3 | from calculator import Calculator, CalculatorException 4 | 5 | 6 | class TestEvaluator(unittest.TestCase): 7 | def setUp(self): 8 | self.calc = Calculator() 9 | 10 | @weight(2) 11 | @visibility('after_published') 12 | def test_eval_power(self): 13 | """Evaluating 2 ** 8 should raise an exception""" 14 | with self.assertRaises(CalculatorException): 15 | self.calc.eval("2 ** 8") 16 | -------------------------------------------------------------------------------- /diff_general/run_tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from 
gradescope_utils.autograder_utils.json_test_runner import JSONTestRunner 3 | from test_generator import find_data_directories, build_test_class, TestMetaclass 4 | 5 | if __name__ == '__main__': 6 | suite = unittest.TestSuite() 7 | 8 | for name in find_data_directories(): 9 | klass = build_test_class(name) 10 | suite.addTest(klass(TestMetaclass.test_name(name))) 11 | 12 | with open('/autograder/results/results.json', 'w') as f: 13 | JSONTestRunner(visibility='visible', stream=f).run(suite) 14 | -------------------------------------------------------------------------------- /c#/src/test.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Text; 3 | using System.Collections.Generic; 4 | using NUnit.Framework; 5 | 6 | [TestFixture] 7 | public class HelloWorldTest 8 | { 9 | [Test, Property("Weight", 1.0), Property("Visibility", "visible")] 10 | public void HelloTest() 11 | { 12 | Assert.AreEqual(HelloWorld.Hello(), "Hello"); 13 | } 14 | 15 | [Test, Property("Weight", 2.0), Property("Visibility", "hidden"), Property("Name", "Bye")] 16 | public void MyTest2() 17 | { 18 | Assert.AreEqual(HelloWorld.Bye(), "Bye"); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /deploy_keys/setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | cd /autograder/source 4 | 5 | apt-get install -y python python-pip python-dev 6 | 7 | mkdir -p /root/.ssh 8 | cp ssh_config /root/.ssh/config 9 | # Make sure to include your private key here 10 | cp deploy_key /root/.ssh/deploy_key 11 | # To prevent host key verification errors at runtime 12 | ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts 13 | 14 | # Clone autograder files 15 | git clone git@github.com:gradescope/autograder_samples /autograder/autograder_samples 16 | # Install python dependencies 17 | pip install -r 
/autograder/autograder_samples/python/src/requirements.txt 18 | -------------------------------------------------------------------------------- /python/src/tests/test_files.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from gradescope_utils.autograder_utils.decorators import weight 3 | from gradescope_utils.autograder_utils.files import check_submitted_files 4 | 5 | 6 | class TestFiles(unittest.TestCase): 7 | @weight(0) 8 | def test_submitted_files(self): 9 | """Check submitted files""" 10 | missing_files = check_submitted_files(['calculator.py']) 11 | for path in missing_files: 12 | print('Missing {0}'.format(path)) 13 | self.assertEqual(len(missing_files), 0, 'Missing some required files!') 14 | print('All required files submitted!') 15 | -------------------------------------------------------------------------------- /diff/tests/test_from_file.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from gradescope_utils.autograder_utils.decorators import weight 3 | import subprocess32 as subprocess 4 | 5 | 6 | class TestDiff(unittest.TestCase): 7 | def setUp(self): 8 | pass 9 | 10 | @weight(1) 11 | def test_from_file(self): 12 | """10th Fibonacci number""" 13 | fib = subprocess.Popen(["./fib", "10"], stdout=subprocess.PIPE) 14 | output = fib.stdout.read().strip() 15 | with open("reference/10", "rb") as outputFile: 16 | referenceOutput = outputFile.read() 17 | 18 | self.assertEqual(output, referenceOutput) 19 | fib.terminate() 20 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 
12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior, including example code used to trigger the behavior. 15 | 16 | ```python 17 | import gradescope_utils 18 | ... 19 | ``` 20 | 21 | **Expected behavior** 22 | A clear and concise description of what you expected to happen. 23 | 24 | **Screenshots** 25 | If applicable, add screenshots to help explain your problem. 26 | 27 | **Additional context** 28 | Add any other context about the problem here. 29 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-20.04 11 | tools: 12 | python: "3.9" 13 | 14 | # Build documentation in the docs/ directory with Sphinx 15 | mkdocs: 16 | configuration: mkdocs.yml 17 | 18 | # If using Sphinx, optionally build your docs in additional formats such as PDF 19 | # formats: 20 | # - pdf 21 | 22 | # Optionally declare the Python requirements required to build your docs 23 | python: 24 | install: 25 | - requirements: docs/requirements.txt 26 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 
15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /docs/manual_grading.md: -------------------------------------------------------------------------------- 1 | # Manual Grading 2 | 3 | Gradescope also supports manual grading for programming assignments. You can 4 | enable manual grading when creating your assignment, or afterwards by going to 5 | the assignment's settings page. 6 | 7 | [![Enabling manual grading](manual_grading_form.png)](manual_grading_form.png) 8 | 9 | When manual code grading is enabled, you will be able to grade students' code 10 | manually using a rubric, as with other assignments on Gradescope. You can also 11 | leave comments on lines of code. 12 | 13 | [![Manual code grading UI](manual_grading_view.png)](manual_grading_view.png) 14 | 15 | If you would like to have more than one manually graded question, you can create 16 | more questions via the "Edit Outline" step. 
17 | 18 | [![Assignment outline](assignment_outline.png)](assignment_outline.png) 19 | -------------------------------------------------------------------------------- /java/src/main/java/com/gradescope/intlist/tests/RunTests.java: -------------------------------------------------------------------------------- 1 | package com.gradescope.intlist.tests; 2 | 3 | import org.junit.runner.RunWith; 4 | import org.junit.runners.Suite; 5 | import org.junit.runner.JUnitCore; 6 | import org.junit.runner.Result; 7 | // import junit.tests.framework.TestListenerTest 8 | import com.gradescope.intlist.tests.IntListTest; 9 | import com.gradescope.jh61b.grader.GradedTestListenerJSON; 10 | 11 | @RunWith(Suite.class) 12 | @Suite.SuiteClasses({ 13 | IntListTest.class, 14 | IntListPredicates.class, 15 | }) 16 | public class RunTests { 17 | public static void main(String[] args) { 18 | JUnitCore runner = new JUnitCore(); 19 | runner.addListener(new GradedTestListenerJSON()); 20 | // runner.addListener(new TestListenerTest()); 21 | Result r = runner.run(RunTests.class); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /java/src/main/java/com/gradescope/jh61b/grader/GradedTestRunnerJSON.java: -------------------------------------------------------------------------------- 1 | // adapted from http://memorynotfound.com/add-junit-listener-example/ 2 | 3 | // Not actually in use in the current autograder, but if at some point we want to use the 4 | // RunWith annotation instead of adding listeners manually, this is what we'll need. 
5 | 6 | package com.gradescope.jh61b.grader; 7 | 8 | import org.junit.runner.notification.RunNotifier; 9 | import org.junit.runners.BlockJUnit4ClassRunner; 10 | import org.junit.runners.model.InitializationError; 11 | 12 | public class GradedTestRunnerJSON extends BlockJUnit4ClassRunner { 13 | 14 | public GradedTestRunnerJSON(Class inputClass) throws InitializationError { 15 | super(inputClass); 16 | } 17 | 18 | @Override 19 | public void run(RunNotifier notifier){ 20 | notifier.addListener(new GradedTestListenerJSON()); 21 | super.run(notifier); 22 | } 23 | } -------------------------------------------------------------------------------- /java_template/src/com/gradescope/jh61b/grader/GradedTestRunnerJSON.java: -------------------------------------------------------------------------------- 1 | // adapted from http://memorynotfound.com/add-junit-listener-example/ 2 | 3 | // Not actually in use in the current autograder, but if at some point we want to use the 4 | // RunWith annotation instead of adding listeners manually, this is what we'll need. 
5 | 6 | package com.gradescope.jh61b.grader; 7 | 8 | import org.junit.runner.notification.RunNotifier; 9 | import org.junit.runners.BlockJUnit4ClassRunner; 10 | import org.junit.runners.model.InitializationError; 11 | 12 | public class GradedTestRunnerJSON extends BlockJUnit4ClassRunner { 13 | 14 | public GradedTestRunnerJSON(Class inputClass) throws InitializationError { 15 | super(inputClass); 16 | } 17 | 18 | @Override 19 | public void run(RunNotifier notifier){ 20 | notifier.addListener(new GradedTestListenerJSON()); 21 | super.run(notifier); 22 | } 23 | } -------------------------------------------------------------------------------- /java_template/src/edu/youruniversity/hw1/MyTestClass.java: -------------------------------------------------------------------------------- 1 | package edu.youruniversity.hw1; 2 | 3 | import org.junit.Test; 4 | import static org.junit.Assert.*; 5 | // This is an annotation for assigning point values to tests 6 | import com.gradescope.jh61b.grader.GradedTest; 7 | 8 | // Import anything else you need to run the tests, such as the students' classes 9 | 10 | public class MyTestClass { 11 | @Test 12 | @GradedTest(name="Test 1+1", max_score=1) 13 | public void test_1p1() { 14 | int x = 1 + 1; 15 | System.out.println("Tested 1+1, got " + x); 16 | assertEquals(x, 2); 17 | } 18 | 19 | @Test 20 | @GradedTest(name="Test 1+1*2", max_score=1, visibility="after_published") 21 | public void test_1p1t2() { 22 | int x = 1 + 1 * 2; 23 | System.out.println("Tested 1+1*2, got " + x); 24 | assertEquals(x, 3); 25 | } 26 | // Add more tests... 27 | } 28 | -------------------------------------------------------------------------------- /docs/tech.md: -------------------------------------------------------------------------------- 1 | # Technical Details 2 | 3 | Under the hood, we are using Docker to build a container image that is 4 | used each time a student's submission needs to be graded. 
If you're 5 | not familiar with Docker, think of it as a lightweight virtual 6 | machine. Each container is isolated from others, and you can install 7 | anything you want inside the container. 8 | 9 | The image is based on **Ubuntu**, so packages can be installed with 10 | apt, or from source or other means. The image is built once when you 11 | set up your assignment, and each time students submit a new instance 12 | of that image is spun up. 13 | 14 | Our autograder harness downloads the student's submission and puts it 15 | in /autograder/submission, and then runs 16 | **/autograder/run_autograder**. Once run_autograder has finished 17 | running, the harness checks for output in 18 | /autograder/results/results.json, and uploads these results to 19 | Gradescope. 20 | -------------------------------------------------------------------------------- /diff/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | -------------------------------------------------------------------------------- /mysql/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | -------------------------------------------------------------------------------- /mysql/solution/queries.py: -------------------------------------------------------------------------------- 1 | import mysql.connector 2 | 3 | 4 | class Queries(object): 5 | """Database queries""" 6 | 7 | def __init__(self, cursor): 8 | self.cursor = cursor 9 | 10 | def department_budget(self, department_name): 11 | """Get department budget by name""" 12 | query = "SELECT budget FROM departments WHERE name = %s;" 13 | self.cursor.execute(query, (department_name,)) 14 | return self.cursor.fetchone()[0] 15 | 16 | def department_expenses(self, department_name): 17 | """Get department expenses by name""" 18 | department_query = "SELECT id FROM departments WHERE name = %s;" 19 | self.cursor.execute(department_query, (department_name,)) 20 | department_id = self.cursor.fetchone()[0] 21 | 22 | expenses_query = "SELECT sum(salary) from employees where department_id = %s;" 23 | self.cursor.execute(expenses_query, (department_id,)) 24 | return self.cursor.fetchone()[0] 25 | -------------------------------------------------------------------------------- /diff_general/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 
| var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | -------------------------------------------------------------------------------- /python/src/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | -------------------------------------------------------------------------------- /mysql/tests/test_simple_queries.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from gradescope_utils.autograder_utils.decorators import weight 3 | import mysql.connector 4 | from queries import Queries 5 | 6 | class TestSimpleQueries(unittest.TestCase): 7 | def setUp(self): 8 | self.connection = mysql.connector.connect(user='test', password='password', database='test_data') 9 | self.cursor = self.connection.cursor() 10 | self.queries = Queries(self.cursor) 11 | 12 | def tearDown(self): 13 | self.cursor.close() 14 | self.connection.close() 15 | 16 | @weight(1) 17 | def test_department_budget(self): 18 | """Get department budget by name""" 19 | val = self.queries.department_budget("Engineering") 20 | self.assertEqual(val, 1000000) 21 | 22 | @weight(1) 23 | def test_department_expenses(self): 24 | """Get department expenses by name""" 25 | val = self.queries.department_expenses("Engineering") 26 | self.assertEqual(val, 200000) 27 | -------------------------------------------------------------------------------- /java/src/main/java/com/gradescope/intlist/tests/IntListPredicates.java: -------------------------------------------------------------------------------- 1 | package com.gradescope.intlist.tests; 2 | 3 | import org.junit.Test; 4 | import static org.junit.Assert.*; 5 | import com.gradescope.jh61b.grader.GradedTest; 6 | 7 | import com.gradescope.intlist.IntList; 8 | import 
# Place your imports here


class CalculatorException(Exception):
    """A class to throw if you come across incorrect syntax or other issues"""
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)


class Calculator(object):
    """Infix calculator REPL

    Parses and evaluates infix arithmetic with the 4 basic operators
    and parentheses. Must obey order of operations.

    NOTE(review): this is the 'incorrect_2' sample submission used to
    exercise the autograder; its defects below are part of the fixture.
    """

    def read(self):
        """Read input from stdin"""
        return input('> ')

    def eval(self, string):
        """Evaluates an infix arithmetic expression"""
        # Delegates to Python's built-in eval(): unsafe on untrusted input,
        # and bad syntax raises SyntaxError rather than the expected
        # CalculatorException, so error-handling tests fail this submission.
        return eval(string)

    def loop(self):
        """Runs the read-eval-print loop

        Read a line of input, evaluate it, and print it. 

        Repeat the above until the user types 'quit'."""
        # Reads exactly one line and never evaluates, prints, or repeats —
        # the REPL integration tests fail this submission on purpose.
        line = self.read()
        pass

if __name__ == '__main__':
    calc = Calculator()
    calc.loop()
# -*- coding: utf-8 -*-

import unittest
import random
from gradescope_utils.autograder_utils.decorators import leaderboard


class TestLeaderboard(unittest.TestCase):
    """Demonstrates Gradescope leaderboard columns of several value types.

    The @leaderboard decorator registers a column and injects a
    `set_leaderboard_value` callback at run time; the `=None` default
    merely keeps the signature callable without the decorator machinery.
    """

    def setUp(self):
        pass

    @leaderboard("high score")
    def test_leaderboard(self, set_leaderboard_value=None):
        """Sets a leaderboard value"""
        # Integer-valued column; the random value stands in for a real score.
        set_leaderboard_value(random.randint(0, 10))

    @leaderboard("accuracy")
    def test_leaderboard_float(self, set_leaderboard_value=None):
        """Sets a leaderboard value"""
        # Float-valued column, rounded to 2 decimal places for display.
        set_leaderboard_value(round(random.uniform(50, 100), 2))

    @leaderboard("stars")
    def test_string(self, set_leaderboard_value=None):
        """Sets a leaderboard value"""
        # String values work too: here a row of 0-10 star emoji.
        set_leaderboard_value("🌟" * random.randint(0, 10))

    @leaderboard("time", "asc")
    def test_another(self, set_leaderboard_value=None):
        """Sets a leaderboard value that's sorted ascending (lower is better)"""
        # The second decorator argument selects the sort order ("asc" =
        # lower is better); the other columns use the default ordering.
        set_leaderboard_value(round(random.gauss(7, 3), 2))
# Place your imports here


class CalculatorException(Exception):
    """A class to throw if you come across incorrect syntax or other issues"""
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)


class Calculator(object):
    """Infix calculator REPL

    Parses and evaluates infix arithmetic with the 4 basic operators
    and parentheses. Must obey order of operations.

    This is the starter skeleton handed to students; the TODO stubs
    below are to be filled in by the submission.
    """

    def read(self):
        """Read input from stdin"""
        return input('> ')

    def eval(self, string):
        """Evaluates an infix arithmetic expression"""
        # TODO: Implement me
        # Should return the numeric result, and raise CalculatorException
        # on incorrect syntax or other issues (see class above).
        pass

    def loop(self):
        """Runs the read-eval-print loop

        Read a line of input, evaluate it, and print it.

        Repeat the above until the user types 'quit'."""
        line = self.read()
        # TODO: Implement the loop
        pass

if __name__ == '__main__':
    calc = Calculator()
    calc.loop()
notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /java_template/src/com/gradescope/jh61b/LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 Josh Hug 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
import unittest
from gradescope_utils.autograder_utils.decorators import weight, visibility, number
from calculator import Calculator


class TestComplex(unittest.TestCase):
    """Graded tests for precedence, parentheses, and negative numbers."""

    def setUp(self):
        # Calculator is the student's submission, imported at module scope.
        self.calc = Calculator()

    @weight(2)
    @visibility('after_due_date')
    @number("2.1")
    def test_eval_parens(self):
        """Evaluate (1 + 1) * 4"""
        self.assertEqual(self.calc.eval("(1 + 1) * 4"), 8)

    @weight(2)
    @visibility('after_due_date')
    @number("2.2")
    def test_eval_precedence(self):
        """Evaluate 1 + 1 * 8"""
        self.assertEqual(self.calc.eval("1 + 1 * 8"), 9)

    @weight(2)
    @number("2.3")
    def test_eval_mul_div(self):
        """Evaluate 8 / 4 * 2"""
        self.assertEqual(self.calc.eval("8 / 4 * 2"), 4)

    @weight(2)
    @number("2.4")
    def test_eval_negative_number(self):
        """Evaluate -2 + 6"""
        self.assertEqual(self.calc.eval("-2 + 6"), 4)
import unittest
from gradescope_utils.autograder_utils.decorators import weight, number
from calculator import Calculator


class TestSimpleArithmetic(unittest.TestCase):
    """Graded tests covering one binary operation per expression."""

    def setUp(self):
        # Calculator is the student's submission, imported at module scope.
        self.calc = Calculator()

    @weight(1)
    @number("1.1")
    def test_eval_add(self):
        """Evaluate 1 + 1"""
        self.assertEqual(self.calc.eval("1 + 1"), 2)

    @weight(1)
    @number("1.2")
    def test_eval_sub(self):
        """Evaluate 2 - 1"""
        self.assertEqual(self.calc.eval("2 - 1"), 1)

    @weight(1)
    @number("1.3")
    def test_eval_mul(self):
        """Evaluate 4 * 8"""
        self.assertEqual(self.calc.eval("4 * 8"), 32)

    @weight(1)
    @number("1.4")
    def test_eval_div(self):
        """Evaluate 8/4"""
        self.assertEqual(self.calc.eval("8 / 4"), 2)

    @weight(2)
    @number("1.5")
    def test_eval_whitespace(self):
        """Evaluate 1+1 (no whitespace)"""
        self.assertEqual(self.calc.eval("1+1"), 2)
import com.gradescope.intlist.IntList; 8 | import com.gradescope.intlist.RefIntList; 9 | import com.gradescope.intlist.AbstractIntList; 10 | 11 | public class IntListTest { 12 | @Test 13 | @GradedTest(name="Test creating an IntList from varargs", max_score=1) 14 | public void test_varargs() { 15 | AbstractIntList test = IntList.createList(1, 2, 4, 8, 16); 16 | assertEquals(test, RefIntList.createList(1, 2, 4, 8, 16)); 17 | } 18 | 19 | @Test 20 | @GradedTest(name="Test appending to a list", max_score=1) 21 | public void test_append() { 22 | AbstractIntList test = new IntList(RefIntList.createList(1, 2, 4, 8, 16)); 23 | test.append(32); 24 | assertEquals(test, RefIntList.createList(1, 2, 4, 8, 16, 32)); 25 | } 26 | 27 | @Test 28 | @GradedTest(name="Test converting a list to a string", max_score=1) 29 | public void test_to_string() { 30 | AbstractIntList test = new IntList(RefIntList.createList(1, 2, 4, 8, 16)); 31 | assertEquals(test.toString(), "1 2 4 8 16\n"); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Gradescope Autograder Documentation 2 | repo_url: https://github.com/gradescope/autograder_samples 3 | docs_dir: docs 4 | theme: readthedocs 5 | extra_css: [gradescope.css] 6 | nav: 7 | - 'Home': index.md 8 | - 'Updates': updates.md 9 | - 'Getting Started': getting_started.md 10 | - 'Autograder Specifications': specs.md 11 | - 'Manual Grading': manual_grading.md 12 | - 'Technical Details': tech.md 13 | - 'Base Images': base_images.md 14 | - 'Debugging via SSH': ssh.md 15 | - 'Troubleshooting': troubleshooting.md 16 | - 'Python 3 Issues': python3_issues.md 17 | - 'Autograder Best Practices': best_practices.md 18 | - 'Advanced Usage': 19 | - 'Submission Metadata': submission_metadata.md 20 | - 'Manual Docker Configuration': manual_docker.md 21 | - 'Pulling Autograder from Git': git_pull.md 22 | - 
-- Schema and seed data for the MySQL autograder example.
CREATE DATABASE test_data;
USE test_data;

-- Create user, grant access to database
CREATE USER 'test'@'localhost' IDENTIFIED BY 'password';
GRANT ALL on test_data.* to 'test'@'localhost';

CREATE TABLE departments (
  id bigint unsigned not null auto_increment,
  name varchar(255) not null,
  budget DECIMAL(20,2),
  -- Fix: both tables previously reused the name 'pk_example'; constraint
  -- names must be unique per schema, so each table gets its own.
  constraint pk_departments primary key (id)
);

CREATE TABLE employees (
  id bigint unsigned not null auto_increment,
  name varchar(255) not null,
  salary DECIMAL(20,2) not null,
  department_id bigint unsigned not null,
  constraint pk_employees primary key (id),
  FOREIGN KEY (department_id)
    REFERENCES departments(id)
    ON DELETE CASCADE
);

INSERT INTO departments ( name, budget ) VALUES
('Executive', 1000000),
('Engineering', 1000000),
('Sales', 1000000),
('Legal', 1000000),
('Marketing', 1000000)
;

INSERT INTO employees ( name, salary, department_id ) VALUES
( 'Hilda Nguyen', 200000, 1),
( 'Rose Perez', 100000, 2),
( 'Byron Washington', 100000, 3),
( 'Dale Long', 100000, 4),
( 'Paul Gregory', 100000, 2),
( 'Kent Hansen', 100000, 3),
( 'Jean Pena', 100000, 5)
;
-------------------------------------------------------------------------------- /java/src/main/java/com/gradescope/intlist/IntList.java: -------------------------------------------------------------------------------- 1 | package com.gradescope.intlist; 2 | 3 | import com.gradescope.intlist.AbstractIntList; 4 | 5 | public class IntList extends AbstractIntList{ 6 | 7 | /** 8 | * Calls the parent constructor 9 | */ 10 | public IntList(int head){ 11 | super(head); 12 | } 13 | 14 | /** 15 | * This copy constructor is needed for test cases; do not touch 16 | */ 17 | public IntList(AbstractIntList list){ 18 | this(list.head); 19 | if(list.next != null){ 20 | this.next = new IntList(list.next); 21 | } 22 | } 23 | /** 24 | * Creates an IntList from a variable number of arguments 25 | */ 26 | public static AbstractIntList createList(int... a){ 27 | // TODO: Fill me in! 28 | } 29 | 30 | /** 31 | * Appends value to the end of the list 32 | */ 33 | public AbstractIntList append(int value){ 34 | // TODO: Fill me in! 35 | } 36 | 37 | /** 38 | * Returns true if the IntList contains the value 39 | */ 40 | public boolean contains(int value){ 41 | // TODO: Fill me in! 42 | } 43 | 44 | /** 45 | * Converts an IntList to a string. 46 | * 47 | * The last element should be 48 | * followed by a newline instead of a space. 49 | */ 50 | public String toString(){ 51 | // TODO: FIll me in! 
import unittest
from gradescope_utils.autograder_utils.decorators import weight, tags
import subprocess


class TestIntegration(unittest.TestCase):
    """End-to-end tests that drive the submitted REPL as a subprocess."""

    # -u keeps the child's stdout unbuffered so the prompt arrives promptly.
    CALC_CMD = ['python3', '-u', 'calculator.py']

    def setUp(self):
        pass

    def _start_calc(self):
        """Launch one REPL process wired up for text-mode pipe I/O."""
        return subprocess.Popen(self.CALC_CMD,
                                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                encoding='utf8')

    @weight(2)
    @tags("integration")
    def test_single_input(self):
        """Evaluate 1 + 1 in the REPL"""
        calc = self._start_calc()
        # Second argument is a 1-second timeout for communicate().
        output, err = calc.communicate("1 + 1\n", 1)
        self.assertTrue(output.startswith(">"))  # Check for presence of prompt
        answer = output[1:].split()[0]  # Separate prompt from answer
        self.assertEqual(answer.strip(), "2")
        calc.terminate()

    @weight(2)
    @tags("integration")
    def test_quit(self):
        """Quit the REPL"""
        calc = self._start_calc()
        calc.communicate("quit\n", 1)

        # Snapshot the return code BEFORE terminate(): if the process had
        # not already exited on 'quit', the assertion below must still fail.
        returncode = calc.returncode
        if returncode is None:
            calc.terminate()
        self.assertIsNotNone(returncode)
        self.assertEqual(returncode, 0)
class CalculatorException(Exception):
    """A class to throw if you come across incorrect syntax or other issues"""
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)


class Calculator(object):
    """Infix calculator REPL

    Parses and evaluates infix arithmetic with the 4 basic operators
    and parentheses. Must obey order of operations.

    This is the 'incomplete_1' sample submission: eval() only handles a
    single `<int> <op> <int>` expression (no precedence, no parentheses).
    """

    def read(self):
        """Read input from stdin"""
        return input('> ')

    def eval(self, string):
        """Evaluates an infix arithmetic expression

        Handles exactly one whitespace-separated binary operation and
        raises CalculatorException for an unrecognized operator.
        """
        tokens = string.split()
        op1 = int(tokens.pop(0))
        operator = tokens.pop(0)
        op2 = int(tokens.pop(0))
        if operator == '+':
            return op1 + op2
        elif operator == '-':
            return op1 - op2
        elif operator == '*':
            return op1 * op2
        elif operator == '/':
            # BUG FIX: this branch returned op1 * op2 (a copy-paste of the
            # '*' case), so every division evaluated as a multiplication.
            # If that wrong answer was intentional for this fixture,
            # restore the original line.
            return op1 / op2
        else:
            raise CalculatorException("Unknown operator %s" % operator)

    def loop(self):
        """Runs the read-eval-print loop

        Read a line of input, evaluate it, and print it.

        Repeat the above until the user types 'quit'."""
        line = self.read()
        while line != "quit":
            value = self.eval(line)
            print(value)
            line = self.read()

if __name__ == '__main__':
    calc = Calculator()
    calc.loop()
class CalculatorException(Exception):
    """A class to throw if you come across incorrect syntax or other issues"""
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)


class Calculator(object):
    """Infix calculator REPL

    Parses and evaluates infix arithmetic with the 4 basic operators
    and parentheses. Must obey order of operations.
    """

    # Operator token -> implementation. Evaluation below stays strictly
    # left-to-right, so this table does not introduce any precedence.
    _OPS = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        '*': lambda a, b: a * b,
        '/': lambda a, b: a / b,
    }

    def read(self):
        """Read input from stdin"""
        return input('> ')

    def lex(self, string):
        """Break the string up into tokens"""
        return string.split()

    def eval(self, string):
        """Evaluates an infix arithmetic expression

        Folds the token stream left-to-right (no operator precedence);
        raises CalculatorException for an unrecognized operator.
        """
        tokens = self.lex(string)
        result = int(tokens.pop(0))
        while tokens:
            op_token = tokens.pop(0)
            rhs = int(tokens.pop(0))
            apply_op = self._OPS.get(op_token)
            if apply_op is None:
                raise CalculatorException("Unknown operator %s" % op_token)
            result = apply_op(result, rhs)
        return result

    def loop(self):
        """Runs the read-eval-print loop

        Read a line of input, evaluate it, and print it.

        Repeat the above until the user types 'quit'."""
        line = self.read()
        while line != "quit":
            print(self.eval(line))
            line = self.read()

if __name__ == '__main__':
    calc = Calculator()
    calc.loop()
/**
 * A simple test looks like this:
 * <pre>
 * public class Example {
 *    &#064;GradedTest
 *    public void method() {
 *       org.junit.Assert.assertTrue( new ArrayList().isEmpty() );
 *    }
 * }
 * </pre>
 *
 * <p>The GradedTest annotation allows you to specify optional parameters:
 * <ul>
 *   <li>name: String that specifies the name of the test.</li>
 *   <li>number: String that specifies the number of the test.</li>
 *   <li>max_score: double that specifies the number of points the test is worth.
 *       (Earlier docs called this "points"; the actual element is max_score.)</li>
 *   <li>visibility: String that specifies the visibility condition for the test.</li>
 * </ul>
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD})
public @interface GradedTest {

    /**
     * Default empty exception; private constructor means it is never
     * instantiated and serves only as a sentinel type.
     */
    static class None extends Throwable {
        private static final long serialVersionUID = 1L;

        private None() {
        }
    }

    /** Human-readable test name shown in the results. */
    String name() default "Unnamed test";
    /** Optional test number used for ordering/display. */
    String number() default "";
    /** Maximum points awarded when the test passes. */
    double max_score() default 1.0;
    /** Gradescope visibility setting, e.g. "visible" or "after_due_date". */
    String visibility() default "visible";
}
/**
 * A simple test looks like this:
 * <pre>
 * public class Example {
 *    &#064;GradedTest
 *    public void method() {
 *       org.junit.Assert.assertTrue( new ArrayList().isEmpty() );
 *    }
 * }
 * </pre>
 *
 * <p>The GradedTest annotation allows you to specify optional parameters:
 * <ul>
 *   <li>name: String that specifies the name of the test.</li>
 *   <li>number: String that specifies the number of the test.</li>
 *   <li>max_score: double that specifies the number of points the test is worth.
 *       (Earlier docs called this "points"; the actual element is max_score.)</li>
 *   <li>visibility: String that specifies the visibility condition for the test.</li>
 * </ul>
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD})
public @interface GradedTest {

    /**
     * Default empty exception; private constructor means it is never
     * instantiated and serves only as a sentinel type.
     */
    static class None extends Throwable {
        private static final long serialVersionUID = 1L;

        private None() {
        }
    }

    /** Human-readable test name shown in the results. */
    String name() default "Unnamed test";
    /** Optional test number used for ordering/display. */
    String number() default "";
    /** Maximum points awarded when the test passes. */
    double max_score() default 1.0;
    /** Gradescope visibility setting, e.g. "visible" or "after_due_date". */
    String visibility() default "visible";
}
public class TestRunner {

    // Modes accepted by runTests: print all results, or only failures.
    // NOTE(review): generics reconstructed; extraction stripped <String>.
    private static HashSet<String> validModes = new HashSet<String>(
        Arrays.asList("all", "failed"));

    /**
     * Warns on stdout when mode is unrecognized. It does NOT throw or
     * otherwise signal the caller.
     */
    public static void validateMode(String mode) {
        if (!validModes.contains(mode)) {
            System.out.println("Invalid mode specified when calling TestRunner.runTests: " + mode);
            System.out.println("Valid modes are: " + validModes);
        }
    }

    /**
     * Runs the given test classes with the listener selected by mode
     * ("failed" prints only failures, "all" prints every result).
     * NOTE(review): an invalid mode only prints the warning above and then
     * matches neither branch, so nothing runs — confirm this is intended.
     */
    public static void runTests(String mode, Class... classes) {
        validateMode(mode);

        if (mode.equals("failed")) {
            TestRunnerPrintFailuresOnly.runTests(classes);
        } else if (mode.equals("all")) {
            TestRunnerPrintAll.runTests(classes);
        }
    }

    /** Convenience overload: defaults to printing failures only. */
    public static void runTests(Class... classes) {
        runTests("failed", classes);
    }

}
You can do this by 31 | running something like 32 | 33 | ``` 34 | ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts 35 | ``` 36 | 37 | in your setup script. 38 | 39 | ## Github Deploy Keys 40 | 41 | Github has a 42 | [deploy keys](https://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys) 43 | feature, which allows you to set up a read-only key without a passphrase for the purpose of pulling down your autograder. Follow their instructions on how to generate a new keypair, and add the public key to your Github repository. Then, you can configure SSH to use the private key when connecting to github.com as described above. 44 | -------------------------------------------------------------------------------- /java/solution/IntList.java: -------------------------------------------------------------------------------- 1 | package com.gradescope.intlist; 2 | 3 | import com.gradescope.intlist.AbstractIntList; 4 | 5 | public class IntList extends AbstractIntList{ 6 | 7 | /** 8 | * Calls the parent constructor 9 | */ 10 | public IntList(int head){ 11 | super(head); 12 | } 13 | 14 | /** 15 | * This copy constructor is needed for test cases; do not touch 16 | */ 17 | public IntList(AbstractIntList list){ 18 | this(list.head); 19 | if(list.next != null){ 20 | this.next = new IntList(list.next); 21 | } 22 | } 23 | /** 24 | * Creates an IntList from a variable number of arguments 25 | */ 26 | public static AbstractIntList createList(int... 
a){ 27 | IntList head = new IntList(a[0]); 28 | IntList prev = head; 29 | for(int i=1; i < a.length; i++){ 30 | prev.next = new IntList(a[i]); 31 | prev = (IntList) prev.next; 32 | } 33 | return head; 34 | } 35 | 36 | /** 37 | * Appends value to the end of the list 38 | */ 39 | public AbstractIntList append(int value){ 40 | if(this.next == null){ 41 | this.next = new IntList(value); 42 | return this.next; 43 | }else{ 44 | return this.next.append(value); 45 | } 46 | } 47 | 48 | /** 49 | * Returns true if the IntList contains the value 50 | */ 51 | public boolean contains(int value){ 52 | if(this.head == value){ 53 | return true; 54 | }else if(this.next != null){ 55 | return this.next.contains(value); 56 | }else{ 57 | return false; 58 | } 59 | } 60 | 61 | /** 62 | * Converts an IntList to a string 63 | */ 64 | public String toString(){ 65 | if(this.next != null){ 66 | return this.head + " " + this.next.toString(); 67 | }else{ 68 | return this.head + "\n"; 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /java-mvn/README.md: -------------------------------------------------------------------------------- 1 | # Gradescope Java+Maven Autograder Example 2 | 3 | This example shows how to set up an autograder on Gradescope for a 4 | Java project using Maven. For the full Java example, see 5 | [here](../java). This document only describes the differences needed 6 | for Maven. 7 | 8 | ## Maven 9 | 10 | Maven is a build tool. It provides dependency resolution (only needed 11 | for JUnit and exec plugin in this project) and a way to compile and 12 | execute code (using exec plugin). It may be useful if you don't want 13 | to have to package your dependencies (though that might be a better 14 | idea for repeatability reasons), want better handling of complex 15 | build scenarios, or if you're already using Maven. 16 | 17 | 18 | ## Generating a Maven project 19 | 20 | Maven projects are defined by a pom.xml file. 
This file and the 21 | project structure were generated using the command 22 | 23 | `mvn archetype:generate -DgroupId=com.gradescope.intlist -DartifactId=intlist -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false` 24 | 25 | # Files 26 | 27 | ## [setup.sh](https://github.com/gradescope/autograder_samples/blob/master/java-mvn/11setup.sh) 28 | 29 | Sets up JDK 8 and maven. There's a workaround for a ca-certificates 30 | issue to make sure you can install things from Maven. 31 | 32 | ## [run_autograder](https://github.com/gradescope/autograder_samples/blob/master/java-mvn/run_autograder) 33 | 34 | mvn compile is a built in maven goal to compile the project. -q makes 35 | it quiet (i.e. not spam a bunch of debugging output). 36 | 37 | mvn exec:java is a goal from the exec plugin for maven. It lets you 38 | specify a main class to execute (see pom.xml). This lets you avoid 39 | having to figure out the proper java command to run and classpath 40 | arguments and the like. 41 | 42 | ## [pom.xml](https://github.com/gradescope/autograder_samples/blob/master/java-mvn/pom.xml) 43 | 44 | This is a maven project file. Briefly, it describes the project, its 45 | dependencies/plugins, Java compiler version, and main class. 46 | 47 | -------------------------------------------------------------------------------- /java_template/src/com/gradescope/jh61b/grader/TestResult.java: -------------------------------------------------------------------------------- 1 | /** Handy container class for storing test results. */ 2 | package com.gradescope.jh61b.grader; 3 | 4 | public class TestResult { 5 | final String name; 6 | final String number; 7 | final double maxScore; 8 | double score; 9 | final String visibility; 10 | 11 | /* outputSB is any text that we want to relay to the user when teh test is done running. 
*/ 12 | private StringBuilder outputSB; 13 | 14 | /* private List tags; // Not yet implemented */ 15 | 16 | 17 | public TestResult(String name, String number, double maxScore, String visibility) { 18 | this.name = name; 19 | this.number = number; 20 | this.maxScore = maxScore; 21 | this.outputSB = new StringBuilder(); 22 | this.visibility = visibility; 23 | } 24 | 25 | public void setScore(double score) { 26 | this.score = score; 27 | } 28 | 29 | public void addOutput(String x) { 30 | outputSB.append(x); 31 | } 32 | 33 | /* Return in JSON format. TODO: Need to escape newlines and possibly other characters. */ 34 | public String toJSON() { 35 | String output = outputSB.toString(); 36 | String noWindowsNewLines = output.replace("\r\n", "\\n"); 37 | String noWeirdNewLines = noWindowsNewLines.replace("\r", "\\n"); 38 | String noLinuxNewLines = noWeirdNewLines.replace("\n", "\\n"); 39 | String noTabs = noLinuxNewLines.replace("\t", " "); 40 | String noQuotes = noTabs.replace("\"", "\\\""); 41 | 42 | return "{" + String.join(",", new String[] { 43 | String.format("\"%s\": \"%s\"", "name", name), 44 | String.format("\"%s\": \"%s\"", "number", number), 45 | String.format("\"%s\": %s", "score", score), 46 | String.format("\"%s\": %s", "max_score", maxScore), 47 | String.format("\"%s\": \"%s\"", "visibility", visibility), 48 | String.format("\"%s\": \"%s\"", "output", noQuotes) 49 | }) + "}"; 50 | } 51 | 52 | /* For debugging only. */ 53 | public String toString() { 54 | return("name: " + name + ", number: " + number + ", score: " + score + ", max_score: " + maxScore + ", detailed output if any (on next line): \n" + outputSB.toString()); 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /java/src/main/java/com/gradescope/jh61b/grader/TestResult.java: -------------------------------------------------------------------------------- 1 | /** Handy container class for storing test results. 
*/ 2 | package com.gradescope.jh61b.grader; 3 | 4 | public class TestResult { 5 | final String name; 6 | final String number; 7 | final double maxScore; 8 | double score; 9 | final String visibility; 10 | 11 | /* outputSB is any text that we want to relay to the user when teh test is done running. */ 12 | private StringBuilder outputSB; 13 | 14 | /* private List tags; // Not yet implemented */ 15 | 16 | 17 | public TestResult(String name, String number, double maxScore, String visibility) { 18 | this.name = name; 19 | this.number = number; 20 | this.maxScore = maxScore; 21 | this.outputSB = new StringBuilder(); 22 | this.visibility = visibility; 23 | } 24 | 25 | public void setScore(double score) { 26 | this.score = score; 27 | } 28 | 29 | public void addOutput(String x) { 30 | outputSB.append(x); 31 | } 32 | 33 | /* Return in JSON format. TODO: Need to escape newlines and possibly other characters. */ 34 | public String toJSON() { 35 | String output = outputSB.toString(); 36 | String noWindowsNewLines = output.replace("\r\n", "\\n"); 37 | String noWeirdNewLines = noWindowsNewLines.replace("\r", "\\n"); 38 | String noLinuxNewLines = noWeirdNewLines.replace("\n", "\\n"); 39 | String noTabs = noLinuxNewLines.replace("\t", " "); 40 | String noQuotes = noTabs.replace("\"", "\\\""); 41 | 42 | return "{" + String.join(",", new String[] { 43 | String.format("\"%s\": \"%s\"", "name", name), 44 | String.format("\"%s\": \"%s\"", "number", number), 45 | String.format("\"%s\": %s", "score", score), 46 | String.format("\"%s\": %s", "max_score", maxScore), 47 | String.format("\"%s\": \"%s\"", "visibility", visibility), 48 | String.format("\"%s\": \"%s\"", "output", noQuotes) 49 | }) + "}"; 50 | } 51 | 52 | /* For debugging only. 
*/ 53 | public String toString() { 54 | return("name: " + name + ", number: " + number + ", score: " + score + ", max_score: " + maxScore + ", detailed output if any (on next line): \n" + outputSB.toString()); 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /java/src/main/java/com/gradescope/intlist/RefIntList.java: -------------------------------------------------------------------------------- 1 | package com.gradescope.intlist; 2 | 3 | import com.gradescope.intlist.AbstractIntList; 4 | 5 | public class RefIntList extends AbstractIntList{ 6 | 7 | public RefIntList(int head) { 8 | super(head); 9 | } 10 | 11 | /** 12 | * This copy constructor is needed for test cases; do not touch 13 | */ 14 | public RefIntList(AbstractIntList list){ 15 | this(list.head); 16 | if(list.next != null){ 17 | this.next = new RefIntList(list.next); 18 | } 19 | } 20 | /** 21 | * Creates an IntList from a variable number of arguments 22 | */ 23 | public static AbstractIntList createList(int... a){ 24 | RefIntList head = new RefIntList(a[0]); 25 | RefIntList prev = head; 26 | for(int i=1; i < a.length; i++){ 27 | prev.next = new RefIntList(a[i]); 28 | prev = (RefIntList) prev.next; 29 | } 30 | return head; 31 | } 32 | 33 | /** 34 | * Appends value to the end of the list 35 | */ 36 | public AbstractIntList append(int value){ 37 | if(this.next == null){ 38 | this.next = new RefIntList(value); 39 | return this.next; 40 | }else{ 41 | return this.next.append(value); 42 | } 43 | } 44 | 45 | /** 46 | * Returns true if the IntList contains the value 47 | */ 48 | public boolean contains(int value){ 49 | System.out.println("Reference"); 50 | if(this.head == value){ 51 | return true; 52 | }else if(this.next != null){ 53 | return this.next.contains(value); 54 | }else{ 55 | return false; 56 | } 57 | } 58 | 59 | /** 60 | * Converts an IntList to a string 61 | * 62 | * The last element should be 63 | * followed by a newline instead of a space. 
64 | */ 65 | public String toString(){ 66 | if(this.next != null){ 67 | return this.head + " " + this.next.toString(); 68 | }else{ 69 | return this.head + "\n"; 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /java_template/src/com/gradescope/jh61b/junit/RunListenerWithCapture.java: -------------------------------------------------------------------------------- 1 | package com.gradescope.jh61b.junit; 2 | 3 | import org.junit.runner.notification.RunListener; 4 | import java.io.PrintStream; 5 | import java.io.ByteArrayOutputStream; 6 | import java.io.IOException; 7 | 8 | /** Gives captures capability to RunListener classes. */ 9 | public abstract class RunListenerWithCapture extends RunListener { 10 | private static PrintStream stdout = System.out; 11 | private static CaptureStream capture = new CaptureStream(); 12 | private static PrintStream captureOut = new PrintStream(capture, true); 13 | 14 | // Start capturing all standard output to a string 15 | public static void startCapture() { 16 | capture.resetText(); 17 | System.setOut(captureOut); 18 | } 19 | 20 | // Stop capturing and return the text since start() was called 21 | public static String endCapture() { 22 | System.setOut(stdout); 23 | return capture.getText(); 24 | } 25 | 26 | // Class that can be used to capture a PrintStream like STDOUT 27 | // and store it as a string. This is used in order 28 | // to get a program's main() output into something we can compare 29 | // inside the java test program. 
30 | 31 | private static class CaptureStream extends ByteArrayOutputStream { 32 | private StringBuffer captured = new StringBuffer(); 33 | public String getText() { 34 | return captured.toString(); 35 | } 36 | 37 | public void resetText() { 38 | captured = new StringBuffer(); 39 | } 40 | 41 | public void flush() throws IOException { 42 | String record; 43 | synchronized(this) { 44 | super.flush(); 45 | record = this.toString(); 46 | super.reset(); 47 | 48 | if (record.length() == 0) { 49 | // avoid empty records 50 | return; 51 | } 52 | 53 | captured.append(record); 54 | } 55 | } 56 | } 57 | } -------------------------------------------------------------------------------- /java/src/main/java/com/gradescope/jh61b/junit/RunListenerWithCapture.java: -------------------------------------------------------------------------------- 1 | package com.gradescope.jh61b.junit; 2 | 3 | import org.junit.runner.notification.RunListener; 4 | import java.io.PrintStream; 5 | import java.io.ByteArrayOutputStream; 6 | import java.io.IOException; 7 | 8 | /** Gives captures capability to RunListener classes. */ 9 | public abstract class RunListenerWithCapture extends RunListener { 10 | private static PrintStream stdout = System.out; 11 | private static CaptureStream capture = new CaptureStream(); 12 | private static PrintStream captureOut = new PrintStream(capture, true); 13 | 14 | // Start capturing all standard output to a string 15 | public static void startCapture() { 16 | capture.resetText(); 17 | System.setOut(captureOut); 18 | } 19 | 20 | // Stop capturing and return the text since start() was called 21 | public static String endCapture() { 22 | System.setOut(stdout); 23 | return capture.getText(); 24 | } 25 | 26 | // Class that can be used to capture a PrintStream like STDOUT 27 | // and store it as a string. This is used in order 28 | // to get a program's main() output into something we can compare 29 | // inside the java test program. 
30 | 31 | private static class CaptureStream extends ByteArrayOutputStream { 32 | private StringBuffer captured = new StringBuffer(); 33 | public String getText() { 34 | return captured.toString(); 35 | } 36 | 37 | public void resetText() { 38 | captured = new StringBuffer(); 39 | } 40 | 41 | public void flush() throws IOException { 42 | String record; 43 | synchronized(this) { 44 | super.flush(); 45 | record = this.toString(); 46 | super.reset(); 47 | 48 | if (record.length() == 0) { 49 | // avoid empty records 50 | return; 51 | } 52 | 53 | captured.append(record); 54 | } 55 | } 56 | } 57 | } -------------------------------------------------------------------------------- /docs/leaderboards.md: -------------------------------------------------------------------------------- 1 | ## Leaderboards 2 | 3 | You can create leaderboards from quantities that are generated by your 4 | autograder. You can use this for things like programming contests 5 | where students compete to earn the highest score by some metric, 6 | rather than being graded purely on correctness. Students will be 7 | identified by pseudonyms that they choose, rather than by their actual 8 | names. To enable leaderboard support for your assignment, enable the 9 | setting at assignment creation or by changing the setting afterwards. 10 | 11 | [![Leaderboard setting](leaderboard_setting.png)](leaderboard_setting.png) 12 | 13 | 14 | ### Autograder specifications 15 | 16 | For the leaderboard to work, your autograder must export a top-level 17 | "leaderboard" section in the results.json output. This will be an 18 | array, with objects defining the different quantities to be displayed 19 | on the leaderboard. 20 | 21 | ``` 22 | { 23 | "leaderboard": 24 | [ 25 | {"name": "Accuracy", "value": .926}, 26 | {"name": "Time", "value": 15.1, "order": "asc"}, 27 | {"name": "Stars", "value": "*****"} 28 | ] 29 | } 30 | ``` 31 | 32 | You can specify the sort order for a quantity by specifying an "order" 33 | property. 
The default is "desc" for descending, i.e. higher scores 34 | rank higher, but for quantities like execution time you may want to 35 | sort them in ascending order. 36 | 37 | ### Student submission 38 | 39 | When students submit their work, they will be required to submit a 40 | pseudonym for the leaderboard. This is to keep results anonymous. 41 | 42 | [![Student submission form with leaderboard enabled](submit_form_with_leaderboard.png)](submit_form_with_leaderboard.png) 43 | 44 | 45 | After submitting, students will see a link to the leaderboard, if it 46 | is enabled for the particular assignment. 47 | 48 | [![Assignment submission with leaderboard enabled](submission_with_leaderboard_enabled.png)](submission_with_leaderboard_enabled.png) 49 | 50 | Clicking on the leaderboard link will show a page with a sortable 51 | table for the various leaderboard quantities defined in the 52 | autograder. The student's submission will be highlighted in bold. 53 | 54 | [![Leaderboard](leaderboard.png)](leaderboard.png) 55 | -------------------------------------------------------------------------------- /docs/ssh.md: -------------------------------------------------------------------------------- 1 | # Debugging via SSH 2 | 3 | You can debug a submission by clicking the "Debug via SSH" button in 4 | the action bar at the bottom of a submission. This will allow you to 5 | log into a new, empty container that has your autograder code and the student's 6 | code downloaded, set up in the same way as an actual autograder run. 7 | You can then run the autograder to see the results that it would produce. 8 | 9 | [![Debug via SSH](debug_via_ssh.png)](debug_via_ssh.png) 10 | 11 | ## Adding an SSH key 12 | 13 | If you haven't done this before, you'll need to provide an SSH public 14 | key. 
Click on the link in the popup to do so: 15 | 16 | [![Debug via SSH: No public key](debug_via_ssh_no_public_key.png)](debug_via_ssh_no_public_key.png) 17 | 18 | You'll be asked to paste your public key into a text field. It should 19 | start with something like `ssh-rsa`, `ssh-dsa`, or something 20 | similar. If you don't have one, see [GitHub's instructions](https://help.github.com/articles/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent/#generating-a-new-ssh-key) 21 | for how to generate one. If copying from a terminal, make sure that 22 | you don't include any line breaks, because there should not be any 23 | line breaks in a public key. 24 | 25 | ### Account settings 26 | 27 | Once you have added one SSH key, you can add more keys or change 28 | existing keys by going to the 29 | [Account settings](https://www.gradescope.com/account/edit) page. 30 | 31 | Your public keys will appear at the bottom of the page. 32 | 33 | ## Logging in via SSH 34 | 35 | Once you've set up your public key, clicking on the button will start 36 | a new container for you. As soon as it's ready, instructions will 37 | appear that give you a command to run to log into the container. 38 | 39 | [![SSH Instructions](ssh_instructions.png)](ssh_instructions.png) 40 | 41 | After logging in, you can inspect the filesytem and run the 42 | autograder. If the autograder fails to execute, you can make tweaks to 43 | see how to fix the autograder or the submission. 44 | 45 | ## Persisting changes 46 | 47 | Once you get the autograder running correctly, you can persist changes 48 | by either resubmitting for the student, if the error is in the 49 | student's code, or by uploading a new version of the autograder. 50 | -------------------------------------------------------------------------------- /docs/base_images.md: -------------------------------------------------------------------------------- 1 | # Base Images 2 | 3 | The default base image that we use is currently Ubuntu 22.04. 
4 | The previous default version was Ubuntu 18.04. 5 | 6 | You'll see this default auto-selected on the Configure Autograder page. 7 | Gradescope supports selecting a different base image to build your 8 | autograder with. 9 | 10 | [![Base Image Selector](base_image_selector.png)](base_image_selector.png) 11 | 12 | When you create a new autograder, it will default to the current 13 | default base image. If you do not want to change the default base image, 14 | you can upload your autograder Zip file and select "Update Autograder" 15 | on the Configure Autograder page of your Gradescope assignment. Over 16 | time, new base image versions may be released 17 | and become the default version. We recommend that you update your 18 | autograders to use the latest versions when available to take 19 | advantage of the latest software updates and security features. 20 | 21 | Rebuilding an autograder or duplicating an autograder will use the 22 | same base image as was previously selected. 23 | 24 | ## Base Image Options 25 | 26 | ### Base Image OS 27 | 28 | This is the operating system which the base image is running 29 | (e.g. Ubuntu, Fedora, etc). 30 | 31 | ### Base Image Version 32 | 33 | This is the major version of the OS which is loaded in the base image 34 | (E.g. 22.04 for Ubuntu). Note that minor software upgrades may happen 35 | from time to time without changing the major version of the OS 36 | (i.e. individual packages may receive minor updates while keeping the 37 | same version of the OS). This is up to the upstream OS maintainers' 38 | software upgrade policies. 39 | 40 | ### Base Image Variant 41 | 42 | Variants may add to or modify the pre-installed software in the base 43 | image. This can be used to improve autograder build times by avoiding 44 | installing packages in the setup phase. 
45 | 46 | Selecting *Base* will mean that it is a fresh install of the OS with 47 | no modifications to the default installed programs, other than what 48 | our autograder platform requires. 49 | 50 | Additional base images may be added which contain commonly installed 51 | packages, such as different versions of Python or Java. This should 52 | decrease the build time for those autograders. 53 | -------------------------------------------------------------------------------- /docs/python3_issues.md: -------------------------------------------------------------------------------- 1 | # Python 3 issues 2 | 3 | In September 2020, we upgraded our autograder base image to use Python 4 | 3 for our autograder harness. 5 | 6 | If you're having issues with your Python 3 autograder not starting and 7 | you're overriding the system version of `python3`, it may be occurring 8 | because you have overridden the system `python3` executable. Since 9 | Gradescope uses `python3` for our harness, we need to ensure that we 10 | have the correct dependencies installed, and if the system `python3` 11 | executable is overridden, this may no longer be the case. There are a 12 | few possible ways to handle this situation. 13 | 14 | If you have any trouble with these approaches, please contact 15 | [help@gradescope.com](mailto:help@gradescope.com?subject=Python 3 16 | autograder issues) and we'll help you get your autograder working. 17 | 18 | ## Update your autograder to use an explicit version of python3 19 | 20 | Instead of overriding the sytem `python3` executable, you should be 21 | able to refer to an explicit Python version in your code. E.g., if you 22 | are installing `python3.8`, instead of overriding `python3` you can 23 | use `python3.8` explicitly in your scripts (e.g. in your 24 | `run_autograder` script, in any [#! 25 | lines](https://en.wikipedia.org/wiki/Shebang_(Unix)), etc). 
26 | 27 | You will need to install packages by using `python3.8 -m pip ...` 28 | instead of using `pip3`, since that would install packages in the 29 | default system version of `python3`. 30 | 31 | This is the preferred approach to solving this issue, and should be 32 | most resilient in the long-term. 33 | 34 | ## Only alias python3 within your run_autograder script 35 | 36 | If it's easier to continue using the `python3` executable name, you 37 | could alias it within your `run_autograder` script, so that it does 38 | not affect the rest of the system. 39 | 40 | ## Install Gradescope autograder harness dependencies 41 | 42 | This approach is a little more brittle, but you can also work around 43 | this issue by installing the dependencies our autograder harness needs 44 | within your custom Python version. The current list of dependencies is 45 | the following: 46 | 47 | ``` 48 | pyyaml pytz requests psutil grequests python-dateutil 49 | ``` 50 | 51 | You should be able to install these via `pip3` or by adding these 52 | packages to your requirements.txt file. 53 | -------------------------------------------------------------------------------- /diff/tests/test_subprocess.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from gradescope_utils.autograder_utils.decorators import weight 3 | import subprocess32 as subprocess 4 | 5 | 6 | class TestDiff(unittest.TestCase): 7 | def setUp(self): 8 | pass 9 | 10 | @weight(1) 11 | def test_no_args(self): 12 | """Invalid Input (no argument)""" 13 | fib = subprocess.Popen(["./fib"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) 14 | output = fib.stdout.read().strip() 15 | self.assertEqual(output, b"") 16 | err = fib.stderr.read().strip() 17 | referenceOutput = b"Error: Insufficient arguments." 
18 | self.assertEqual(err, referenceOutput) 19 | fib.terminate() 20 | 21 | @weight(1) 22 | def test_fib0(self): 23 | """Invalid Input (0)""" 24 | fib = subprocess.Popen(["./fib", "0"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) 25 | output = fib.stdout.read().strip() 26 | self.assertEqual(output, b"") 27 | err = fib.stderr.read().strip() 28 | referenceOutput = b"Error: number must be greater than 0." 29 | self.assertEqual(err, referenceOutput) 30 | fib.terminate() 31 | 32 | @weight(1) 33 | def test_fib1(self): 34 | """1st Fibonacci Number""" 35 | fib = subprocess.Popen(["./fib", "1"], stdout=subprocess.PIPE) 36 | output = fib.stdout.read().strip() 37 | referenceOutput = b"1" 38 | self.assertEqual(output, referenceOutput) 39 | fib.terminate() 40 | 41 | @weight(1) 42 | def test_fib2(self): 43 | """2nd Fibonacci Number""" 44 | fib = subprocess.Popen(["./fib", "2"], stdout=subprocess.PIPE) 45 | output = fib.stdout.read().strip() 46 | referenceOutput = b"1" 47 | self.assertEqual(output, referenceOutput) 48 | fib.terminate() 49 | 50 | @weight(1) 51 | def test_fib3(self): 52 | """3rd Fibonacci Number""" 53 | fib = subprocess.Popen(["./fib", "3"], stdout=subprocess.PIPE) 54 | output = fib.stdout.read().strip() 55 | referenceOutput = b"2" 56 | self.assertEqual(output, referenceOutput) 57 | fib.terminate() 58 | 59 | @weight(1) 60 | def test_fib4(self): 61 | """4th Fibonacci number""" 62 | fib = subprocess.Popen(["./fib", "4"], stdout=subprocess.PIPE) 63 | output = fib.stdout.read().strip() 64 | referenceOutput = b"3" 65 | self.assertEqual(output, referenceOutput) 66 | fib.terminate() 67 | -------------------------------------------------------------------------------- /diff_general/README.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | [View project source on GitHub](https://github.com/gradescope/autograder_samples/tree/master/diff_general) 4 | 5 | This is an example of using Python and the gradescope-utils 
library to 6 | implement diff-style autograding of a C assignment. The idea is that 7 | you can compile the student's code, and then execute it in a 8 | subprocess using Python. Then you can communicate with the subprocess 9 | by providing arguments via the command line, or via standard input, 10 | and read standard output to see what the program produced. The 11 | student's output is checked against a reference answer to decide 12 | whether the test case passed or failed. 13 | 14 | This type of testing helps with testing assignments that are not 15 | easily amenable to unit testing, such as assignments where students 16 | don't necessarily write specific functions. 17 | 18 | ## Building and executing code 19 | 20 | - **compile.sh**: This script should do whatever is necessary to 21 | compile the student's code. If nothing needs to be compiled, you can 22 | use this file to copy the student's files to the right directory. 23 | - **run.sh**: This script should run the student's program. This can 24 | be overridden for a given test case. 25 | 26 | ## Adding test cases 27 | 28 | This example is driven entirely by the files that are in the `test_data` 29 | directory, i.e. to add test cases you only have to add directories to the 30 | `test_data` directory. Each test case should have the following files: 31 | 32 | - **input**: This file will be fed to the program over standard input. 33 | - **output**: This file will serve as the reference output for the 34 | test, and must be matched for the test to pass. 35 | - **settings.yml**: This file holds various settings, such as the 36 | weight assigned to a test case. 37 | - **err**: Optionally, this file can be used to compare any output 38 | that is expected to be printed to standard error. 39 | - **run.sh**: Optionally, you can override the command used to execute 40 | this test case. This can be used to provide different command line 41 | arguments. 
42 | 43 | ## The example program 44 | 45 | The C program in question ([`fib.c`](https://github.com/gradescope/autograder_samples/blob/master/diff_general/fib.c)) computes the nth Fibonacci number 46 | (1-indexed), as specified as the first command line argument 47 | (i.e. `argv[1]`). This is just a simple example to demonstrate how you 48 | might structure such an autograder. 49 | 50 | ## Providing input to the program 51 | 52 | You can provide command line arguments in run.sh, or you can send 53 | input to standard input using the `input` file. 54 | -------------------------------------------------------------------------------- /c#/README.md: -------------------------------------------------------------------------------- 1 | # Gradescope C# Autograder Example 2 | 3 | [View project source on Github](https://github.com/gradescope/autograder_samples/tree/master/c#/src) - [autograder.zip](https://github.com/gradescope/autograder_samples/raw/master/c#/src/autograder.zip) - [sample solution](https://github.com/gradescope/autograder_samples/blob/master/c%23/src/solution/HelloWorld.cs) 4 | 5 | ## Project Description 6 | 7 | This project is a simple example of how to build a C# autograder. 
8 | 9 | 10 | 11 | ## Dependencies (for tests) 12 | 13 | - Python 3+ 14 | - NUnit 15 | 16 | ## Example Test 17 | 18 | ``` 19 | [TestFixture] 20 | public class HelloWorldTest 21 | { 22 | [Test, Property("Weight", 1.0), Property("Visibility", "visible")] 23 | public void HelloTest() 24 | { 25 | Assert.AreEqual(HelloWorld.Hello(), "Hello"); 26 | } 27 | 28 | [Test, Property("Weight", 2.0), Property("Visibility", "hidden"), Property("Name", "Bye")] 29 | public void MyTest2() 30 | { 31 | Assert.AreEqual(HelloWorld.Bye(), "Bye"); 32 | } 33 | } 34 | ``` 35 | 36 | ## Running Tests 37 | 38 | ``` 39 | mcs -target:library -pkg:nunit -out:test.dll test.cs HelloWorld.cs 40 | nunit-console test.dll 41 | ``` 42 | 43 | # Files 44 | 45 | ## [setup.sh](https://github.com/gradescope/autograder_samples/blob/master/c%23/src/setup.sh) 46 | 47 | This script installs NUnit and the Mono development tools. 48 | 49 | ## [run_autograder](https://github.com/gradescope/autograder_samples/blob/master/c%23/src/run_autograder) 50 | 51 | This script copies the student's submission to the target directory, 52 | compiles the required files (the test case and the student's submission), 53 | executes the test suite, and then converts the XML test results to Gradescope's 54 | JSON format. 55 | 56 | ## [nunit_to_gs.py](https://github.com/gradescope/autograder_samples/blob/master/c%23/src/nunit_to_gs.py) 57 | 58 | This python script loads the test results from TestResults.xml 59 | and converts them into Gradescope's JSON format. This script is what 60 | reads the property tags that you supplied in the test case, as shown above, 61 | and turns those into the appropriate Gradescope metadata. 62 | 63 | ## [Framework.cs](https://github.com/gradescope/autograder_samples/blob/master/c%23/src/Framework.cs) 64 | 65 | This is a blank template file for the students to fill in. Note that 66 | their solution must be called HelloWorld.cs for the autograder to work 67 | correctly. 
68 | 69 | ## [autograder.zip](https://github.com/gradescope/autograder_samples/blob/master/c%23/src/autograder.zip) 70 | 71 | This is a zipped up autograder that can be directly uploaded to Gradescope. 72 | You can then try out the correct and/or incorrect solutions we provide to see how 73 | the autograder works. 74 | -------------------------------------------------------------------------------- /docs/submission_metadata.md: -------------------------------------------------------------------------------- 1 | # Submission Metadata 2 | 3 | Note: The content mentioned here may be subject to change. We will try 4 | not to make backwards incompatible changes to the platform, but we do 5 | reserve the right to make breaking changes to anything described here. 6 | 7 | The file `/autograder/submission_metadata.json` contains information 8 | about the current and previous submissions. It contains the following 9 | information: 10 | 11 | ``` 12 | { 13 | "id": 123456, // Unique identifier for this particular submission 14 | "created_at": "2018-07-01T14:22:32.365935-07:00", // Submission time 15 | "assignment": { // Assignment details 16 | "due_date": "2018-07-31T23:00:00.000000-07:00", 17 | "group_size": 4, // Maximum group size, or null if not set 18 | "group_submission": true, // Whether group submission is allowed 19 | "id": 25828, // Gradescope assignment ID 20 | "course_id": 1234, // Gradescope course ID 21 | "late_due_date": null, // Late due date, if set 22 | "release_date": "2018-07-02T00:00:00.000000-07:00", 23 | "title": "Programming Assignment 1", 24 | "total_points": "20.0" // Total point value, including any manual grading portion 25 | }, 26 | "submission_method": "upload", // Can be "upload", "GitHub", or "Bitbucket" 27 | "users": [ 28 | { 29 | "email": "student@example.com", 30 | "id": 1234, 31 | "name": "Student User" 32 | }, ... 
// Multiple users will be listed in the case of group submissions 33 | ], 34 | "previous_submissions": [ 35 | { 36 | "submission_time": "2017-04-06T14:24:48.087023-07:00",// previous submission time 37 | "score": 0.0, // Previous submission score 38 | "autograder_error": true|false // If true, this submission failed to run at no fault of the student. 39 | "results": { ... } // Previous submission results object, ONLY for the latest previous submission. 40 | }, ... 41 | ] 42 | } 43 | ``` 44 | 45 | ## Rate limiting schemes 46 | 47 | You can use submission_metadata.json to implement arbitrary rate 48 | limiting schemes. For instance, to limit the number of submissions 49 | within a 24 hour period, you can count the number of entries in 50 | `previous_submissions` which are within the last 24 hours, and display 51 | that information in the top level output field. 52 | 53 | If a student's submission should be rate limited, you can add a 54 | message to the top level output, and merge that with the results 55 | object from the previous submission. This way, students will keep 56 | their last valid score, but they'll know that they can't submit 57 | anymore. 58 | 59 | When implementing such schemes, be sure to compute time periods based 60 | on the current submission's "created_at" (submission time) - 61 | otherwise, re-running the autograder will cause the rate limits to be 62 | computed based on the current system time. 63 | -------------------------------------------------------------------------------- /java/src/main/java/com/gradescope/jh61b/junit/JUnitUtilities.java: -------------------------------------------------------------------------------- 1 | package com.gradescope.jh61b.junit; 2 | import org.junit.runner.notification.Failure; 3 | import java.lang.reflect.Method; 4 | import org.junit.Test; 5 | 6 | 7 | public class JUnitUtilities { 8 | /** Converts a JUnit failure object into a string. 
*/ 9 | public static String failureToString(Failure failure) { 10 | StringBuilder sb = new StringBuilder(); 11 | 12 | Throwable exception = failure.getException (); 13 | 14 | if (exception instanceof AssertionError) { 15 | if (exception.getMessage() == null) 16 | sb.append("Assertion failed"); 17 | else { 18 | sb.append(String.format("%s%n", exception.getMessage())); 19 | if (exception.getMessage().startsWith("Expected exception:")) { 20 | return sb.toString(); 21 | } 22 | } 23 | } else { 24 | if (exception.getCause() != null) { 25 | exception = exception.getCause(); 26 | } 27 | sb.append(String.format(" %s%n", exception)); 28 | } 29 | 30 | for (StackTraceElement frame : exception.getStackTrace ()) { 31 | if (frame.getClassName().startsWith ("org.junit.")) 32 | continue; 33 | sb.append(printPosition(frame)); 34 | if (isStoppingFrame(frame)) 35 | break; 36 | } 37 | String noTrailingWhitespace = sb.toString().replaceFirst("\\s+$", ""); 38 | return noTrailingWhitespace; 39 | } 40 | 41 | /** Returns a string representation of the source position indicated by FRAME. */ 42 | private static String printPosition(StackTraceElement frame) { 43 | 44 | if (frame.isNativeMethod()) 45 | return String.format(" at %s.%s (native method)%n", 46 | frame.getClassName (), 47 | frame.getMethodName ()); 48 | else 49 | return String.format(" at %s.%s:%d (%s)%n", 50 | frame.getClassName (), 51 | frame.getMethodName (), 52 | frame.getLineNumber (), 53 | frame.getFileName ()); 54 | } 55 | 56 | /** True iff FRAME is positioned on a method with a junit @Test 57 | * annotation. 
*/ 58 | private static boolean isStoppingFrame (StackTraceElement frame) { 59 | if (frame.isNativeMethod ()) 60 | return false; 61 | try { 62 | Class cls = Class.forName(frame.getClassName ()); 63 | Method mthd = cls.getMethod(frame.getMethodName ()); 64 | return mthd.getAnnotation(Test.class) != null; 65 | } catch (ClassNotFoundException e) { 66 | return false; 67 | } catch (NoSuchMethodException e) { 68 | return false; 69 | } 70 | } 71 | } -------------------------------------------------------------------------------- /java_template/src/com/gradescope/jh61b/junit/JUnitUtilities.java: -------------------------------------------------------------------------------- 1 | package com.gradescope.jh61b.junit; 2 | import org.junit.runner.notification.Failure; 3 | import java.lang.reflect.Method; 4 | import org.junit.Test; 5 | 6 | 7 | public class JUnitUtilities { 8 | /** Converts a JUnit failure object into a string. */ 9 | public static String failureToString(Failure failure) { 10 | StringBuilder sb = new StringBuilder(); 11 | 12 | Throwable exception = failure.getException (); 13 | 14 | if (exception instanceof AssertionError) { 15 | if (exception.getMessage() == null) 16 | sb.append("Assertion failed"); 17 | else { 18 | sb.append(String.format("%s%n", exception.getMessage())); 19 | if (exception.getMessage().startsWith("Expected exception:")) { 20 | return sb.toString(); 21 | } 22 | } 23 | } else { 24 | if (exception.getCause() != null) { 25 | exception = exception.getCause(); 26 | } 27 | sb.append(String.format(" %s%n", exception)); 28 | } 29 | 30 | for (StackTraceElement frame : exception.getStackTrace ()) { 31 | if (frame.getClassName().startsWith ("org.junit.")) 32 | continue; 33 | sb.append(printPosition(frame)); 34 | if (isStoppingFrame(frame)) 35 | break; 36 | } 37 | String noTrailingWhitespace = sb.toString().replaceFirst("\\s+$", ""); 38 | return noTrailingWhitespace; 39 | } 40 | 41 | /** Returns a string representation of the source position indicated by 
FRAME. */ 42 | private static String printPosition(StackTraceElement frame) { 43 | 44 | if (frame.isNativeMethod()) 45 | return String.format(" at %s.%s (native method)%n", 46 | frame.getClassName (), 47 | frame.getMethodName ()); 48 | else 49 | return String.format(" at %s.%s:%d (%s)%n", 50 | frame.getClassName (), 51 | frame.getMethodName (), 52 | frame.getLineNumber (), 53 | frame.getFileName ()); 54 | } 55 | 56 | /** True iff FRAME is positioned on a method with a junit @Test 57 | * annotation. */ 58 | private static boolean isStoppingFrame (StackTraceElement frame) { 59 | if (frame.isNativeMethod ()) 60 | return false; 61 | try { 62 | Class cls = Class.forName(frame.getClassName ()); 63 | Method mthd = cls.getMethod(frame.getMethodName ()); 64 | return mthd.getAnnotation(Test.class) != null; 65 | } catch (ClassNotFoundException e) { 66 | return false; 67 | } catch (NoSuchMethodException e) { 68 | return false; 69 | } 70 | } 71 | } -------------------------------------------------------------------------------- /docs/getting_started.md: -------------------------------------------------------------------------------- 1 | # Getting Started 2 | 3 | ## Video Walkthrough 4 | 5 | If you'd like to watch a video walkthrough of the programming assignment and 6 | code autograder setup process, please see our 7 | [Gradescope for Programming Assignments webinar](https://www.youtube.com/watch?v=ZX3G5dFRZKI){:target="_blank" rel="noopener"}. 8 | 9 | ## Pre-requisites 10 | 11 | If you haven't made an account on Gradescope, sign up as an instructor at 12 | [Gradescope.com](https://gradescope.com/). 13 | 14 | Programming assignments are part of [Gradescope Complete](https://info.gradescope.com/pricing), 15 | so you'll need to either upgrade your course, or request trial access to the feature by 16 | emailing [help@gradescope.com](mailto:help@gradescope.com). 17 | 18 | You'll need to have a course set up, so create one if you don't have one. 
19 | 20 | ## Setting Up Your Assignment 21 | 22 | Navigate to the assignments tab on your course, and click on "Create 23 | Programming Assignment" in the bottom bar. 24 | 25 | [![Assignments](assignments.png)](assignments.png) 26 | 27 | You'll be asked for some basic details about your assignment. Choose 28 | the appropriate options and continue. 29 | 30 | [![Assignment](assignment_form.png)](assignment_form.png) 31 | 32 | Next you'll be asked to upload your autograder. Upload a zip file 33 | meeting the required [specifications](../specs), select the [base image](../base_images) 34 | you would like to build the autograder with, and press "Update Autograder." 35 | 36 | [![Configure Autograder](configure_autograder.png)](configure_autograder.png) 37 | 38 | The page will reload and show you the Docker build output 39 | as it sets up your image. 40 | 41 | [![Build Status](build_status.png)](build_status.png) 42 | 43 | Once the Docker image finishes building, you can click "Test 44 | Autograder" to submit to the autograder yourself. You can use this to 45 | debug the output of your autograder, and make sure everything is set 46 | up correctly. You'll want to make sure your autograder is stable 47 | before allowing students to submit. 48 | 49 | [![Test Autograder](test_autograder.png)](test_autograder.png) 50 | 51 | Once the release date arrives, students will be allowed to submit 52 | their homework. They'll receive feedback as soon as the autograder 53 | finishes running. You can control what feedback they receive in the 54 | output file format. They'll see a list of test cases that they've 55 | passed and failed, and any output you provide. 56 | 57 | [![Student results](results.png)](results.png) 58 | 59 | Note that the autograder is run automatically only on new submissions. 
If 60 | you update the autograder for the assignment and want to run the new 61 | autograder on an existing submission, or if you'd like 62 | to rerun the autograder on a submission, you can do so manually by 63 | clicking on "Rerun Autograder". 64 | 65 | You can rerun the autograder for all submissions by clicking on "Regrade 66 | All Submissions" on the Manage Submissions page. 67 | -------------------------------------------------------------------------------- /diff/README.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | [View project source on GitHub](https://github.com/gradescope/autograder_samples/tree/master/diff) 4 | 5 | This is an example of using Python and the gradescope-utils library to 6 | implement diff-style autograding of a C assignment. The idea is that 7 | you can compile the student's code, and then execute it in a 8 | subprocess using Python. Then you can communicate with the subprocess 9 | by providing arguments via the command line, or via standard input, 10 | and read standard output to see what the program produced. Finally, 11 | you check the output against a reference answer and decide whether the 12 | test case passed or failed. 13 | 14 | The basic structure of this autograder example is the same as the 15 | regular Python example, so you may want to familiarize yourself with 16 | that one first. 17 | 18 | ## The program 19 | 20 | The C program in question ([`fib.c`](https://github.com/gradescope/autograder_samples/blob/master/diff/fib.c)) computes the nth Fibonacci number 21 | (1-indexed), as specified as the first command line argument 22 | (i.e. `argv[1]`). This is just a simple example to demonstrate how you 23 | might structure such an autograder. 24 | 25 | ## Compiling the C code 26 | 27 | For this simple example of a one-file C program, it can be compiled by 28 | running `make fib`. 
In general, you will need to compile the code 29 | within `run_autograder` before starting the Python script. This can be 30 | as complicated as you need it to be - you can actually write a 31 | Makefile or use whatever other build system you need to use. 32 | 33 | ## Providing input to the program 34 | 35 | This example uses command line arguments because that was the easiest thing 36 | to do, but you can also write to the subprocess's standard 37 | input. You can see the [Python integration test](https://github.com/gradescope/autograder_samples/blob/master/python/src/tests/test_integration.py) 38 | for an example of how to do that. 39 | 40 | ## Comparing to the reference answers 41 | 42 | Once you've read the program's output, you should compare them to your 43 | reference answers. You can either put the values directly in the unit 44 | tests, or you can load them from a file. Either way, you will want to 45 | check the student's results by asserting something about their output 46 | in relation to the reference. 
47 | 48 | Reference in unit test: 49 | 50 | ```python 51 | def test_fib1(self): 52 | """1st Fibonacci Number""" 53 | fib = subprocess.Popen(["./fib", "1"], stdout=subprocess.PIPE) 54 | output = fib.stdout.read().strip() 55 | referenceOutput = "1" 56 | self.assertEqual(output, referenceOutput) 57 | fib.terminate() 58 | ``` 59 | 60 | Loading from a file: 61 | 62 | ```python 63 | def test_from_file(self): 64 | """10th Fibonacci number""" 65 | fib = subprocess.Popen(["./fib", "10"], stdout=subprocess.PIPE) 66 | output = fib.stdout.read().strip() 67 | with open("reference/10", "r") as outputFile: 68 | referenceOutput = outputFile.read() 69 | 70 | self.assertEqual(output, referenceOutput) 71 | fib.terminate() 72 | ``` 73 | -------------------------------------------------------------------------------- /diff_general/test_generator.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | import os.path 4 | import subprocess32 as subprocess 5 | from subprocess32 import PIPE 6 | from gradescope_utils.autograder_utils.decorators import weight, visibility 7 | import yaml 8 | 9 | BASE_DIR = './test_data' 10 | 11 | 12 | class TestMetaclass(type): 13 | """ 14 | Metaclass that allows generating tests based on a directory. 
15 | """ 16 | def __new__(cls, name, bases, attrs): 17 | data_dir = attrs['data_dir'] 18 | attrs[cls.test_name(data_dir)] = cls.generate_test(data_dir) 19 | return super(TestMetaclass, cls).__new__(cls, name, bases, attrs) 20 | 21 | @classmethod 22 | def generate_test(cls, dir_name): 23 | """ Returns a testcase for the given directory """ 24 | command = cls.generate_command(dir_name) 25 | 26 | def load_test_file(path): 27 | full_path = os.path.join(BASE_DIR, dir_name, path) 28 | if os.path.isfile(full_path): 29 | with open(full_path, 'rb') as f: 30 | return f.read() 31 | return None 32 | 33 | def load_settings(): 34 | settings_yml = load_test_file('settings.yml') 35 | 36 | if settings_yml is not None: 37 | return yaml.safe_load(settings_yml) or {} 38 | else: 39 | return {} 40 | 41 | settings = load_settings() 42 | 43 | @weight(settings.get('weight', 1)) 44 | @visibility(settings.get('visibility', 'visible')) 45 | def fn(self): 46 | proc = subprocess.Popen(command, stdin=PIPE, stdout=PIPE, stderr=PIPE) 47 | stdin = load_test_file('input') 48 | 49 | output, err = proc.communicate(stdin, settings.get('timeout', 1)) 50 | 51 | expected_output = load_test_file('output') 52 | expected_err = load_test_file('err') 53 | 54 | msg = settings.get('msg', "Output did not match expected") 55 | self.assertEqual(expected_output, output, msg=msg) 56 | if expected_err is not None: 57 | self.assertEqual(expected_err, err, msg=msg) 58 | fn.__doc__ = 'Test {0}'.format(dir_name) 59 | return fn 60 | 61 | @staticmethod 62 | def generate_command(dir_name): 63 | """Generates the command passed to Popen""" 64 | test_specific_script = os.path.join(BASE_DIR, dir_name, 'run.sh') 65 | if os.path.isfile(test_specific_script): 66 | return ["bash", test_specific_script] 67 | return ["bash", "./run.sh"] 68 | 69 | @staticmethod 70 | def klass_name(dir_name): 71 | return 'Test{0}'.format(''.join([x.capitalize() for x in dir_name.split('_')])) 72 | 73 | @staticmethod 74 | def test_name(dir_name): 75 | 
return 'test_{0}'.format(dir_name) 76 | 77 | 78 | def build_test_class(data_dir): 79 | klass = TestMetaclass( 80 | TestMetaclass.klass_name(data_dir), 81 | (unittest.TestCase,), 82 | { 83 | 'data_dir': data_dir 84 | } 85 | ) 86 | return klass 87 | 88 | 89 | def find_data_directories(): 90 | return filter( 91 | lambda x: os.path.isdir(os.path.join(BASE_DIR, x)), 92 | os.listdir(BASE_DIR) 93 | ) 94 | -------------------------------------------------------------------------------- /docs/resources.md: -------------------------------------------------------------------------------- 1 | # Community Resources 2 | 3 | Here are some resources that our community members have shared with 4 | us. Please let us know if you'd like to share anything you have 5 | created with other users of the Gradescope autograder platform! 6 | 7 | Note that Gradescope does not necessarily endorse or support any of 8 | these tools, and we may not be familiar with their inner workings. If 9 | you need help with these resources please contact the authors 10 | directly. 11 | 12 | Name | Description 13 | -----| ----------- 14 | [jacquard](https://github.com/espertus/jacquard) | A Java library integrated with Checkstyle, PMD, JUnit 5, Jacoco, and JavaParser for customizable autograders 15 | [jgrade](https://github.com/tkutche1/jgrade) | A Java helper library for building autograders for Gradescope 16 | [bralax/gradescope_autograder](https://github.com/bralax/gradescope_autograder) | Another Java helper library designed for lower level courses with closer access to student code. 
17 | [UCSB Gradescope Tools](https://ucsb-gradescope-tools.github.io/) | Tools and examples for C++ and Python autograders, pulling from GitHub, etc 18 | [GradescopeGrader4Autolab](https://github.com/xyzisinus/GradescopeGrader4Autolab) | A wrapper autograder to help port Autolab autograders to Gradescope 19 | [ThaumicMekanism/GradescopeBase](https://github.com/ThaumicMekanism/GradescopeBase) | An alternative Python autograder framework ([usage example](https://github.com/ThaumicMekanism/GradescopeBaseAG)) 20 | [mnoukhov/gradescope-autograder-template](https://github.com/mnoukhov/gradescope-autograder-template) | An autograder template for Python autograders 21 | [C++ Configurable Autograder Sample](https://github.com/UgiR/gradescope-autograde-cpp) | Configurable autograder template with C++ tooling 22 | [R autograder](https://github.com/guerzh/r_autograde_gradescope) | A sample Gradescope-compatible autograder for R scripts 23 | [R autograder using the gradeR package](https://github.com/tbrown122387/Using-gradeR-for-the-Gradescope-Autograder) | Another R autograder with a fully-worked example 24 | [Otter-Grader](https://otter-grader.readthedocs.io/) | A Jupyter Notebook/Python autograder that is compatible with the Gradescope autograder platform 25 | [pytest with Gradescope](https://github.com/GRudolph/autograder_samples/tree/master/python3-pytest) | A minimal example that uses pytest instead of unittest for Python 3 code with Gradescope 26 | [SQLite3 with Gradescope](https://github.com/scotpatti/SQLiteAutograder) | A lightweight autograder for SQLite3. Handles multiple queries and returns the number right as the grade. Running six complex queries, students usually receive their grades in 6-10 seconds. 
27 | [Racket with Gradescope](https://github.com/shriram/gradescope-racket) | An auto-grader for [Racket](https://racket-lang.org/) 28 | [UoL Autograder](https://uol-autograder.readthedocs.io/en/latest/) | University of Leeds autograder for C++ and Python 29 | [Tufts C/C++ Gradescope Autograder](https://gitlab.cs.tufts.edu/mrussell/gradescope-autograding) | A customizable C/C++ Autograder that integrates with Gradescope, along with instructions and scripts for custom container building and git integration. 30 | [Excel workbook autograder](https://github.com/roleohibachi/excel-autograder) | Using the "programming assignment" format to autograde a non-programming assignment in Excel format. 31 | -------------------------------------------------------------------------------- /docs/updates.md: -------------------------------------------------------------------------------- 1 | # Updates 2 | 3 | Here are some updates we've made to our autograder platform. If you have any questions or issues with any of these changes, please email [help@gradescope.com](mailto:help@gradescope.com). 4 | 5 | ## Oct 2nd, 2023 6 | 7 | We have added the following base images: 8 | 9 | * Centos 8 10 | * Fedora 35 11 | * Fedora 37 12 | * Fedora 38 13 | * Rocky 8.5 14 | * Rocky 9 15 | 16 | We have also added the JDK 8, 11, and 17 variants to Ubuntu 18.04 and 20.04. 17 | 18 | In addition, we have changed the default version of: 19 | 20 | * Fedora from 36 to 38 21 | * Rocky from 8 to 9. 22 | 23 | ## Mar 22nd, 2023 24 | 25 | We have added rich formatting options to the autograder output! 26 | See [our documentation on the formatting options here](../specs#output-string-formatting) for more information. 27 | 28 | ## Nov 8th, 2022 29 | 30 | We have upgraded `openssl` to 3.0.2-0ubuntu1.7 on Ubuntu 22.04 autograder base images. 31 | See [https://ubuntu.com/security/notices/USN-5710-1](https://ubuntu.com/security/notices/USN-5710-1) for more information. 
32 | 33 | ## Aug 22nd, 2022 34 | 35 | We have added the Fedora 36 and Rocky 8 base images. 36 | 37 | ## Aug 16th, 2022 38 | 39 | We have added the ability to specify whether a test case should be considered to have passed or failed, overriding the default styling. See [our documentation on the results.json format](../specs#test-case-status) to learn more. 40 | 41 | ## Aug 3rd, 2022 42 | 43 | We have updated the default autograder base image to Ubuntu 22.04, the current LTS release of Ubuntu. This will make newer versions of packages available to install. 44 | 45 | If you experience any issues, such as autograders failing to build or failing to execute, you can revert to the previous version, which is Ubuntu 18.04. See [our documentation about base images here](../base_images). 46 | 47 | ## July 13th, 2022 48 | 49 | We have added the ability to set the base image you want your autograder to build with. This means you are no longer required to build your own Docker container and use the manual docker configuration for zip file autograders which need a different base image. You can [find out more about base images here](../base_images). 50 | 51 | ## March 17th, 2022 52 | 53 | An upstream update to our underlying host OS introduced a conflict with the default Ubuntu 18.04 base image used for building autograders. For users installing certain packages that depend on glibc, this may have manifested in the build error below: 54 | 55 | ``` 56 | ERROR: Your kernel version indicates a revision number 57 | of 255 or greater. Glibc has a number of built in 58 | assumptions that this revision number is less than 255. 59 | If you\'ve built your own kernel, please make sure that any 60 | custom version numbers are appended to the upstream 61 | kernel number with a dash or some other delimiter. 62 | ``` 63 | 64 | We have temporarily worked around this issue by building a new version of our base image. 
This should resolve the issue for now, until a more permanent fix is released by the Ubuntu team. Affected users will need to re-upload their autograder zip file to successfully update their autograder image. 65 | 66 | ## September 4th, 2020 67 | 68 | - The Ubuntu, Fedora, and CentOS base images had their default Python installation upgraded from Python 2 to Python 3. 69 | - See our [help documentation](../python3_issues) if you suspect this is causing issues with your autograder setup. 70 | -------------------------------------------------------------------------------- /docs/troubleshooting.md: -------------------------------------------------------------------------------- 1 | # Troubleshooting 2 | 3 | You can find frequently asked questions on our 4 | [discussion forum](https://github.com/gradescope/autograder_samples/discussions), 5 | or ask your own questions there. 6 | 7 | ## Common error messages 8 | 9 | ### Autograder terminated with 'Killed' 10 | 11 | If you see the word 'Killed' at the end of your autograder run, and it gets 12 | terminated abruptly, this likely means that your autograder was terminated due 13 | to exceeding the memory limit for a single container. Our default memory 14 | limit is 768MB, but you can increase this from the "Autograder Settings" section 15 | of the settings for your assignment. 16 | 17 | [![Autograder Settings](autograder_settings.png){: style="width:595px;"}](autograder_settings.png) 18 | 19 | ### Your submission timed out 20 | 21 | We have a default overall timeout of 10 minutes. If your autograder script takes 22 | longer than that on our platform, it'll be forcibly terminated to avoid clogging 23 | resources. To avoid running into our global timeout, we recommend adding 24 | timeouts to your individual test cases - that'll at least allow your students to 25 | receive a partial score for the parts that do terminate in a reasonable time 26 | span. 
If they are subject to the global timeout, the submission will receive a 0 27 | and they won't see which tests were able to run successfully. 28 | 29 | You can increase the timeout for your assignment on the assignment's settings 30 | page, up to a maximum of 40 minutes. We don't recommend increasing it beyond 31 | what you expect to need, because for submissions that don't terminate it will 32 | just take that much longer for the autograder to time out. 33 | 34 | Keep in mind that on our autograder cluster, multiple tasks are running 35 | concurrently. By default, each autograder is allocated 1/2 of a virtual CPU, so 36 | you should adjust your running time expectations accordingly. The easiest way to 37 | do this is to time how long your solution code takes to run. 38 | 39 | You can also increase the CPU allocation if it's a CPU-intensive assignment, under 40 | "Autograder Settings". Note that to take advantage of multiple CPUs, you will need 41 | to write your autograder to use multiple processes or threads; Gradescope cannot 42 | automatically parallelize your tests. If your testing framework has parallel test 43 | running capabilities that may help though! 44 | 45 | ### The autograder failed to execute correctly 46 | 47 | This error message is returned when the autograder does not produce a valid JSON 48 | file in `/autograder/results/results.json`. This could either mean that no such 49 | file exists, or that the JSON is malformed. 50 | 51 | Make sure your JSON output is not being interleaved with print statements from 52 | within your autograder code or student code. The safest way to write your JSON 53 | is to build up the JSON object structure as your tests run, and then write it to 54 | the file at once, rather than outputting partial JSON strings to standard output 55 | and then redirecting standard output to `/autograder/results/results.json`. 
The 56 | latter is helpful when debugging your script locally, but for production use 57 | it's not ideal because any print statements will break the JSON structure. 58 | 59 | 60 | ## Contact us! 61 | 62 | You can also ask your questions on our 63 | [discussion forum](https://github.com/gradescope/autograder_samples/discussions) 64 | to get help from the community and share your questions with others. 65 | 66 | If you have any further questions, feel free to contact us 67 | at [help@gradescope.com](mailto:help@gradescope.com). Please mention Autograder 68 | in the subject line, it'll help us categorize the type of support request. 69 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | [![Documentation Status](https://readthedocs.org/projects/gradescope-autograders/badge/?version=latest)](https://gradescope-autograders.readthedocs.org/en/latest/?badge=latest) 2 | 3 | # Overview 4 | 5 | Gradescope provides a language-agnostic platform for running your 6 | autograders on our infrastructure. By running in Docker containers, we 7 | give you full flexibility in setting up whatever language, compilers, 8 | libraries, or other dependencies you need for your programming 9 | assignments. You provide us with a setup script and an autograder 10 | script, along with whatever supporting code you need, and we manage 11 | accepting student submissions, running your autograder at scale, and 12 | distributing the results back to students and to you. 13 | 14 | !!! note "Updates" 15 | Our autograder platform is under active development! Check out the 16 | [Updates](updates/) page to see what we've changed recently. 17 | 18 | # How it works 19 | 20 | As an instructor, you create a new assignment on Gradescope, and 21 | upload your autograder zip file following our 22 | [specifications](https://gradescope-autograders.readthedocs.io/en/latest/specs/). 
Your code produces output in the format we 23 | request. Students submit to Gradescope and have their work evaluated 24 | on demand. They can submit as many times as they want. At the end of 25 | the process, you can download their code and their results. 26 | 27 | Follow our **[instructions](https://gradescope-autograders.readthedocs.io/en/latest/getting_started/)** to get started with 28 | autograding. 29 | 30 | # Examples 31 | 32 | If you want to jump right in, we have built the following examples for 33 | autograders in different languages. You can see how they are built, 34 | and use them as a starting point for your own autograders. If you have 35 | any further questions, feel free to contact us at 36 | [help@gradescope.com](mailto:help@gradescope.com) 37 | 38 | - [Python](python/) 39 | - [Java (JUnit)](java/) 40 | - [Java+Maven](java-mvn/) 41 | 42 | # Getting Help 43 | 44 | You can also ask questions on the [GitHub discussions 45 | forum](https://github.com/gradescope/autograder_samples/discussions) 46 | for this project, especially if there is no private information in 47 | your question. 48 | 49 | If you need any help getting set up with the autograder platform, 50 | please contact us at 51 | [help@gradescope.com](mailto:help@gradescope.com). Please mention 52 | Autograder in the subject line, it'll help us categorize the type of 53 | support request. 54 | 55 | Issues regarding setup or installation of packages are often not 56 | Gradescope specific. In general, information about installing packages 57 | on our base operating system (currently Ubuntu 22.04 by default) will 58 | be relevant to Gradescope. If you get errors during your autograder 59 | setup phase it may help to search for those errors on Google or Stack 60 | Overflow. 61 | 62 | Given the multitude of software packages that are in use on the 63 | Gradescope autograder platform, the Gradescope team is not intimately 64 | familiar with each package's setup and configuration. 
// adapted from http://memorynotfound.com/add-junit-listener-example/
// Highly redundant with GradedTestListenerJSON. Maybe refactor later.
// Also, should output go to StdErr? That's what Paul did.
package com.gradescope.jh61b.junit;

import java.util.List;
import java.util.ArrayList;

import org.junit.runner.Description;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
import org.junit.runner.notification.RunListener;
import org.junit.runner.JUnitCore;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import java.util.Collection;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;

//import com.gradescope.jh61b.junit.JUnitUtilities;
import java.util.HashSet;

import java.util.Arrays;

/** JUnit run listener that prints a banner, each test's captured stdout, and a
 *  per-test pass/fail verdict, followed by an overall pass count. */
public class TestRunnerPrintAll extends RunListenerWithCapture {

    /* Verdict of the test currently in flight; testFailure flips it to false. */
    private boolean mostRecentTestPassed;

    /* Announce the run before any test executes. */
    public void testRunStarted(Description description) throws Exception {
        System.out.println("Running JUnit tests using com.gradescope.jh61b.junit.TestRunner in \"all\" mode.\n");
    }

    /* Summarize pass/fail totals once every test has finished. */
    public void testRunFinished(Result result) throws Exception {
        int total = result.getRunCount();
        int failed = result.getFailureCount();
        System.out.println(String.format("Passed: %d/%d tests.", total - failed, total));
    }

    /* Print a per-test header, then start capturing the test's stdout. */
    public void testStarted(Description description) throws Exception {
        System.out.println("Running " + description.getMethodName() + ": ");
        System.out.println("====================================");
        mostRecentTestPassed = true;
        this.startCapture();
    }

    /** Stop capturing stdout, echo whatever the test printed (trailing
     *  whitespace stripped), and report the verdict recorded so far. */
    public void testFinished(Description description) throws Exception {
        String captured = this.endCapture().replaceFirst("\\s+$", "");
        if (captured.length() > 0) {
            System.out.println(captured);
        }

        System.out.println(mostRecentTestPassed ? "=====> Passed\n" : "=====> FAILED!\n");
    }

    /** Mark the current test failed and print its details (message + trace).
     *  TODO: Clean up this stack trace so it is not hideous. */
    public void testFailure(Failure failure) throws Exception {
        System.out.println(JUnitUtilities.failureToString(failure));
        mostRecentTestPassed = false;
    }

    /** Run the given test classes with this listener attached. */
    public static void runTests(Class... classes) {
        JUnitCore core = new JUnitCore();
        core.addListener(new TestRunnerPrintAll());
        core.run(classes);
    }
}
*/ 39 | public void testRunFinished(Result result) throws Exception { 40 | int count = result.getRunCount(); 41 | int numFailed = result.getFailureCount(); 42 | int numPassed = count - numFailed; 43 | System.out.println(String.format("Passed: %d/%d tests.", numPassed, count)); 44 | } 45 | 46 | public void testStarted(Description description) throws Exception { 47 | String testSummary = String.format("%s", description.getMethodName()); 48 | System.out.println("Running " + testSummary + ": "); 49 | System.out.println("===================================="); 50 | mostRecentTestPassed = true; 51 | this.startCapture(); 52 | } 53 | 54 | /** When a test completes, add the test output at the bottom. Then stop capturing 55 | * StdOut. Open question: Is putting the captured output at the end clear? Or is that 56 | * possibly confusing? We'll see... */ 57 | public void testFinished(Description description) throws Exception { 58 | String printedOutput = this.endCapture(); 59 | String printedOutputNoTrailingWS = printedOutput.replaceFirst("\\s+$", ""); 60 | if (printedOutputNoTrailingWS.length() > 0) { 61 | System.out.println(printedOutputNoTrailingWS); 62 | } 63 | 64 | if (mostRecentTestPassed) { 65 | System.out.println("=====> Passed\n"); 66 | } else { 67 | System.out.println("=====> FAILED!\n"); 68 | } 69 | //System.out.println(String.format("==> Score: %.2f / %.2f", currentTestResult.score, currentTestResult.maxScore)); 70 | } 71 | 72 | /** Sets score to 0 and appends reason for failure and dumps a stack trace. 73 | * TODO: Clean up this stack trace so it is not hideous. 74 | * Other possible things we might want to consider including: http://junit.sourceforge.net/javadoc/org/junit/runner/notification/Failure.html. 75 | */ 76 | public void testFailure(Failure failure) throws Exception { 77 | System.out.println(JUnitUtilities.failureToString(failure)); 78 | mostRecentTestPassed = false; 79 | } 80 | 81 | public static void runTests(Class... 
classes) { 82 | JUnitCore runner = new JUnitCore(); 83 | runner.addListener(new TestRunnerPrintAll()); 84 | runner.run(classes); 85 | } 86 | } -------------------------------------------------------------------------------- /docs/manual_docker.md: -------------------------------------------------------------------------------- 1 | # Manual Docker Configuration 2 | 3 | If you are familiar with Docker, you can build your own Docker image instead of 4 | having Gradescope build your container image from a zip file. This can provide 5 | more control and speed up build times by taking advantage of local Docker build 6 | caching. It is not necessary to use this option to run your autograders on 7 | Gradescope, but it is available as an advanced feature. 8 | 9 | !!! note "Docker documentation" 10 | The rest of this page assumes familiarity with the Docker platform, 11 | including building a Docker image and pushing it to a container 12 | registry such as DockerHub. For more information, please see the 13 | [official Docker documentation](https://docs.docker.com). 14 | 15 | ## Requirements 16 | 17 | To use the "Manual Docker Configuration" option, you'll need to use 18 | `gradescope/autograder-base` as the base image for your Docker image. 19 | You can also other operating systems such as `fedora`, or different versions of 20 | Ubuntu - see the complete list [on DockerHub](https://hub.docker.com/r/gradescope/autograder-base/tags/). 21 | 22 | Note: You can now also 23 | [use different base images with a zip file upload](../base_images). 24 | 25 | You'll need to ensure that your image contains the `run_autograder` script at 26 | the path `/autograder/run_autograder`. This should match the requirements 27 | described on the [specifications](../specs) page. In particular, at the end of the 28 | script, the results should be in `/autograder/results/results.json` with the 29 | correct formatting. 
30 | 31 | Any setup can be done in the Dockerfile, so there is no need for a `setup.sh` 32 | script. You can use one if it's easier though. 33 | 34 | Beyond this, there are no other requirements on the structure of your Docker 35 | image, so you can organize it as you wish. 36 | 37 | If you're just getting started, you can look at [our sample Dockerfile](https://github.com/gradescope/autograder_samples/tree/master/manual_docker). 38 | This example puts all the autograder source in a `source` directory and uses a 39 | `setup.sh` file similar to the zip file upload method, so it can be a good 40 | transition path for going from a zip file to fully custom Docker builds. 41 | 42 | You may also wish to refer to the [Dockerfile reference docs](https://docs.docker.com/engine/reference/builder/). 43 | 44 | ## Private Docker Hub Repositories 45 | 46 | If your Docker Hub repository is private, you'll need to give the user 47 | `gradescopeecs` access to your repository. You may want to do this to 48 | make sure that students cannot download your autograder image. 49 | 50 | ## Running autograder images locally 51 | 52 | To run your autograder image locally, you will currently need to bypass our 53 | autograder harness because otherwise it will try to communicate with Gradescope 54 | by default. You can do this by mounting a sample submission into the 55 | `/autograder/submission` directory and then running `/autograder/run_autograder` 56 | directly. Here's an example command; replace the path to the submission, results 57 | directory, and Docker image name with the appropriate values. 
58 | 59 | ```bash 60 | docker run --rm -v /path/to/submission:/autograder/submission -v /path/to/results:/autograder/results username/image_name:tag /autograder/run_autograder && cat /path/to/results/results.json 61 | ``` 62 | 63 | or to start an interactive session: 64 | 65 | ```bash 66 | docker run --rm -it -v /path/to/submission:/autograder/submission -v /path/to/results:/autograder/results username/image_name:tag bash 67 | ``` 68 | 69 | Minor notes: 70 | 71 | - `--rm` is added to clean up the container after it exits. You can remove it if 72 | you want to inspect container logs or state afterwards. 73 | - The `/autograder/results` directory should be mounted to a path on your host 74 | so that you can inspect the results.json file that your autograder produces. 75 | -------------------------------------------------------------------------------- /python/README.md: -------------------------------------------------------------------------------- 1 | # Gradescope Python Autograder Example 2 | 3 | [View project source on Github](https://github.com/gradescope/autograder_samples/tree/master/python/src) - [autograder.zip](https://github.com/gradescope/autograder_samples/raw/master/python/src/autograder.zip) - [sample solution](https://github.com/gradescope/autograder_samples/raw/master/python/src/solution/calculator.py) 4 | 5 | ## Project Description 6 | 7 | In this assignment, students will build an infix calculator REPL. The 8 | goal of this project is to teach the basics of parsing and evaluating 9 | a simple language. 
10 | 11 | **Requirements** 12 | 13 | * Build an infix calculator read-eval-print loop 14 | * The calculator should handle the 4 basic operations, +, -, *, /, with operator precedence 15 | * In addition, it should handle parentheses and negative numbers 16 | * If the user types 'quit', exit the program 17 | * If there are syntax errors in the user input, raise CalculatorException 18 | 19 | ## Dependencies (for tests) 20 | 21 | - Python 3.6+ 22 | - [gradescope-utils](https://github.com/gradescope/gradescope-utils) provides decorators for setting point values for tests, and running tests with a JSON output. [See the Github repository](https://github.com/gradescope/gradescope-utils) for more on what you can do with it, or you can look at the example tests in this project for some usage examples. 23 | 24 | ### Python 3 25 | 26 | Make sure to use `pip3` and `python3` when writing your code, because in Ubuntu Python 2 is currently the default for `pip` and `python`. When installing Python 3, use the apt packages `python3` and `python3-pip`. If you need a more recent version than what is packaged by Ubuntu, you can try using a PPA or installing from source. 27 | 28 | ## Example Test 29 | 30 | ``` 31 | class TestSimpleArithmetic(unittest.TestCase): 32 | def setUp(self): 33 | self.calc = Calculator() 34 | 35 | @weight(1) 36 | def test_eval_add(self): 37 | """Evaluate 1 + 1""" 38 | val = self.calc.eval("1 + 1") 39 | self.assertEqual(val, 2) 40 | ``` 41 | 42 | The title of the test case is taken from the first line of the 43 | docstring. This is a `unittest` convention. The weight for each test is 44 | given by the `@weight` decorator. 
45 | 46 | See all tests 47 | [here](https://github.com/gradescope/autograder_samples/tree/master/python/src/tests) 48 | 49 | ## Running Tests 50 | 51 | ``` 52 | suite = unittest.defaultTestLoader.discover('tests') 53 | JSONTestRunner().run(suite) 54 | 55 | ``` 56 | 57 | This will load tests from the `tests/` directory in your autograder source code, 58 | [loading only files starting with `test` by default](https://docs.python.org/3/library/unittest.html#unittest.TestLoader.discover). 59 | `JSONTestRunner` is included in `gradescope-utils`, as described below. 60 | 61 | # Files 62 | 63 | ## [setup.sh](https://github.com/gradescope/autograder_samples/blob/master/python/src/setup.sh) 64 | 65 | This script installs Python and the pip package manager. Then it uses 66 | pip to install our two external dependencies. 67 | 68 | ## [run_autograder](https://github.com/gradescope/autograder_samples/blob/master/python/src/run_autograder) 69 | 70 | This script copies the student's submission to the target directory, 71 | and then executes the test runner Python script. 72 | 73 | ## [run_tests.py](https://github.com/gradescope/autograder_samples/blob/master/python/src/run_tests.py) 74 | 75 | This python script loads and runs the tests using the JSONTestRunner 76 | class from gradescope-utils. This produces the JSON formatted output 77 | to stdout, which is then captured and uploaded by the autograder 78 | harness. 79 | 80 | ## [framework.py](https://github.com/gradescope/autograder_samples/blob/master/python/src/framework.py) 81 | 82 | This is a blank template file for the students to fill in. Note that 83 | their solution must be called calculator.py for the autograder to work 84 | correctly. 
// adapted from http://memorynotfound.com/add-junit-listener-example/
// Highly redundant with GradedTestListenerJSON. Maybe refactor later.
// Also, should output go to StdErr? That's what Paul did.
package com.gradescope.jh61b.junit;

import java.util.List;
import java.util.ArrayList;

import org.junit.runner.Description;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
import org.junit.runner.notification.RunListener;
import org.junit.runner.JUnitCore;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import java.util.Collection;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;

//import com.gradescope.jh61b.junit.JUnitUtilities;
import java.util.HashSet;

import java.util.Arrays;

/** JUnit run listener that stays silent for passing tests and only prints
 *  captured stdout and failure details for tests that fail. */
public class TestRunnerPrintFailuresOnly extends RunListenerWithCapture {

    /* Verdict of the test currently in flight; testFailure flips it to false. */
    private boolean mostRecentTestPassed;
    /* Name of the test currently in flight, for the failure header. */
    private String mostRecentTestName;

    /* Announce the run before any test executes. */
    public void testRunStarted(Description description) throws Exception {
        System.out.println("Running JUnit tests using com.gradescope.jh61b.junit.TestRunner in \"failure\" mode.\n");
    }

    /* Drop any dangling capture, then summarize pass/fail totals. */
    public void testRunFinished(Result result) throws Exception {
        this.endCapture();
        int total = result.getRunCount();
        int failed = result.getFailureCount();
        System.out.println(String.format("Passed: %d/%d tests.", total - failed, total));
    }

    /* Remember the test name and start capturing its stdout; output is only
     * shown later if the test fails. */
    public void testStarted(Description description) throws Exception {
        mostRecentTestName = String.format("%s", description.getMethodName());
        mostRecentTestPassed = true;
        this.startCapture();
    }

    /** Stop capturing; print a trailing FAILED marker only for failing tests
     *  (passing tests produce no output at all). */
    public void testFinished(Description description) throws Exception {
        this.endCapture();

        if (!mostRecentTestPassed) {
            System.out.println("=====> FAILED!\n");
        }
    }

    /** On failure, flush the captured output under a per-test header, then
     *  print the failure details (message + stack trace).
     *  TODO: Clean up this stack trace so it is not hideous. */
    public void testFailure(Failure failure) throws Exception {
        String trimmed = this.endCapture().replaceFirst("\\s+$", "");

        System.out.println("Running " + mostRecentTestName + ": ");
        System.out.println("====================================");
        if (trimmed.length() > 0) {
            System.out.println(trimmed);
        }

        System.out.println(JUnitUtilities.failureToString(failure));
        mostRecentTestPassed = false;
    }

    /** Run the given test classes with this listener attached. */
    public static void runTests(Class... classes) {
        JUnitCore core = new JUnitCore();
        core.addListener(new TestRunnerPrintFailuresOnly());
        core.run(classes);
    }
}
4 | package com.gradescope.jh61b.junit; 5 | 6 | import java.util.List; 7 | import java.util.ArrayList; 8 | 9 | import org.junit.runner.Description; 10 | import org.junit.runner.Result; 11 | import org.junit.runner.notification.Failure; 12 | import org.junit.runner.notification.RunListener; 13 | import org.junit.runner.JUnitCore; 14 | 15 | import java.io.ByteArrayOutputStream; 16 | import java.io.PrintStream; 17 | 18 | import java.util.Collection; 19 | 20 | import java.lang.annotation.Annotation; 21 | import java.lang.reflect.Method; 22 | import java.lang.reflect.InvocationTargetException; 23 | 24 | //import com.gradescope.jh61b.junit.JUnitUtilities; 25 | import java.util.HashSet; 26 | 27 | import java.util.Arrays; 28 | 29 | public class TestRunnerPrintFailuresOnly extends RunListenerWithCapture { 30 | 31 | private boolean mostRecentTestPassed; 32 | private String mostRecentTestName; 33 | 34 | /* Code to run at the beginning of a test run. */ 35 | public void testRunStarted(Description description) throws Exception { 36 | System.out.println("Running JUnit tests using com.gradescope.jh61b.junit.TestRunner in \"failure\" mode.\n"); 37 | } 38 | 39 | /* Code to run at the end of test run. */ 40 | public void testRunFinished(Result result) throws Exception { 41 | this.endCapture(); 42 | int count = result.getRunCount(); 43 | int numFailed = result.getFailureCount(); 44 | int numPassed = count - numFailed; 45 | System.out.println(String.format("Passed: %d/%d tests.", numPassed, count)); 46 | } 47 | 48 | public void testStarted(Description description) throws Exception { 49 | mostRecentTestName = String.format("%s", description.getMethodName()); 50 | mostRecentTestPassed = true; 51 | this.startCapture(); 52 | } 53 | 54 | /** When a test completes, add the test output at the bottom. Then stop capturing 55 | * StdOut. Open question: Is putting the captured output at the end clear? Or is that 56 | * possibly confusing? We'll see... 
*/ 57 | public void testFinished(Description description) throws Exception { 58 | String printedOutput = this.endCapture(); 59 | 60 | if (!mostRecentTestPassed) { 61 | System.out.println("=====> FAILED!\n"); 62 | } 63 | //System.out.println(String.format("==> Score: %.2f / %.2f", currentTestResult.score, currentTestResult.maxScore)); 64 | } 65 | 66 | /** Sets score to 0 and appends reason for failure and dumps a stack trace. 67 | * TODO: Clean up this stack trace so it is not hideous. 68 | * Other possible things we might want to consider including: http://junit.sourceforge.net/javadoc/org/junit/runner/notification/Failure.html. 69 | */ 70 | public void testFailure(Failure failure) throws Exception { 71 | //System.out.print("Failure while running " + mostRecentTestName + ": "); 72 | String printedOutput = this.endCapture(); 73 | String printedOutputNoTrailingWS = printedOutput.replaceFirst("\\s+$", ""); 74 | 75 | 76 | System.out.println("Running " + mostRecentTestName + ": "); 77 | System.out.println("===================================="); 78 | if (printedOutputNoTrailingWS.length() > 0) { 79 | System.out.println(printedOutputNoTrailingWS); 80 | } 81 | 82 | System.out.println(JUnitUtilities.failureToString(failure)); 83 | mostRecentTestPassed = false; 84 | } 85 | 86 | public static void runTests(Class... 
import os
import json
import xml.etree.ElementTree as et


class NUnitTestCase(object):
    """Wraps a single NUnit ``<test-case>`` XML node for Gradescope output.

    ``output_map`` maps test names to the stdout captured while that test ran;
    it is consulted only for passing tests (failing tests show their failure
    message instead).
    """

    def __init__(self, node, output_map):
        self.node = node
        self.output_map = output_map
        self.load_properties()

    def load_properties(self):
        """Read the node's ``<properties>`` children into a plain dict."""
        self.properties = {}
        properties_node = self.node.find('properties')
        if properties_node is None:
            # Some test cases carry no <properties> element at all.
            return
        # Element.getchildren() was removed in Python 3.9; iterate directly.
        for property_node in properties_node:
            self.properties[property_node.get('name')] = property_node.get('value')

    def property(self, key):
        """Return the property value for ``key``, or None if absent."""
        return self.properties.get(key, None)

    def max_score(self):
        """Point value of the test: the NUnit ``Weight`` property, default 1.0."""
        if self.property('Weight'):
            return float(self.property('Weight'))
        return 1.0

    def score(self):
        """Awarded points: explicit ``Score`` property, else all-or-nothing."""
        if self.property('Score'):
            return float(self.property('Score'))
        return self.max_score() if self.node.get('success') == 'True' else 0.0

    def failure(self):
        """Failure message text, or None when the node has no ``<failure>``."""
        failure_node = self.node.find('failure')
        if failure_node is not None:
            return failure_node.find('message').text
        return None

    def visibility(self):
        """Gradescope visibility setting from the ``Visibility`` property, if set."""
        if self.property('Visibility'):
            return self.property('Visibility')
        return None

    def name(self):
        """Display name: the ``Name`` property, else the node's ``name`` attribute."""
        if self.property('Name'):
            return self.property('Name')
        return self.node.get('name')

    def output(self):
        """Text shown to the student: failure message, or captured stdout if any."""
        if self.node.get('success') != 'True':
            return self.failure()
        return self.output_map.get(self.node.get('name'))

    def as_dict(self):
        """Return this test case in Gradescope ``results.json`` test format."""
        result = {
            "score": self.score(),
            "max_score": self.max_score(),
            "name": self.name()
        }
        if self.visibility():
            result["visibility"] = self.visibility()
        if self.output():
            result["output"] = self.output()
        return result


class NUnitResultsLoader(object):
    """Converts an NUnit ``TestResult.xml`` (plus captured stdout) to Gradescope JSON."""

    def __init__(self):
        self.results = {
            'stdout_visibility': 'hidden',
            'visibility': 'visible',
            'tests': [],
        }
        self.output_map = {}
        self.load_stdout_stderr()

    def load_stdout_stderr(self):
        """Read the combined ``stdout_and_stderr`` capture file, if present."""
        if os.path.exists('stdout_and_stderr'):
            with open('stdout_and_stderr', 'r') as f:
                data = f.readlines()
            self.process_stdout_stderr(data)

    def process_stdout_stderr(self, data):
        """Split the captured stdout/stderr lines into per-test chunks.

        Each test's output is preceded by a ``***** <name>`` marker line; the
        NUnit summary line (starting with ``Tests run:``) terminates the final
        chunk, whose trailing newline is trimmed.
        """
        active = False
        current_test = None
        for line in data:
            if line.startswith('Tests run:'):
                # Guard against a summary line appearing before any marker.
                if current_test is not None:
                    self.output_map[current_test] = self.output_map[current_test][:-1]
                break
            if line.startswith('***** '):
                current_test = line[6:-1]
                active = True
                self.output_map[current_test] = ''
            elif active:
                self.output_map[current_test] += line

    def process_results_file(self, filename):
        """Parse the NUnit XML results file and collect all test cases."""
        root = et.parse(filename).getroot()
        self.process_library_suite(root.find('test-suite'))

    def process_library_suite(self, suite_node):
        """Record execution time, then walk each per-file child suite."""
        self.results['execution_time'] = float(suite_node.get('time'))
        # Element.getchildren() was removed in Python 3.9; iterate the element.
        for child in suite_node.find('results'):
            self.process_file_suite(child)

    def process_file_suite(self, suite_node):
        """Convert every test case contained in one per-file suite."""
        for child in suite_node.find('results'):
            self.process_test_case(child)

    def process_test_case(self, test_case_node):
        """Append one test case's Gradescope dict to the results."""
        test_case = NUnitTestCase(test_case_node, self.output_map)
        self.results['tests'].append(test_case.as_dict())

    def print_json(self):
        """Emit the results as JSON on stdout for the autograder harness."""
        print(json.dumps(self.results))


if __name__ == '__main__':
    # Guarded so the module can be imported (e.g. by tests) without reading
    # TestResult.xml as a side effect; script behavior is unchanged.
    loader = NUnitResultsLoader()
    loader.process_results_file('TestResult.xml')
    loader.print_json()
In particular, 27 | this project does not show how one might prevent mutations from one 28 | test from showing up in another test. You will probably want to 29 | implement this for any serious testing that could involve mutations of 30 | the test database. For example, if one test updates or deletes data in 31 | the database, the changes should be rolled back before another test 32 | runs. Otherwise, the validity of the tests will depend on the order in 33 | which the tests are run, which is problematic. 34 | 35 | # Files 36 | 37 | ## [setup.sh](https://github.com/gradescope/autograder_samples/blob/master/mysql/setup.sh) 38 | 39 | This script installs MySQL, Python, and the pip package manager. Then 40 | it uses pip to install our Python dependencies. It also initializes 41 | the database state (relevant part below). 42 | 43 | ```bash 44 | service mysql start 45 | mysql < /autograder/source/database.sql 46 | ``` 47 | 48 | Note that while the service is started in setup.sh, this is only so 49 | that we can initialize the database state. Because of the way Docker 50 | works, when the autograder runs, we will still need to start the 51 | `mysql` service manually. 52 | 53 | ## [database.sql](https://github.com/gradescope/autograder_samples/blob/master/mysql/database.sql) 54 | 55 | This is an SQL file which initializes the database state. This is not 56 | intended as a perfect example of SQL, but it simply demonstrates a way 57 | to set up a database that students will access. 58 | 59 | ## [run_autograder](https://github.com/gradescope/autograder_samples/blob/master/mysql/run_autograder) 60 | 61 | This script copies the student's submission to the target directory, 62 | and then executes the test runner Python script. It also starts the 63 | MySQL server, which must be done when the autograder starts, because 64 | Docker containers do not start services automatically. 
65 | 66 | ## [run_tests.py](https://github.com/gradescope/autograder_samples/blob/master/mysql/run_tests.py) 67 | 68 | This python script loads and runs the tests using the JSONTestRunner 69 | class from gradescope-utils. This produces the JSON formatted output 70 | to stdout, which is then captured and uploaded by the autograder 71 | harness. 72 | 73 | ## [framework/queries.py](https://github.com/gradescope/autograder_samples/blob/master/mysql/framework/queries.py) 74 | 75 | This is a blank template file for the students to fill in. It provides 76 | function signatures for methods that will query the database and 77 | return some desired value. 78 | 79 | ## [solution/queries.py](https://github.com/gradescope/autograder_samples/blob/master/mysql/solution/queries.py) 80 | 81 | This is a filled-in version of the above template file. 82 | 83 | ## Example Test 84 | 85 | ``` 86 | class TestSimpleQueries(unittest.TestCase): 87 | def setUp(self): 88 | self.connection = mysql.connector.connect(user='test', password='password', database='test_data') 89 | self.cursor = self.connection.cursor() 90 | self.queries = Queries(self.cursor) 91 | 92 | def tearDown(self): 93 | self.cursor.close() 94 | self.connection.close() 95 | 96 | @weight(1) 97 | def test_department_budget(self): 98 | """Get department budget by name""" 99 | val = self.queries.department_budget("Engineering") 100 | self.assertEqual(val, 1000000) 101 | ``` 102 | 103 | The title of the test case is taken from the first line of the 104 | docstring. This is a `unittest` convention. The weight for each test is 105 | given by the `@weight` decorator. 106 | 107 | In this simple example, the student is filling in methods that perform 108 | a database query given an argument, and the result is checked against 109 | an expected value. 
110 | 111 | See all tests 112 | [here](https://github.com/gradescope/autograder_samples/tree/master/mysql/tests) 113 | -------------------------------------------------------------------------------- /python/src/solution/calculator.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | 4 | class CalculatorException(Exception): 5 | """A class to throw if you come across incorrect syntax or other issues""" 6 | def __init__(self, value): 7 | self.value = value 8 | 9 | def __str__(self): 10 | return repr(self.value) 11 | 12 | 13 | class Calculator(object): 14 | """Infix calculator REPL 15 | 16 | Parses and evaluates infix arithmetic with the 4 basic operators 17 | and parentheses. Must obey order of operations. 18 | """ 19 | DIGIT = re.compile('\-?\d+') 20 | WHITESPACE = re.compile('\s+') 21 | OPERATOR = re.compile('[\+\-\*\/]') 22 | PAREN = re.compile('[\(\)]') 23 | TOKEN_CLASSES = [DIGIT, WHITESPACE, OPERATOR, PAREN] 24 | PRECEDENCES = { 25 | '+': 1, 26 | '-': 1, 27 | '*': 2, 28 | '/': 2, 29 | '(': 0, # For precedence matters, parens don't count 30 | ')': 0 31 | } 32 | 33 | def is_digit(self, token): 34 | return self.DIGIT.match(token) 35 | 36 | def is_operator(self, token): 37 | return self.OPERATOR.match(token) 38 | 39 | def is_paren(self, token): 40 | return self.PAREN.match(token) 41 | 42 | def is_operand(self, token): 43 | return self.is_digit(token) or self.is_paren(token) 44 | 45 | def read(self): 46 | """Read input from stdin""" 47 | return input('> ') 48 | 49 | def lex(self, string): 50 | """Break an input string into tokens""" 51 | tokens = [] 52 | i = 0 53 | while i < len(string): 54 | match = self.DIGIT.match(string, i) 55 | if match: 56 | tokens.append(int(match.group())) 57 | i = match.end() 58 | continue 59 | match = self.WHITESPACE.match(string, i) 60 | if match: 61 | i = match.end() 62 | continue 63 | match = self.OPERATOR.match(string, i) 64 | if match: 65 | tokens.append(match.group()) 66 | i = 
match.end() 67 | continue 68 | match = self.PAREN.match(string, i) 69 | if match: 70 | tokens.append(match.group()) 71 | i = match.end() 72 | continue 73 | raise CalculatorException("Unknown character".format(string[i])) 74 | i = i + 1 75 | return tokens 76 | 77 | def parse(self, tokens): 78 | """Turns an infix arithmetic string into an RPN representation. 79 | 80 | Uses the Shunting yard algorithm. This is used to resolve operator 81 | precedence and handle parentheses.""" 82 | output = [] 83 | operator_stack = [] 84 | while len(tokens) > 0: 85 | token = tokens.pop(0) 86 | 87 | if type(token) == int: 88 | output.append(token) 89 | elif self.is_operator(token): 90 | precedence = self.PRECEDENCES[token] 91 | while len(operator_stack) > 0 and \ 92 | precedence <= self.PRECEDENCES[operator_stack[-1]]: 93 | output.append(operator_stack.pop()) 94 | operator_stack.append(token) 95 | elif token == "(": 96 | operator_stack.append(token) 97 | elif token == ")": 98 | while operator_stack[-1] != "(": 99 | output.append(operator_stack.pop()) 100 | operator_stack.pop() # Pop the left paren 101 | while len(operator_stack) > 0: 102 | output.append(operator_stack.pop()) 103 | return output 104 | 105 | def eval_rpn(self, rpn): 106 | """Evaluates an RPN expression in list form""" 107 | stack = [] 108 | while len(rpn) > 0: 109 | token = rpn.pop(0) 110 | if type(token) == int: 111 | stack.append(token) 112 | else: # token is an operator 113 | if len(stack) < 2: 114 | raise CalculatorException("Not enough inputs for operator {0}".format(token)) 115 | else: 116 | op1 = stack.pop() 117 | op2 = stack.pop() 118 | if token == '+': 119 | val = op1 + op2 120 | elif token == '-': 121 | val = op2 - op1 122 | elif token == '*': 123 | val = op1 * op2 124 | elif token == '/': 125 | val = op2 / op1 126 | stack.append(val) 127 | if len(stack) == 1: 128 | return stack[0] 129 | else: 130 | raise CalculatorException("Too many input values") 131 | 132 | def eval(self, string): 133 | """Evaluates an 
infix arithmetic expression""" 134 | tokens = self.lex(string) 135 | ast = self.parse(tokens) 136 | value = self.eval_rpn(ast) 137 | return value 138 | 139 | def loop(self): 140 | """Runs the read-eval-print loop 141 | 142 | Read a line of input, evaluate it, and print it. 143 | 144 | Repeat the above until the user types 'quit'.""" 145 | line = self.read() 146 | while line != "quit": 147 | value = self.eval(line) 148 | print(value) 149 | # Read next line of input 150 | line = self.read() 151 | 152 | if __name__ == '__main__': 153 | calc = Calculator() 154 | calc.loop() 155 | -------------------------------------------------------------------------------- /python/src/incorrect_3/calculator.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | 4 | class CalculatorException(Exception): 5 | """A class to throw if you come across incorrect syntax or other issues""" 6 | def __init__(self, value): 7 | self.value = value 8 | 9 | def __str__(self): 10 | return repr(self.value) 11 | 12 | 13 | class Calculator(object): 14 | """Infix calculator REPL 15 | 16 | Parses and evaluates infix arithmetic with the 4 basic operators 17 | and parentheses. Must obey order of operations. 
18 | """ 19 | DIGIT = re.compile('\-?\d+') 20 | WHITESPACE = re.compile('\s+') 21 | OPERATOR = re.compile('[\+\-\*\/]') 22 | PAREN = re.compile('[\(\)]') 23 | TOKEN_CLASSES = [DIGIT, WHITESPACE, OPERATOR, PAREN] 24 | PRECEDENCES = { 25 | '+': 1, 26 | '-': 1, 27 | '*': 2, 28 | '/': 2, 29 | '(': 0, # For precedence matters, parens don't count 30 | ')': 0 31 | } 32 | 33 | def is_digit(self, token): 34 | return self.DIGIT.match(token) 35 | 36 | def is_operator(self, token): 37 | return self.OPERATOR.match(token) 38 | 39 | def is_paren(self, token): 40 | return self.PAREN.match(token) 41 | 42 | def is_operand(self, token): 43 | return self.is_digit(token) or self.is_paren(token) 44 | 45 | def read(self): 46 | """Read input from stdin""" 47 | return input('> ') 48 | 49 | def lex(self, string): 50 | """Break an input string into tokens""" 51 | tokens = [] 52 | i = 0 53 | while i < len(string): 54 | match = self.DIGIT.match(string, i) 55 | if match: 56 | tokens.append(int(match.group())) 57 | i = match.end() 58 | continue 59 | match = self.WHITESPACE.match(string, i) 60 | if match: 61 | i = match.end() 62 | continue 63 | match = self.OPERATOR.match(string, i) 64 | if match: 65 | tokens.append(match.group()) 66 | i = match.end() 67 | continue 68 | match = self.PAREN.match(string, i) 69 | if match: 70 | tokens.append(match.group()) 71 | i = match.end() 72 | continue 73 | raise CalculatorException("Unknown character".format(string[i])) 74 | i = i + 1 75 | return tokens 76 | 77 | def parse(self, tokens): 78 | """Turns an infix arithmetic string into an RPN representation. 79 | 80 | Uses the Shunting yard algorithm. 
This is used to resolve operator 81 | precedence and handle parentheses.""" 82 | output = [] 83 | operator_stack = [] 84 | while len(tokens) > 0: 85 | token = tokens.pop(0) 86 | 87 | if type(token) == int: 88 | output.append(token) 89 | elif self.is_operator(token): 90 | precedence = self.PRECEDENCES[token] 91 | while len(operator_stack) > 0 and \ 92 | precedence <= self.PRECEDENCES[operator_stack[-1]]: 93 | output.append(operator_stack.pop()) 94 | operator_stack.append(token) 95 | elif token == "(": 96 | operator_stack.append(token) 97 | elif token == ")": 98 | while operator_stack[-1] != "(": 99 | output.append(operator_stack.pop()) 100 | operator_stack.pop() # Pop the left paren 101 | while len(operator_stack) > 0: 102 | output.append(operator_stack.pop()) 103 | return output 104 | 105 | def eval_rpn(self, rpn): 106 | """Evaluates an RPN expression in list form""" 107 | stack = [] 108 | while len(rpn) > 0: 109 | token = rpn.pop(0) 110 | if type(token) == int: 111 | stack.append(token) 112 | else: # token is an operator 113 | if len(stack) < 2: 114 | raise CalculatorException("Not enough inputs for operator {0}".format(token)) 115 | else: 116 | op1 = stack.pop() 117 | op2 = stack.pop() 118 | if token == '+': 119 | val = op1 + op2 120 | elif token == '-': 121 | val = op2 - op1 122 | elif token == '*': 123 | val = op1 * op2 124 | elif token == '/': 125 | val = op2 / op1 126 | stack.append(val) 127 | if len(stack) == 1: 128 | return stack[0] 129 | else: 130 | raise CalculatorException("Too many input values") 131 | 132 | def eval(self, string): 133 | """Evaluates an infix arithmetic expression""" 134 | tokens = self.lex(string) 135 | ast = self.parse(tokens) 136 | value = self.eval_rpn(ast) 137 | return value 138 | 139 | def loop(self): 140 | """Runs the read-eval-print loop 141 | 142 | Read a line of input, evaluate it, and print it. 
143 | 144 | Repeat the above until the user types 'quit'.""" 145 | line = self.read() 146 | quit = False 147 | while not quit: 148 | value = self.eval(line) 149 | print(value) 150 | line = self.read() 151 | quit = line == 'quit' 152 | 153 | if __name__ == '__main__': 154 | calc = Calculator() 155 | calc.loop() 156 | -------------------------------------------------------------------------------- /java/README.md: -------------------------------------------------------------------------------- 1 | # Gradescope Java Autograder Example 2 | 3 | [View project source on Github](https://github.com/gradescope/autograder_samples/tree/master/java) 4 | 5 | This example shows how to set up an autograder on Gradescope for a 6 | Java project. It uses JUnit, JDK11, and UC Berkeley CS61B's jh61b 7 | library for producing output in JSON format. 8 | 9 | !!! warning "Warning" 10 | This example was provided by an early instructor user of the autograder 11 | platform and is not actively maintained. It may not support all current 12 | autograder platform functionality. See our 13 | [Community Resources page](https://gradescope-autograders.readthedocs.io/en/latest/resources/) for alternative Java resources. 14 | 15 | ## Project Description 16 | 17 | This project builds a simple Linked List that stores ints as data 18 | values. 19 | 20 | ## Explanation of Dependencies 21 | 22 | - JUnit: Popular Java unit testing framework 23 | - jh61b: Among other things, this provides annotations for JUnit tests that allows setting point values and names, and a test listener that produces output in JSON format 24 | - JDK11: jh61b uses String.join() which is added in JDK8. If you need versions <8, you just need to replace this part of the code 25 | 26 | # Files 27 | 28 | ## [setup.sh](https://github.com/gradescope/autograder_samples/blob/master/java/setup.sh) 29 | 30 | Sets up OpenJDK 11. 31 | 32 | Note: Installing JDK11 takes a few minutes, so building the image takes 33 | a while. 
We may later provide base images to speed this up. 34 | 35 | ## [run_autograder](https://github.com/gradescope/autograder_samples/blob/master/java/run_autograder) 36 | 37 | Copies the student's code to the autograder directory, compiles, and 38 | executes it. 39 | 40 | One thing to be aware of for Java projects is that you need to copy 41 | the student's work to the right place depending on the 42 | package. Depending on how you set up the project, students may submit 43 | files in the root of their submission or within nested directories for 44 | the package they used. Either is fine, you just need to sure that when 45 | you're copying the files around you put them in the right place. Your 46 | autograder code should know what package to import from, so make sure 47 | that you know ahead of time what the student's package is (i.e. tell 48 | them what to do or set up a template that has it filled in). 49 | 50 | You could also do something like `$(find . -name "IntList.java")` in 51 | your bash script and copy the result of that to the destination, but 52 | it's probably better to just require them to submit in a certain 53 | structure, which Gradescope will in the future make easier to verify. 54 | 55 | This script calls compile.sh and run.sh to compile and run code. 56 | 57 | ### [compile.sh](https://github.com/gradescope/autograder_samples/blob/master/java/compile.sh) 58 | 59 | This script finds all source files under the *src* directory and 60 | compiles them. It adds the junit and hamcrest jars in the lib 61 | directory to the classpath (hamcrest is a dependency of JUnit). It 62 | produces output in the *classes* directory. 63 | 64 | ### [run.sh](https://github.com/gradescope/autograder_samples/blob/master/java/run.sh) 65 | 66 | This script just runs the IntListTest class. It adds the compiled 67 | classes and bundled libraries to the classpath. 
68 | 69 | 70 | ## [AbstractIntList.java](https://github.com/gradescope/autograder_samples/blob/master/java/src/main/java/com/gradescope/intlist/AbstractIntList.java) 71 | 72 | This is the abstract base class for the students' IntList 73 | implementation. It's essentially a linked list for ints, with a few 74 | operations you can do on it. It has a constructor and .equals() 75 | method implemented, so that you can rely on those things being 76 | consistent in your tests. 77 | 78 | ## [RefIntList.java](https://github.com/gradescope/autograder_samples/blob/master/java/src/main/java/com/gradescope/intlist/RefIntList.java) 79 | 80 | This is a "reference implementation" of the IntList class. One 81 | possible approach to writing tests is to have your reference 82 | implementation as part of your autograder, and compare the student's 83 | return values to the reference implementation. Also, if you need to 84 | rely on certain functions in the student's code working for some 85 | tests, you should use your reference implementation to set up the test 86 | data structures and then call the student's code. For Java, this may 87 | require setting up a copy constructor in your students' class. 88 | 89 | ## [IntList.java](https://github.com/gradescope/autograder_samples/blob/master/java/src/main/java/com/gradescope/intlist/IntList.java) 90 | 91 | This is the template file that you would give to students to fill 92 | in. It has some parts filled in that should be kept by students; in 93 | particular, the copy constructor is used in the tests to allow setting 94 | up a test using the reference implementation and then copying the data 95 | to an instance of the student's implementation so that you can test 96 | individual functions in isolation instead of relying on students to 97 | implement basic functionality correctly. Due to the way Java works 98 | this can't be done in the abstract parent class. 
99 | 100 | ## [IntList.java solution](https://github.com/gradescope/autograder_samples/blob/master/java/solution/IntList.java) 101 | 102 | This is an example solution. It's just the reference implementation 103 | with the name changed. You can submit this to Gradescope to see how it 104 | works. 105 | 106 | ## [IntListTest.java](https://github.com/gradescope/autograder_samples/blob/master/java/src/main/java/com/gradescope/intlist/tests/IntListTest.java) 107 | 108 | This is the actual Test class. It imports the necessary parts of 109 | jh61b, JUnit, and the student's code. Then, it runs a set of basic 110 | tests on the student's code. 111 | 112 | ## [IntListPredicates.java](https://github.com/gradescope/autograder_samples/blob/master/java/src/main/java/com/gradescope/intlist/tests/IntListPredicates.java) 113 | 114 | This is another Test class, just to demonstrate multi-class test suites. 115 | 116 | ## [RunTests.java](https://github.com/gradescope/autograder_samples/blob/master/java/src/main/java/com/gradescope/intlist/tests/RunTests.java) 117 | 118 | This class actually runs the tests. This demonstrates setting up a 119 | Suite in JUnit. 120 | -------------------------------------------------------------------------------- /java/src/main/java/com/gradescope/jh61b/grader/GradedTestListenerHumanReadable.java: -------------------------------------------------------------------------------- 1 | // adapted from http://memorynotfound.com/add-junit-listener-example/ 2 | // Highly redundant with GradedTestListenerJSON. Maybe refactor later. 3 | // Also, should output go to StdErr? That's what Paul did. 4 | // TODO: Make stack traces less onerous. See textui.java for ideas of how we might do this. 
5 | package com.gradescope.jh61b.grader; 6 | 7 | import java.util.List; 8 | import java.util.ArrayList; 9 | 10 | import org.junit.runner.Description; 11 | import org.junit.runner.Result; 12 | import org.junit.runner.notification.Failure; 13 | import org.junit.runner.notification.RunListener; 14 | 15 | import java.io.ByteArrayOutputStream; 16 | import java.io.PrintStream; 17 | 18 | import java.util.Collection; 19 | 20 | import java.lang.annotation.Annotation; 21 | import java.lang.reflect.Method; 22 | import java.lang.reflect.InvocationTargetException; 23 | 24 | import com.gradescope.jh61b.junit.JUnitUtilities; 25 | 26 | 27 | public class GradedTestListenerHumanReadable extends RunListener { 28 | 29 | /* Current test result. Created at the beginning of every test, completed at the 30 | end of every test. */ 31 | private static TestResult currentTestResult; 32 | 33 | /* All test results. */ 34 | private static List allTestResults; 35 | 36 | /** Returns the name of a test as stored in an annotation. 37 | * TODO: Is there a more elegant way to do this? */ 38 | private static String getAnnotationString(Annotation x, String annotationStringName) throws 39 | IllegalAccessException, InvocationTargetException { 40 | Method[] methods = x.getClass().getDeclaredMethods(); 41 | /** If the annotation has a method name() that returns 42 | * a String, invoke that method and return the result. 43 | */ 44 | 45 | for (Method m : methods) { 46 | if (m.getName().equals(annotationStringName) && 47 | m.getReturnType().getCanonicalName().equals("java.lang.String")) { 48 | return (String) m.invoke(x); 49 | } 50 | } 51 | return "Uh-oh, getAnnotationString failed to get test String. This should never happen!"; 52 | } 53 | 54 | /** Returns the name of a test as stored in an annotation. 55 | * TODO: Is there a more elegant way to do this? 
*/ 56 | private static double getAnnotationDouble(Annotation x, String annotationDoubleName) throws 57 | IllegalAccessException, InvocationTargetException { 58 | Method[] methods = x.getClass().getDeclaredMethods(); 59 | /** If the annotation has a method name() that returns 60 | * a String, invoke that method and return the result. 61 | */ 62 | 63 | for (Method m : methods) { 64 | if (m.getName().equals(annotationDoubleName) && 65 | m.getReturnType().getCanonicalName().equals("double")) { 66 | return (double) m.invoke(x); 67 | } 68 | } 69 | return -31337; 70 | } 71 | 72 | /** Gets test name of the given test. */ 73 | private static String getTestName(GradedTest x) throws 74 | IllegalAccessException, InvocationTargetException { 75 | return getAnnotationString(x, "name"); 76 | } 77 | 78 | /** Gets test number of the given test. */ 79 | private static String getTestNumber(GradedTest x) throws 80 | IllegalAccessException, InvocationTargetException { 81 | return getAnnotationString(x, "number"); 82 | } 83 | 84 | /** Gets test weight of the given test. */ 85 | private static double getTestMaxScore(GradedTest x) throws 86 | IllegalAccessException, InvocationTargetException { 87 | return getAnnotationDouble(x, "max_score"); 88 | } 89 | 90 | private static String getTestVisibility(GradedTest x) throws 91 | IllegalAccessException, InvocationTargetException { 92 | return getAnnotationString(x, "visibility"); 93 | } 94 | 95 | /** Returns the name of a test as stored in an annotation. 96 | * TODO: Is there a more elegant way to do this? */ 97 | 98 | 99 | /* Code to run at the beginning of a test run. */ 100 | public void testRunStarted(Description description) throws Exception { 101 | allTestResults = new ArrayList(); 102 | } 103 | 104 | /* Code to run at the end of test run. 
*/ 105 | public void testRunFinished(Result result) throws Exception { 106 | int count = result.getRunCount(); 107 | int numFailed = result.getFailureCount(); 108 | int numPassed = count - numFailed; 109 | System.out.println(String.format("Passed: %d/%d tests.", numPassed, count)); 110 | } 111 | 112 | public void testStarted(Description description) throws Exception { 113 | GradedTest gradedTestAnnotation = description.getAnnotation(GradedTest.class); 114 | String testName = getTestName(gradedTestAnnotation); 115 | String testNumber = getTestNumber(gradedTestAnnotation); 116 | double testMaxScore = getTestMaxScore(gradedTestAnnotation); 117 | String visibility = getTestVisibility(gradedTestAnnotation); 118 | /* Capture StdOut (both ours and theirs) so that we can relay it to the students. */ 119 | currentTestResult = new TestResult(testName, testNumber, testMaxScore, visibility); 120 | 121 | /* By default every test passes. */ 122 | currentTestResult.setScore(testMaxScore); 123 | 124 | String testSummary = String.format("Test %s: %s (%s)", testNumber, testName, description.getMethodName()); 125 | System.out.println("Running " + testSummary); 126 | } 127 | 128 | /** When a test completes, add the test output at the bottom. Then stop capturing 129 | * StdOut. Open question: Is putting the captured output at the end clear? Or is that 130 | * possibly confusing? We'll see... */ 131 | public void testFinished(Description description) throws Exception { 132 | /* For Debugging. */ 133 | if (false) { 134 | System.out.println(currentTestResult); 135 | } 136 | 137 | System.out.println(String.format("==> Score: %.2f / %.2f", currentTestResult.score, currentTestResult.maxScore)); 138 | } 139 | 140 | /** Sets score to 0 and appends reason for failure and dumps a stack trace. 141 | * TODO: Clean up this stack trace so it is not hideous. 
142 | * Other possible things we might want to consider including: http://junit.sourceforge.net/javadoc/org/junit/runner/notification/Failure.html. 143 | */ 144 | public void testFailure(Failure failure) throws Exception { 145 | currentTestResult.setScore(0); 146 | System.out.println("Test Failed!\n");//\nReason: " + failure.getMessage() + "\n"); 147 | System.out.println(JUnitUtilities.failureToString(failure)); 148 | //failure.getTrace()); 149 | } 150 | 151 | } 152 | 153 | 154 | 155 | 156 | /* Unused, but kept around for future reference. 157 | public void testAssumptionFailure(Failure failure) { 158 | System.out.println("Failed: " + failure.getDescription().getMethodName()); 159 | } 160 | 161 | public void testIgnored(Description description) throws Exception { 162 | System.out.println("Ignored: " + description.getMethodName()); 163 | } 164 | */ 165 | -------------------------------------------------------------------------------- /java_template/src/com/gradescope/jh61b/grader/GradedTestListenerHumanReadable.java: -------------------------------------------------------------------------------- 1 | // adapted from http://memorynotfound.com/add-junit-listener-example/ 2 | // Highly redundant with GradedTestListenerJSON. Maybe refactor later. 3 | // Also, should output go to StdErr? That's what Paul did. 4 | // TODO: Make stack traces less onerous. See textui.java for ideas of how we might do this. 
5 | package com.gradescope.jh61b.grader; 6 | 7 | import java.util.List; 8 | import java.util.ArrayList; 9 | 10 | import org.junit.runner.Description; 11 | import org.junit.runner.Result; 12 | import org.junit.runner.notification.Failure; 13 | import org.junit.runner.notification.RunListener; 14 | 15 | import java.io.ByteArrayOutputStream; 16 | import java.io.PrintStream; 17 | 18 | import java.util.Collection; 19 | 20 | import java.lang.annotation.Annotation; 21 | import java.lang.reflect.Method; 22 | import java.lang.reflect.InvocationTargetException; 23 | 24 | import com.gradescope.jh61b.junit.JUnitUtilities; 25 | 26 | 27 | public class GradedTestListenerHumanReadable extends RunListener { 28 | 29 | /* Current test result. Created at the beginning of every test, completed at the 30 | end of every test. */ 31 | private static TestResult currentTestResult; 32 | 33 | /* All test results. */ 34 | private static List allTestResults; 35 | 36 | /** Returns the name of a test as stored in an annotation. 37 | * TODO: Is there a more elegant way to do this? */ 38 | private static String getAnnotationString(Annotation x, String annotationStringName) throws 39 | IllegalAccessException, InvocationTargetException { 40 | Method[] methods = x.getClass().getDeclaredMethods(); 41 | /** If the annotation has a method name() that returns 42 | * a String, invoke that method and return the result. 43 | */ 44 | 45 | for (Method m : methods) { 46 | if (m.getName().equals(annotationStringName) && 47 | m.getReturnType().getCanonicalName().equals("java.lang.String")) { 48 | return (String) m.invoke(x); 49 | } 50 | } 51 | return "Uh-oh, getAnnotationString failed to get test String. This should never happen!"; 52 | } 53 | 54 | /** Returns the name of a test as stored in an annotation. 55 | * TODO: Is there a more elegant way to do this? 
*/ 56 | private static double getAnnotationDouble(Annotation x, String annotationDoubleName) throws 57 | IllegalAccessException, InvocationTargetException { 58 | Method[] methods = x.getClass().getDeclaredMethods(); 59 | /** If the annotation has a method name() that returns 60 | * a String, invoke that method and return the result. 61 | */ 62 | 63 | for (Method m : methods) { 64 | if (m.getName().equals(annotationDoubleName) && 65 | m.getReturnType().getCanonicalName().equals("double")) { 66 | return (double) m.invoke(x); 67 | } 68 | } 69 | return -31337; 70 | } 71 | 72 | /** Gets test name of the given test. */ 73 | private static String getTestName(GradedTest x) throws 74 | IllegalAccessException, InvocationTargetException { 75 | return getAnnotationString(x, "name"); 76 | } 77 | 78 | /** Gets test number of the given test. */ 79 | private static String getTestNumber(GradedTest x) throws 80 | IllegalAccessException, InvocationTargetException { 81 | return getAnnotationString(x, "number"); 82 | } 83 | 84 | /** Gets test weight of the given test. */ 85 | private static double getTestMaxScore(GradedTest x) throws 86 | IllegalAccessException, InvocationTargetException { 87 | return getAnnotationDouble(x, "max_score"); 88 | } 89 | 90 | private static String getTestVisibility(GradedTest x) throws 91 | IllegalAccessException, InvocationTargetException { 92 | return getAnnotationString(x, "visibility"); 93 | } 94 | 95 | /** Returns the name of a test as stored in an annotation. 96 | * TODO: Is there a more elegant way to do this? */ 97 | 98 | 99 | /* Code to run at the beginning of a test run. */ 100 | public void testRunStarted(Description description) throws Exception { 101 | allTestResults = new ArrayList(); 102 | } 103 | 104 | /* Code to run at the end of test run. 
*/ 105 | public void testRunFinished(Result result) throws Exception { 106 | int count = result.getRunCount(); 107 | int numFailed = result.getFailureCount(); 108 | int numPassed = count - numFailed; 109 | System.out.println(String.format("Passed: %d/%d tests.", numPassed, count)); 110 | } 111 | 112 | public void testStarted(Description description) throws Exception { 113 | GradedTest gradedTestAnnotation = description.getAnnotation(GradedTest.class); 114 | String testName = getTestName(gradedTestAnnotation); 115 | String testNumber = getTestNumber(gradedTestAnnotation); 116 | double testMaxScore = getTestMaxScore(gradedTestAnnotation); 117 | String visibility = getTestVisibility(gradedTestAnnotation); 118 | /* Capture StdOut (both ours and theirs) so that we can relay it to the students. */ 119 | currentTestResult = new TestResult(testName, testNumber, testMaxScore, visibility); 120 | 121 | /* By default every test passes. */ 122 | currentTestResult.setScore(testMaxScore); 123 | 124 | String testSummary = String.format("Test %s: %s (%s)", testNumber, testName, description.getMethodName()); 125 | System.out.println("Running " + testSummary); 126 | } 127 | 128 | /** When a test completes, add the test output at the bottom. Then stop capturing 129 | * StdOut. Open question: Is putting the captured output at the end clear? Or is that 130 | * possibly confusing? We'll see... */ 131 | public void testFinished(Description description) throws Exception { 132 | /* For Debugging. */ 133 | if (false) { 134 | System.out.println(currentTestResult); 135 | } 136 | 137 | System.out.println(String.format("==> Score: %.2f / %.2f", currentTestResult.score, currentTestResult.maxScore)); 138 | } 139 | 140 | /** Sets score to 0 and appends reason for failure and dumps a stack trace. 141 | * TODO: Clean up this stack trace so it is not hideous. 
142 | * Other possible things we might want to consider including: http://junit.sourceforge.net/javadoc/org/junit/runner/notification/Failure.html. 143 | */ 144 | public void testFailure(Failure failure) throws Exception { 145 | currentTestResult.setScore(0); 146 | System.out.println("Test Failed!\n");//\nReason: " + failure.getMessage() + "\n"); 147 | System.out.println(JUnitUtilities.failureToString(failure)); 148 | //failure.getTrace()); 149 | } 150 | 151 | } 152 | 153 | 154 | 155 | 156 | /* Unused, but kept around for future reference. 157 | public void testAssumptionFailure(Failure failure) { 158 | System.out.println("Failed: " + failure.getDescription().getMethodName()); 159 | } 160 | 161 | public void testIgnored(Description description) throws Exception { 162 | System.out.println("Ignored: " + description.getMethodName()); 163 | } 164 | */ 165 | --------------------------------------------------------------------------------