Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
59 commits
Select commit Hold shift + click to select a range
56c9c20
Initial rewrite of process_all.sh
williamjallen Jun 25, 2021
13f3635
Merge branch 'main' into redo-file-structure
williamjallen Jun 25, 2021
198790d
Update process_all.sh
williamjallen Jun 25, 2021
8737f55
Make modifications to file paths and add timers
sbelsk Jun 25, 2021
70e3032
Merge branch 'redo-file-structure' of https://github.com/Submitty/Lic…
sbelsk Jun 25, 2021
acf4842
Overhaul concatenate_all.py
williamjallen Jun 25, 2021
388ff4e
Fix python errors
sbelsk Jun 28, 2021
053f0f6
Progress: everything through tokenization finished
williamjallen Jun 29, 2021
9480b47
Everything works
williamjallen Jun 29, 2021
0938e76
Add timers
williamjallen Jun 29, 2021
563642a
remove unnecessary code
williamjallen Jun 29, 2021
210a778
little python changes
sbelsk Jun 30, 2021
473ff7b
William made an oopsie (forgot to deal with provided code)
williamjallen Jun 30, 2021
558b963
Merge branch 'redo-file-structure' of https://github.com/Submitty/Lic…
williamjallen Jun 30, 2021
00675a3
Fix minor bugs
williamjallen Jul 1, 2021
8a5db9d
Fix permissions issue with provided code editing
williamjallen Jul 2, 2021
f7abb09
Add initial script
williamjallen Jul 3, 2021
3ba16d2
Update lichen_run.yml
williamjallen Jul 3, 2021
e0ac0da
Update lichen_run.yml
williamjallen Jul 3, 2021
ddbd29f
Update lichen_run.yml
williamjallen Jul 3, 2021
6c3aadd
Update lichen_run.yml
williamjallen Jul 3, 2021
d644fde
Update lichen_run.yml
williamjallen Jul 3, 2021
67566b0
add boost
williamjallen Jul 3, 2021
32ff986
add testing file
williamjallen Jul 3, 2021
be1a57f
forgot that paths are important
williamjallen Jul 3, 2021
4b01ead
Make separate setup.sh script
williamjallen Jul 3, 2021
2b15816
Update lichen_run.yml
williamjallen Jul 3, 2021
0595eb3
Adjust file structure, add setup script
williamjallen Jul 3, 2021
10dbed2
need sudo for test
williamjallen Jul 3, 2021
b9b0857
Update tests.py
williamjallen Jul 3, 2021
c50e017
fix path
williamjallen Jul 3, 2021
30df2b8
fix path
williamjallen Jul 3, 2021
8554c60
Update tests.py
williamjallen Jul 3, 2021
32c7aa1
add assertion to implement test
williamjallen Jul 3, 2021
5956ec4
fix more paths
williamjallen Jul 3, 2021
63df190
fix another path issue
williamjallen Jul 3, 2021
57db882
Add second test
williamjallen Jul 3, 2021
250d998
Update tests.py
williamjallen Jul 3, 2021
f452fa4
it's important to run the right command to get the right results...
williamjallen Jul 3, 2021
b8103fe
Add third test
williamjallen Jul 3, 2021
0efa16e
Add remaining plaintext tokenizer tests
williamjallen Jul 3, 2021
24d97bd
Add C tokenizer tests
williamjallen Jul 5, 2021
a50ef67
Update lichen_run.yml
williamjallen Jul 5, 2021
ed64153
Update lichen_run.yml
williamjallen Jul 5, 2021
7300b4e
Update lichen_run.yml
williamjallen Jul 5, 2021
2066fa3
Add MIPS tokenizer
williamjallen Jul 5, 2021
d26b144
Update tests.py
williamjallen Jul 5, 2021
bb545d8
Update tests.py
williamjallen Jul 5, 2021
fc47003
Merge branch 'main' into add-tokenizer-tests
williamjallen Jul 19, 2021
4abfaf1
Fix paths in tests.py such that it can be run in vagrant
williamjallen Jul 19, 2021
be7dd8b
Fix github actions
williamjallen Jul 19, 2021
0ef49f8
Add hash all test
sbelsk Jul 19, 2021
6b31487
Merge branch 'add-tokenizer-tests' into hash-all-tests
williamjallen Jul 19, 2021
49f1b36
change paths
sbelsk Jul 19, 2021
154e2a2
Get rid of unwanted stdout
sbelsk Jul 19, 2021
6065d73
Remove old code
williamjallen Jul 19, 2021
c6db09c
Merge branch 'add-tokenizer-tests' into hash-all-tests
williamjallen Jul 19, 2021
a6719b5
comment with missing letter was bugging me
williamjallen Jul 19, 2021
a817399
Merge branch 'main' into hash-all-tests
bmcutler Jul 22, 2021
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions tests/data/hash_all/config.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
{
"language": "plaintext",
"sequence_length": 2
}
4 changes: 4 additions & 0 deletions tests/data/hash_all/submission.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
int x = 8;
int y = 3;
int z = x + y;
int t = 2 * x + y;
158 changes: 158 additions & 0 deletions tests/data/hash_all/tokens.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
[
{
"char": 1,
"line": 1,
"type": "string",
"value": "int"
},
{
"char": 5,
"line": 1,
"type": "string",
"value": "x"
},
{
"char": 7,
"line": 1,
"type": "punctuation",
"value": "="
},
{
"char": 9,
"line": 1,
"type": "number",
"value": 8
},
{
"char": 10,
"line": 1,
"type": "punctuation",
"value": ";"
},
{
"char": 1,
"line": 2,
"type": "string",
"value": "int"
},
{
"char": 5,
"line": 2,
"type": "string",
"value": "y"
},
{
"char": 7,
"line": 2,
"type": "punctuation",
"value": "="
},
{
"char": 9,
"line": 2,
"type": "number",
"value": 3
},
{
"char": 10,
"line": 2,
"type": "punctuation",
"value": ";"
},
{
"char": 1,
"line": 3,
"type": "string",
"value": "int"
},
{
"char": 5,
"line": 3,
"type": "string",
"value": "z"
},
{
"char": 7,
"line": 3,
"type": "punctuation",
"value": "="
},
{
"char": 9,
"line": 3,
"type": "string",
"value": "x"
},
{
"char": 11,
"line": 3,
"type": "punctuation",
"value": "+"
},
{
"char": 13,
"line": 3,
"type": "string",
"value": "y"
},
{
"char": 14,
"line": 3,
"type": "punctuation",
"value": ";"
},
{
"char": 1,
"line": 4,
"type": "string",
"value": "int"
},
{
"char": 5,
"line": 4,
"type": "string",
"value": "t"
},
{
"char": 7,
"line": 4,
"type": "punctuation",
"value": "="
},
{
"char": 9,
"line": 4,
"type": "number",
"value": 2
},
{
"char": 11,
"line": 4,
"type": "punctuation",
"value": "*"
},
{
"char": 13,
"line": 4,
"type": "string",
"value": "x"
},
{
"char": 15,
"line": 4,
"type": "punctuation",
"value": "+"
},
{
"char": 17,
"line": 4,
"type": "string",
"value": "y"
},
{
"char": 18,
"line": 4,
"type": "punctuation",
"value": ";"
}
]
58 changes: 58 additions & 0 deletions tests/tests.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import unittest
import os
import shutil
import json

lichen_installation_dir = "/usr/local/submitty/Lichen"
lichen_test_playground = "/usr/local/submitty/Lichen/test_output"
Expand Down Expand Up @@ -132,5 +133,62 @@ def testMIPSTokenizer(self):
self.assertEqual(actual_output, expected_output)


class TestHashAll(unittest.TestCase):
    """End-to-end test of bin/hash_all.py on a minimal fake gradeable tree.

    Builds the directory structure hash_all.py expects, runs it on the
    fixture tokens in tests/data/hash_all/, and checks the hashes it emits.
    """

    def setUp(self):
        # Create the scratch area the test writes into.
        if not os.path.isdir(lichen_test_playground):
            os.makedirs(lichen_test_playground)

    def tearDown(self):
        # Remove everything this test created.
        shutil.rmtree(lichen_test_playground)

    def testHashAll(self):
        # Make the fake directory structure hash_all.py expects.
        test_dir = f"{lichen_test_playground}/test_hash_all"
        os.makedirs(f"{test_dir}/provided_code")
        os.makedirs(f"{test_dir}/other_gradeables")
        os.makedirs(f"{test_dir}/users/student/1")

        # hash_all.py needs a provided-code tokens file; "null" (JSON null)
        # indicates no provided code was supplied.
        with open(f"{test_dir}/provided_code/tokens.json", 'w') as file:
            file.write("null")

        # Copy the input files from data/ to the new path.  copyfile
        # creates the destinations, so no need to pre-touch empty files.
        shutil.copyfile("data/hash_all/config.json",
                        f"{test_dir}/config.json")
        shutil.copyfile("data/hash_all/tokens.json",
                        f"{test_dir}/users/student/1/tokens.json")

        # Run hash_all from the installation's bin directory, restoring the
        # working directory afterwards.  NOTE(review): presumably hash_all.py
        # is cwd-sensitive — confirm whether the chdir is still required.
        cwd = os.getcwd()
        os.chdir(f"{lichen_installation_dir}/bin")
        exit_status = os.system(
            f"python3 {lichen_installation_dir}/bin/hash_all.py "
            f"{test_dir} > /dev/null")
        os.chdir(cwd)
        self.assertEqual(exit_status, 0)

        # Read the output hashes and the input tokens.
        hashes_file = f"{test_dir}/users/student/1/hashes.txt"
        with open(hashes_file, 'r') as file:
            hashes = [line.strip() for line in file]
        tokens_file = f"{test_dir}/users/student/1/tokens.json"
        with open(tokens_file, 'r') as file:
            tokens = json.load(file)

        # With sequence_length 2 there is one hash per sliding window of
        # 2 tokens: N - 2 + 1 sequences for N tokens.
        self.assertEqual(len(hashes), len(tokens) - 2 + 1)

        # Pairs of window start indices whose two-token sequences are
        # identical in the fixture (e.g. windows 4, 9 and 16 are all
        # "; int"); those must hash equal, and every other pair of
        # windows must hash differently.
        equal_pairs = {(4, 9), (4, 16), (9, 16),
                       (13, 22), (14, 23), (15, 24)}
        for i in range(len(hashes)):
            for j in range(i + 1, len(hashes)):
                if (i, j) in equal_pairs:
                    self.assertEqual(hashes[i], hashes[j])
                else:
                    self.assertNotEqual(hashes[i], hashes[j])


# Allow running this test module directly (e.g. `python3 tests.py`).
if __name__ == '__main__':
    unittest.main()