-
Notifications
You must be signed in to change notification settings - Fork 1
/
canvas_grade_submissions.py
executable file
·199 lines (160 loc) · 6.47 KB
/
canvas_grade_submissions.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
#!/usr/bin/env python3
import json
import csv
import sys
import logging
import argparse
import os
from canvasapi import Canvas
# which canvas instance to interact with
API_URL = "https://liverpool.instructure.com"
# index roster and submission_metadata byt this column
# should be the roster column corresponding to submission directories
KEY = "github_username"
helpmsg = """
Upload grades for submissions on canvas.
This will iterate through subdirectories and for each entry look up the
(existing) canvas submission and fill the (existing) rubric criteria according
to a local grades file produced by grade50.
The grades are read from a file called "grades.json" in the student submission
dir. This file is as produced by the grade50 script and contains
a list of graded rubrics at top level.
The canvas assignment must have rubric criteria named just like those in the
grades file.
For authentification, the script relies on an API token from canvas,
given via the -t argument or read from the CANVAS_TOKEN env var.
"""
parser = argparse.ArgumentParser(description=helpmsg)
parser.add_argument('course', help='canvas course id')
parser.add_argument('assignment', help='canvas assignment id')
parser.add_argument(
'roster',
help="canvas course roster (csv)",
type=argparse.FileType('r'))
parser.add_argument(
'metadata',
help='submission data such as student id and timestamp (csv)',
type=argparse.FileType('r'),
default='-')
parser.add_argument(
'submissions_dir',
help='directory containing student submissions as subdirectories')
parser.add_argument('-t', '--token',
help='canvas API token',
metavar='TOKEN',
default=os.environ.get('CANVAS_TOKEN', 'token'))
parser.add_argument("--dryrun", '-d', action='store_true', help="don't do it")
parser.add_argument('-v', '--verbose', action='count', default=0)
_LOG_LEVEL_STRINGS = [logging.ERROR, logging.INFO, logging.DEBUG]
def find_canvas_id_from_dirname(dir_name, roster):
    """Map a submission directory name to the student's canvas id.

    The roster maps the value of a user's KEY entry to the user info dict.
    Returns None when the directory name is not in the roster (e.g. a
    student submitted via submit50 but never supplied their github ID).
    """
    entry = roster.get(dir_name)
    if entry is None:
        return None
    return entry['Student ID']
def find_submission_metadata(sid, dir_name, submissions_data, roster):
    """
    Try to find the metadata for a submission made by a given student.

    submissions_data maps the value of a user's KEY entry to the dict
    with their submission entry.  Returns None when there is no entry for
    dir_name, so the caller's `is None` check can report and bail instead
    of crashing on a KeyError.  (sid and roster are currently unused but
    kept for interface compatibility.)
    """
    return submissions_data.get(dir_name)
def get_assignment(token, aid, cid):
    """ get canvas assignment object

    Authenticates against API_URL with `token`, then resolves course
    `cid` and returns its assignment `aid`.
    """
    api = Canvas(API_URL, token)
    return api.get_course(cid).get_assignment(aid)
def load_roster(filehandle, keyterm='Student ID'):
    """ interpret canvas roster csv

    Returns (entries, fieldnames): `entries` maps each row's `keyterm`
    value to the full row dict, `fieldnames` is the csv header list.
    """
    reader = csv.DictReader(filehandle)
    header = reader.fieldnames
    entries = {row[keyterm]: row for row in reader}
    return entries, header
def add_canvas_grading(assignment, sid, grades, meta, paths):
    """Write one student's rubric grades and comment files to canvas.

    assignment: canvasapi assignment object carrying the rubric
    sid:        canvas student id whose submission is graded
    grades:     grade50 report dict; grades['parts'] is a list of
                {'name', 'points', 'comments'} entries whose names must
                match the rubric criteria descriptions
    meta:       submission metadata (currently unused, kept for interface)
    paths:      files to attach to the submission as comments
    """
    logging.debug("\ncreating submission..")
    logging.debug(str(grades))
    submission = assignment.get_submission(sid)
    # index the graded parts by name once instead of re-scanning the parts
    # list for every rubric criterion
    parts_by_name = {part['name']: part for part in grades['parts']}
    parms = {}
    for criterion in assignment.rubric:
        part = parts_by_name.get(criterion['description'])
        if part is None:
            # criterion has no graded counterpart; leave it unfilled
            continue
        logging.debug("---------\n\npart:\n" + str(part))
        parms[criterion['id']] = {
            'points': part['points'],
            'comments': '\n'.join(part['comments']),
        }
    # the whole rubric assessment needs to be sent in one go!!
    submission.edit(rubric_assessment={**parms})
    # attach files as comments
    for f in paths:
        submission.upload_comment(f)
if __name__ == "__main__":
    # read arguments and set up logging
    args = parser.parse_args()
    # clamp so extra -v flags (e.g. -vvv) don't index past the level table
    _level = _LOG_LEVEL_STRINGS[min(args.verbose, len(_LOG_LEVEL_STRINGS) - 1)]
    logging.basicConfig(
        level=_level,
        format='%(message)s',
    )
    # silence canvasapi logger
    logging.getLogger("canvasapi.requester").setLevel(logging.ERROR)
    # read Canvas student roster, keyed by the KEY column so directory
    # names (github usernames) can be looked up directly
    roster, fieldnames = load_roster(args.roster, keyterm=KEY)
    # find submission on disk and its timestamp.
    logging.debug("loading submissions data")
    submissions_data = {}
    for e in csv.DictReader(args.metadata):
        # lowercase so directory names match case-insensitively
        e['github_username'] = e['github_username'].lower()
        submissions_data[e['github_username']] = e
    # read assignment from canvas
    logging.info("loading assignment " + args.assignment)
    assignment = get_assignment(args.token, args.assignment, args.course)
    # change assignment submission type.
    # we will make a submission in each students' name later
    assignment = assignment.edit(assignment={
        'submission_types': ['online_text_entry'],
    })
    # store write operations for later
    ops = []
    # GO!
    logging.info("iterating over submission directories")
    for sub_dir in os.listdir(args.submissions_dir):
        sid = find_canvas_id_from_dirname(sub_dir, roster)
        if sid is None:
            # could not find canvas id for this directory. Ignore and move on
            logging.error('no canvas ID found for ' + sub_dir)
            continue
        # read grades report
        # NOTE(review): helpmsg advertises "grades.json" but the file read
        # here is "grade.json" -- confirm which name grade50 actually emits
        grades_path = os.path.join(args.submissions_dir, sub_dir, "grade.json")
        logging.debug('grades_path: ' + grades_path)
        try:
            with open(grades_path, "r") as f:
                grades = json.load(f)
        except (OSError, json.JSONDecodeError):
            # narrowed from BaseException: a missing/unreadable/corrupt report
            # aborts, but KeyboardInterrupt etc. are no longer swallowed
            logging.error("could not read " + grades_path)
            sys.exit(1)
        # find submission metadata (timestamp)
        try:
            metadata = find_submission_metadata(
                sid, sub_dir, submissions_data, roster)
        except KeyError:
            # a missing entry is treated the same as a None result below
            metadata = None
        if metadata is None:
            logging.error('no submission metadata found for ' + sub_dir)
            sys.exit(1)
        # Store what to write for now, so that all submissions are considered
        # before write-out.
        # TODO: last parm is for file attachments
        ops.append((sid, grades, metadata, []))
    if args.dryrun:
        print(json.dumps(ops, indent=2))
    else:
        for sid, grades, meta, paths in ops:
            add_canvas_grading(assignment, sid, grades, meta, paths)