From c338d8b8e1c4e44f31c7739c32301287cd0e40bf Mon Sep 17 00:00:00 2001
From: Amir Qayyum Khan
Date: Fri, 29 Jan 2016 16:45:16 +0500
Subject: [PATCH] Added normalize column and checkbox in UI

---
 lms/djangoapps/courseware/grades.py       | 119 ++++++++----------
 lms/djangoapps/instructor/views/legacy.py |  21 ++--
 .../legacy_instructor_dashboard.html      |   4 +-
 3 files changed, 66 insertions(+), 78 deletions(-)

diff --git a/lms/djangoapps/courseware/grades.py b/lms/djangoapps/courseware/grades.py
index 1becdeb04b0a..fafb2b33a501 100644
--- a/lms/djangoapps/courseware/grades.py
+++ b/lms/djangoapps/courseware/grades.py
@@ -382,85 +382,64 @@ def _grade(student, request, course, keep_raw_scores, field_data_cache, scores_c
                 # TODO This block is causing extra savepoints to be fired that are empty because no queries are executed
                 # during the loop. When refactoring this code please keep this outer_atomic call in mind and ensure we
                 # are not making unnecessary database queries.
-                should_grade_section = any(
-                    descriptor.always_recalculate_grades for descriptor in section['xmoduledescriptors']
-                )
-                # If there are no problems that always have to be regraded, check to
-                # see if any of our locations are in the scores from the submissions
-                # API. If scores exist, we have to calculate grades for this section.
-                if not should_grade_section:
-                    should_grade_section = any(
-                        descriptor.location.to_deprecated_string() in submissions_scores
-                        for descriptor in section['xmoduledescriptors']
+                # If we haven't seen a single problem in the section, we don't have
+                # to grade it at all! We can assume 0%
+                scores = []
+
+                def create_module(descriptor):
+                    """creates an XModule instance given a descriptor"""
+                    # TODO: We need the request to pass into here. If we could forego that, our arguments
+                    # would be simpler
+                    return get_module_for_descriptor(
+                        student, request, descriptor, field_data_cache, course.id, course=course
                     )
-                if not should_grade_section:
-                    should_grade_section = any(
-                        descriptor.location in scores_client
-                        for descriptor in section['xmoduledescriptors']
+                descendants = yield_dynamic_descriptor_descendants(section_descriptor, student.id, create_module)
+                for module_descriptor in descendants:
+                    user_access = has_access(
+                        student, 'load', module_descriptor, module_descriptor.location.course_key
                     )
+                    if not user_access:
+                        continue
-                # If we haven't seen a single problem in the section, we don't have
-                # to grade it at all! We can assume 0%
-                if should_grade_section:
-                    scores = []
-
-                    def create_module(descriptor):
-                        '''creates an XModule instance given a descriptor'''
-                        # TODO: We need the request to pass into here. If we could forego that, our arguments
-                        # would be simpler
-                        return get_module_for_descriptor(
-                            student, request, descriptor, field_data_cache, course.id, course=course
-                        )
+                    (correct, total) = get_score(
+                        student,
+                        module_descriptor,
+                        create_module,
+                        scores_client,
+                        submissions_scores,
+                        max_scores_cache,
+                    )
+                    if correct is None and total is None:
+                        continue
-                    descendants = yield_dynamic_descriptor_descendants(section_descriptor, student.id, create_module)
-                    for module_descriptor in descendants:
-                        user_access = has_access(
-                            student, 'load', module_descriptor, module_descriptor.location.course_key
-                        )
-                        if not user_access:
-                            continue
-
-                        (correct, total) = get_score(
-                            student,
-                            module_descriptor,
-                            create_module,
-                            scores_client,
-                            submissions_scores,
-                            max_scores_cache,
-                        )
-                        if correct is None and total is None:
-                            continue
-
-                        if settings.GENERATE_PROFILE_SCORES:  # for debugging!
-                            if total > 1:
-                                correct = random.randrange(max(total - 2, 1), total + 1)
-                            else:
-                                correct = total
-
-                        graded = module_descriptor.graded
-                        if not total > 0:
-                            # We simply cannot grade a problem that is 12/0, because we might need it as a percentage
-                            graded = False
-
-                        scores.append(
-                            Score(
-                                correct,
-                                total,
-                                graded,
-                                module_descriptor.display_name_with_default,
-                                module_descriptor.location
-                            )
+                    if settings.GENERATE_PROFILE_SCORES:  # for debugging!
+                        if total > 1:
+                            correct = random.randrange(max(total - 2, 1), total + 1)
+                        else:
+                            correct = total
+
+                    graded = module_descriptor.graded
+                    if not total > 0:
+                        # We simply cannot grade a problem that is 12/0, because we might need it as a percentage
+                        graded = False
+
+                    scores.append(
+                        Score(
+                            correct,
+                            total,
+                            graded,
+                            module_descriptor.display_name_with_default,
+                            module_descriptor.location
                         )
+                    )
-                    __, graded_total = graders.aggregate_scores(scores, section_name)
-                    if keep_raw_scores:
-                        raw_scores += scores
-                else:
-                    graded_total = Score(0.0, 1.0, True, section_name, None)
+                __, graded_total = graders.aggregate_scores(scores, section_name)
+                if keep_raw_scores:
+                    raw_scores += scores
-                #Add the graded total to totaled_scores
+                # Add the graded total to totaled_scores
                 if graded_total.possible > 0:
                     format_scores.append(graded_total)
                 else:
diff --git a/lms/djangoapps/instructor/views/legacy.py b/lms/djangoapps/instructor/views/legacy.py
index 51e09e91fb0b..e256b65a7704 100644
--- a/lms/djangoapps/instructor/views/legacy.py
+++ b/lms/djangoapps/instructor/views/legacy.py
@@ -238,6 +238,7 @@ def domatch(student):
     elif action in ['Display grades for assignment',
                     'Export grades for assignment to remote gradebook',
                     'Export CSV file of grades for assignment']:
+        normalize_grades_enable = 1 if request.POST.get('normalize_grades', None) else 0
         log.debug(action)
         datatable = {}
         aname = request.POST.get('assignment_name', '')
@@ -249,7 +250,7 @@ def domatch(student):
             course,
             get_grades=True,
             use_offline=use_offline,
-            get_score_max=True
+            get_score_max=False if normalize_grades_enable == 1 else True
         )
         if aname not in allgrades['assignments']:
             msg += "{text}".format(
@@ -257,15 +258,18 @@ def domatch(student):
             )
         else:
             aidx = allgrades['assignments'].index(aname)
-            datatable = {'header': [_('External email'), aname, _('max_pts')]}
+            datatable = {'header': [_('External email'), aname, _('max_pts'), _('normalize')]}
             ddata = []
             # do one by one in case there is a student who has only partial grades
             for student in allgrades['students']:
                 if len(student.grades) >= aidx and student.grades[aidx] is not None:
                     ddata.append(
-                        [student.email,
-                         student.grades[aidx][0],
-                         student.grades[aidx][1]]
+                        [
+                            student.email,
+                            student.grades[aidx][0],
+                            student.grades[aidx][1],
+                            normalize_grades_enable
+                        ],
                     )
                 else:
                     log.debug(u'No grade for assignment %(idx)s (%(name)s) for student %(email)s', {
@@ -745,7 +749,7 @@ def get_student_grade_summary_data(
                     if get_score_max is True:
                         add_grade(score.section, score.earned, score.possible)
                     else:
-                        add_grade(score.section, score.earned)
+                        add_grade(score.section, (score.earned / score.possible), 1)
             else:
                 category_cnts = Counter()
                 for grade_item in gradeset['section_breakdown']:
@@ -753,7 +757,10 @@ def get_student_grade_summary_data(
                     try:
                         earned = gradeset['totaled_scores'][category][category_cnts[category]].earned
                         possible = gradeset['totaled_scores'][category][category_cnts[category]].possible
-                        add_grade(grade_item['label'], earned, possible=possible)
+                        if get_score_max is True:
+                            add_grade(grade_item['label'], earned, possible=possible)
+                        else:
+                            add_grade(grade_item['label'], grade_item['percent'], possible=1)
                     except (IndexError, KeyError):
                         add_grade(grade_item['label'], grade_item['percent'])
                     category_cnts[category] += 1
diff --git a/lms/templates/courseware/legacy_instructor_dashboard.html b/lms/templates/courseware/legacy_instructor_dashboard.html
index 2ec867f36361..dc41150e0849 100644
--- a/lms/templates/courseware/legacy_instructor_dashboard.html
+++ b/lms/templates/courseware/legacy_instructor_dashboard.html
@@ -240,7 +240,9 @@

${_("Export grades to remote gradebook")}



-
  • ${_("Assignment name:")} +
  • ${_("Assignment name:")}
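
Note on the behavior above, for reference only (not part of the patch): the new checkbox
posts a normalize_grades field, which the view reads into normalize_grades_enable and
uses to flip get_score_max when calling get_student_grade_summary_data(). With
normalization enabled, each exported cell becomes earned / possible against a fixed
maximum of 1 instead of raw points against max_pts. The sketch below only illustrates
that column shape; gradebook_row is a hypothetical helper written for this note and does
not exist in the codebase, and the zero guard is the sketch's own (the patch divides
directly).

    def gradebook_row(earned, possible, normalize_grades_enable):
        """Return (grade, max_pts) the way the legacy gradebook export lays it out."""
        if normalize_grades_enable:
            # Normalized: the grade column is a 0..1 fraction, the max column is pinned to 1.
            fraction = float(earned) / possible if possible else 0.0
            return (fraction, 1)
        # Raw: keep earned points and the real maximum, as before this change.
        return (earned, possible)

    print(gradebook_row(7, 10, 0))  # -> (7, 10)   raw points and max_pts
    print(gradebook_row(7, 10, 1))  # -> (0.7, 1)  normalized grade, max fixed at 1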