diff --git a/xmodule/capa/capa_problem.py b/xmodule/capa/capa_problem.py
index 83145486014..3bf5b78cd4a 100644
--- a/xmodule/capa/capa_problem.py
+++ b/xmodule/capa/capa_problem.py
@@ -32,8 +32,6 @@
 import xmodule.capa.inputtypes as inputtypes
 import xmodule.capa.responsetypes as responsetypes
 import xmodule.capa.xqueue_interface as xqueue_interface
-import xmodule.capa.xqueue_submission as xqueue_submission
-from xmodule.capa.xqueue_interface import get_flag_by_name
 from xmodule.capa.correctmap import CorrectMap
 from xmodule.capa.safe_exec import safe_exec
 from xmodule.capa.util import contextualize_text, convert_files_to_filenames, get_course_id_from_capa_block
diff --git a/xmodule/capa/errors.py b/xmodule/capa/errors.py
new file mode 100644
index 00000000000..f972c689ad3
--- /dev/null
+++ b/xmodule/capa/errors.py
@@ -0,0 +1,50 @@
+# errors.py
+"""
+Custom error handling for the XQueue submission interface.
+"""
+
+class XQueueSubmissionError(Exception):
+    """Base class for all XQueue submission errors."""
+    # No `pass` needed: the class already inherits everything it requires from Exception.
+
+class JSONParsingError(XQueueSubmissionError):
+    """Raised when JSON parsing fails."""
+    MESSAGE = "Error parsing {name}: {error}"
+
+    def __init__(self, name, error):
+        super().__init__(self.MESSAGE.format(name=name, error=error))
+
+class MissingKeyError(XQueueSubmissionError):
+    """Raised when a required key is missing."""
+    MESSAGE = "Missing key: {key}"
+
+    def __init__(self, key):
+        super().__init__(self.MESSAGE.format(key=key))
+
+class ValidationError(XQueueSubmissionError):
+    """Raised when a validation check fails."""
+    MESSAGE = "Validation error: {error}"
+
+    def __init__(self, error):
+        super().__init__(self.MESSAGE.format(error=error))
+
+class TypeErrorSubmission(XQueueSubmissionError):
+    """Raised when an invalid type is encountered."""
+    MESSAGE = "Type error: {error}"
+
+    def __init__(self, error):
+        super().__init__(self.MESSAGE.format(error=error))
+
+class RuntimeErrorSubmission(XQueueSubmissionError):
+    """Raised for runtime errors."""
+    MESSAGE = "Runtime error: {error}"
+
+    def __init__(self, error):
+        super().__init__(self.MESSAGE.format(error=error))
+
+class GetSubmissionParamsError(XQueueSubmissionError):
+    """Raised when there is an issue getting submission parameters."""
+    MESSAGE = "Block instance is not defined!"
+
+    def __init__(self):
+        super().__init__(self.MESSAGE)
diff --git a/xmodule/capa/inputtypes.py b/xmodule/capa/inputtypes.py
index df9e48b09b7..03a9a59134d 100644
--- a/xmodule/capa/inputtypes.py
+++ b/xmodule/capa/inputtypes.py
@@ -56,11 +56,11 @@
 from lxml import etree
 
-from xmodule.capa.xqueue_interface import XQUEUE_TIMEOUT, get_flag_by_name
+from xmodule.capa.xqueue_interface import XQUEUE_TIMEOUT
 from openedx.core.djangolib.markup import HTML, Text
 from xmodule.stringify import stringify_children
 
-from . import xqueue_interface, xqueue_submission
+from . import xqueue_interface
 from .registry import TagRegistry
 from .util import sanitize_html
diff --git a/xmodule/capa/tests/test_capa_problem.py b/xmodule/capa/tests/test_capa_problem.py
index 4e43efb5053..74cf4d096fd 100644
--- a/xmodule/capa/tests/test_capa_problem.py
+++ b/xmodule/capa/tests/test_capa_problem.py
@@ -54,7 +54,7 @@ def test_label_and_description_inside_responsetype(self, question):
         """.format(question=question)
         problem = new_loncapa_problem(xml)
         assert problem.problem_data ==\
-            {'1_2_1': {'label': question, 'descriptions': {'description_1_1_1': 'Only the paranoid survive.'}}}
+            {'1_2_1': {'label': question, 'descriptions': {'description_1_1_1': 'Only the paranoid survive.'}}}
         assert len(problem.tree.xpath('//label')) == 0
 
     @ddt.unpack
@@ -123,7 +123,7 @@ def test_neither_label_tag_nor_attribute(self, question1, question2):
         """.format(question1, question2)
         problem = new_loncapa_problem(xml)
         assert problem.problem_data ==\
-            {'1_2_1': {'label': question1, 'descriptions': {}}, '1_3_1': {'label': question2, 'descriptions': {}}}
+            {'1_2_1': {'label': question1, 'descriptions': {}}, '1_3_1': {'label': question2, 'descriptions': {}}}
         for question in (question1, question2):
             assert len(problem.tree.xpath('//label[text()="{}"]'.format(question))) == 0
@@ -146,8 +146,8 @@ def test_multiple_descriptions(self):
         """.format(desc1, desc2)
         problem = new_loncapa_problem(xml)
         assert problem.problem_data ==\
-            {'1_2_1': {'label': '___ requires sacrifices.',
-                       'descriptions': {'description_1_1_1': desc1, 'description_1_1_2': desc2}}}
+            {'1_2_1': {'label': '___ requires sacrifices.',
+                       'descriptions': {'description_1_1_1': desc1, 'description_1_1_2': desc2}}}
 
     def test_additional_answer_is_skipped_from_resulting_html(self):
         """Tests that additional_answer element is not present in transformed HTML"""
@@ -236,10 +236,10 @@ def test_multiple_questions_problem(self):
         """
         problem = new_loncapa_problem(xml)
         assert problem.problem_data ==\
-            {'1_2_1': {'label': 'Select the correct synonym of paranoid?',
-                       'descriptions': {'description_1_1_1': 'Only the paranoid survive.'}},
-             '1_3_1': {'label': 'What Apple device competed with the portable CD player?',
-                       'descriptions': {'description_1_2_1': 'Device looks like an egg plant.'}}}
+            {'1_2_1': {'label': 'Select the correct synonym of paranoid?',
+                       'descriptions': {'description_1_1_1': 'Only the paranoid survive.'}},
+             '1_3_1': {'label': 'What Apple device competed with the portable CD player?',
+                       'descriptions': {'description_1_2_1': 'Device looks like an egg plant.'}}}
         assert len(problem.tree.xpath('//label')) == 0
 
     def test_question_title_not_removed_got_children(self):
@@ -291,8 +291,8 @@ def test_multiple_inputtypes(self, group_label):
         problem = new_loncapa_problem(xml)
 
         assert problem.problem_data ==\
-            {'1_2_1': {'group_label': group_label, 'label': input1_label, 'descriptions': {}},
-             '1_2_2': {'group_label': group_label, 'label': input2_label, 'descriptions': {}}}
+            {'1_2_1': {'group_label': group_label, 'label': input1_label, 'descriptions': {}},
+             '1_2_2': {'group_label': group_label, 'label': input2_label, 'descriptions': {}}}
 
     def test_single_inputtypes(self):
         """
@@ -355,7 +355,7 @@ def assert_question_tag(self, question1, question2, tag, label_attr=False):
         )
         problem = new_loncapa_problem(xml)
         assert problem.problem_data ==\
-            {'1_2_1': {'label': question1, 'descriptions': {}}, '1_3_1': {'label': question2, 'descriptions': {}}}
+            {'1_2_1': {'label': question1, 'descriptions': {}}, '1_3_1': {'label': question2, 'descriptions': {}}}
         assert len(problem.tree.xpath('//{}'.format(tag))) == 0
 
     @ddt.unpack
diff --git a/xmodule/capa/tests/test_errors.py b/xmodule/capa/tests/test_errors.py
new file mode 100644
index 00000000000..412839a1ff2
--- /dev/null
+++ b/xmodule/capa/tests/test_errors.py
@@ -0,0 +1,43 @@
+"""
+Unit tests for custom error handling in the XQueue submission interface.
+"""
+
+import pytest
+from xmodule.capa.errors import (
+    JSONParsingError,
+    MissingKeyError,
+    ValidationError,
+    TypeErrorSubmission,
+    RuntimeErrorSubmission,
+    GetSubmissionParamsError
+)
+
+def test_json_parsing_error():
+    with pytest.raises(JSONParsingError) as excinfo:
+        raise JSONParsingError("test_name", "test_error")
+    assert str(excinfo.value) == "Error parsing test_name: test_error"
+
+def test_missing_key_error():
+    with pytest.raises(MissingKeyError) as excinfo:
+        raise MissingKeyError("test_key")
+    assert str(excinfo.value) == "Missing key: test_key"
+
+def test_validation_error():
+    with pytest.raises(ValidationError) as excinfo:
+        raise ValidationError("test_error")
+    assert str(excinfo.value) == "Validation error: test_error"
+
+def test_type_error_submission():
+    with pytest.raises(TypeErrorSubmission) as excinfo:
+        raise TypeErrorSubmission("test_error")
+    assert str(excinfo.value) == "Type error: test_error"
+
+def test_runtime_error_submission():
+    with pytest.raises(RuntimeErrorSubmission) as excinfo:
+        raise RuntimeErrorSubmission("test_error")
+    assert str(excinfo.value) == "Runtime error: test_error"
+
+def test_get_submission_params_error():
+    with pytest.raises(GetSubmissionParamsError) as excinfo:
+        raise GetSubmissionParamsError()
+    assert str(excinfo.value) == "Block instance is not defined!"
diff --git a/xmodule/capa/tests/test_inputtypes.py b/xmodule/capa/tests/test_inputtypes.py
index 5f585fbe845..4e14bc42b76 100644
--- a/xmodule/capa/tests/test_inputtypes.py
+++ b/xmodule/capa/tests/test_inputtypes.py
@@ -476,12 +476,10 @@ def test_rendering(self):
         assert context == expected
 
 
-@pytest.mark.django_db
 class MatlabTest(unittest.TestCase):
     """
     Test Matlab input types
     """
-
     def setUp(self):
         super(MatlabTest, self).setUp()  # lint-amnesty, pylint: disable=super-with-arguments
         self.rows = '10'
@@ -930,40 +928,6 @@ def test_matlab_sanitize_msg(self):
         expected = ""
         assert self.the_input._get_render_context()['msg'] == expected  # pylint: disable=protected-access
 
-    @patch('xmodule.capa.inputtypes.get_flag_by_name', return_value=True)
-    @patch('xmodule.capa.inputtypes.datetime')
-    def test_plot_data_with_flag_active(self, mock_datetime, mock_get_flag_by_name):
-        """
-        Test that the correct date format is used when the flag is active.
-        """
-        mock_datetime.utcnow.return_value.strftime.return_value = 'formatted_date_with_flag'
-        data = {'submission': 'x = 1234;'}
-        response = self.the_input.handle_ajax("plot", data)
-        self.the_input.capa_system.xqueue.interface.send_to_queue.assert_called_with(header=ANY, body=ANY)
-        assert response['success']
-        assert self.the_input.input_state['queuekey'] is not None
-        assert self.the_input.input_state['queuestate'] == 'queued'
-        assert 'formatted_date_with_flag' in self.the_input.capa_system.xqueue.interface.send_to_queue.call_args[1][
-            'body'
-        ]
-
-    @patch('xmodule.capa.inputtypes.get_flag_by_name', return_value=False)
-    @patch('xmodule.capa.inputtypes.datetime')
-    def test_plot_data_with_flag_inactive(self, mock_datetime, mock_get_flag_by_name):
-        """
-        Test that the correct date format is used when the flag is inactive.
- """ - mock_datetime.utcnow.return_value.strftime.return_value = 'formatted_date_without_flag' - data = {'submission': 'x = 1234;'} - response = self.the_input.handle_ajax("plot", data) - self.the_input.capa_system.xqueue.interface.send_to_queue.assert_called_with(header=ANY, body=ANY) - assert response['success'] - assert self.the_input.input_state['queuekey'] is not None - assert self.the_input.input_state['queuestate'] == 'queued' - assert 'formatted_date_without_flag' in self.the_input.capa_system.xqueue.interface.send_to_queue.call_args[1][ - 'body' - ] - def html_tree_equal(received, expected): """ @@ -983,7 +947,6 @@ class SchematicTest(unittest.TestCase): """ Check that schematic inputs work """ - def test_rendering(self): height = '12' width = '33' @@ -1039,7 +1002,6 @@ class ImageInputTest(unittest.TestCase): """ Check that image inputs work """ - def check(self, value, egx, egy): # lint-amnesty, pylint: disable=missing-function-docstring height = '78' width = '427' @@ -1099,7 +1061,6 @@ class CrystallographyTest(unittest.TestCase): """ Check that crystallography inputs work """ - def test_rendering(self): height = '12' width = '33' @@ -1141,7 +1102,6 @@ class VseprTest(unittest.TestCase): """ Check that vsepr inputs work """ - def test_rendering(self): height = '12' width = '33' @@ -1189,7 +1149,6 @@ class ChemicalEquationTest(unittest.TestCase): """ Check that chemical equation inputs work. """ - def setUp(self): super(ChemicalEquationTest, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments self.size = "42" @@ -1285,7 +1244,6 @@ class FormulaEquationTest(unittest.TestCase): """ Check that formula equation inputs work. """ - def setUp(self): super(FormulaEquationTest, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments self.size = "42" @@ -1434,7 +1392,6 @@ class DragAndDropTest(unittest.TestCase): """ Check that drag and drop inputs work """ - def test_rendering(self): path_to_images = '/dummy-static/images/' @@ -1509,7 +1466,6 @@ class AnnotationInputTest(unittest.TestCase): """ Make sure option inputs work """ - def test_rendering(self): xml_str = ''' @@ -1670,7 +1626,6 @@ class TestStatus(unittest.TestCase): """ Tests for Status class """ - def test_str(self): """ Test stringifing Status objects diff --git a/xmodule/capa/tests/test_responsetypes.py b/xmodule/capa/tests/test_responsetypes.py index 6e52175c53f..e8df8894c78 100644 --- a/xmodule/capa/tests/test_responsetypes.py +++ b/xmodule/capa/tests/test_responsetypes.py @@ -39,8 +39,6 @@ ) from xmodule.capa.util import convert_files_to_filenames from xmodule.capa.xqueue_interface import dateformat -import xmodule.capa.xqueue_submission as xqueue_submission -from xmodule.capa.xqueue_interface import get_flag_by_name class ResponseTest(unittest.TestCase): @@ -931,7 +929,6 @@ def test_empty_answer_graded_as_incorrect(self): self.assert_grade(problem, " ", "incorrect") -@pytest.mark.django_db class CodeResponseTest(ResponseTest): # pylint: disable=missing-class-docstring xml_factory_class = CodeResponseXMLFactory @@ -947,12 +944,8 @@ def setUp(self): @staticmethod def make_queuestate(key, time): """Create queuestate dict""" - if get_flag_by_name('send_to_submission_course.enable'): - timestr = datetime.strftime(time, xqueue_submission.dateformat) - return {'key': key, 'time': timestr} - else: - timestr = datetime.strftime(time, dateformat) - return {'key': key, 'time': timestr} + timestr = datetime.strftime(time, dateformat) + return {'key': key, 'time': timestr} def test_is_queued(self): """ diff 
diff --git a/xmodule/capa/tests/test_xqueue_interface.py b/xmodule/capa/tests/test_xqueue_interface.py
index bb73ed42f94..07529bafe8b 100644
--- a/xmodule/capa/tests/test_xqueue_interface.py
+++ b/xmodule/capa/tests/test_xqueue_interface.py
@@ -46,30 +46,27 @@ def test_construct_callback_with_flag_enabled(self, mock_flag):
         course_id = str(usage_id.course_key)
         callback_url = f"courses/{course_id}/xqueue/user1/{usage_id}"
 
-        assert self.service.construct_callback() == f"{settings.LMS_ROOT_URL}/{callback_url}/score_update10"
+        assert self.service.construct_callback() == f"{settings.LMS_ROOT_URL}/{callback_url}/score_update"
         assert self.service.construct_callback("alt_dispatch") == (
-            f"{settings.LMS_ROOT_URL}/{callback_url}/alt_dispatch10"
+            f"{settings.LMS_ROOT_URL}/{callback_url}/alt_dispatch"
        )
 
         custom_callback_url = "http://alt.url"
         with override_settings(XQUEUE_INTERFACE={**settings.XQUEUE_INTERFACE, "callback_url": custom_callback_url}):
-            assert self.service.construct_callback() == f"{custom_callback_url}/{callback_url}/score_update10"
+            assert self.service.construct_callback() == f"{custom_callback_url}/{callback_url}/score_update"
 
     @patch("xmodule.capa.xqueue_interface.is_flag_active", return_value=False)
     def test_construct_callback_with_flag_disabled(self, mock_flag):
         """Test construct_callback when the waffle flag is disabled."""
         usage_id = self.block.scope_ids.usage_id
-        course_id = str(usage_id.course_key)
-        callback_url = f"courses/{course_id}/xqueue/user1/{usage_id}"
+        callback_url = f'courses/{usage_id.context_key}/xqueue/user1/{usage_id}'
 
-        assert self.service.construct_callback() == f"{settings.LMS_ROOT_URL}/{callback_url}/score_update"
-        assert self.service.construct_callback("alt_dispatch") == (
-            f"{settings.LMS_ROOT_URL}/{callback_url}/alt_dispatch"
-        )
+        assert self.service.construct_callback() == f'{settings.LMS_ROOT_URL}/{callback_url}/score_update'
+        assert self.service.construct_callback('alt_dispatch') == f'{settings.LMS_ROOT_URL}/{callback_url}/alt_dispatch'
 
-        custom_callback_url = "http://alt.url"
-        with override_settings(XQUEUE_INTERFACE={**settings.XQUEUE_INTERFACE, "callback_url": custom_callback_url}):
-            assert self.service.construct_callback() == f"{custom_callback_url}/{callback_url}/score_update"
+        custom_callback_url = 'http://alt.url'
+        with override_settings(XQUEUE_INTERFACE={**settings.XQUEUE_INTERFACE, 'callback_url': custom_callback_url}):
+            assert self.service.construct_callback() == f'{custom_callback_url}/{callback_url}/score_update'
 
     def test_default_queuename(self):
         """Check the format of the default queue name."""
@@ -90,7 +87,8 @@ def test_send_to_queue_with_flag_enabled(mock_send_to_submission, mock_flag):
     """Test send_to_queue when the waffle flag is enabled."""
     url = "http://example.com/xqueue"
     django_auth = {"username": "user", "password": "pass"}
-    xqueue_interface = XQueueInterface(url, django_auth)
+    block = Mock()  # Mock block for the constructor
+    xqueue_interface = XQueueInterface(url, django_auth, block=block)
 
     header = json.dumps({
         "lms_callback_url": (
@@ -117,7 +115,8 @@ def test_send_to_queue_with_flag_disabled(mock_http_post, mock_flag):
     """Test send_to_queue when the waffle flag is disabled."""
     url = "http://example.com/xqueue"
     django_auth = {"username": "user", "password": "pass"}
-    xqueue_interface = XQueueInterface(url, django_auth)
+    block = Mock()  # Mock block for the constructor
+    xqueue_interface = XQueueInterface(url, django_auth, block=block)
 
     header = json.dumps({
         "lms_callback_url": (
diff --git a/xmodule/capa/tests/test_xqueue_submission.py b/xmodule/capa/tests/test_xqueue_submission.py
index 51858ada03e..158bcf73310 100644
--- a/xmodule/capa/tests/test_xqueue_submission.py
+++ b/xmodule/capa/tests/test_xqueue_submission.py
@@ -21,17 +21,17 @@ def xqueue_service():
         "ExampleProblem"
     )
     block = Mock(scope_ids=ScopeIds('user1', 'mock_problem', location, location))
-    return XQueueInterfaceSubmission()
+    return XQueueInterfaceSubmission(block)
 
 
-def test_extract_item_data():
+def test_get_submission_params(xqueue_service):
     """
     Test extracting item data from an xqueue submission.
     """
     header = json.dumps({
         'lms_callback_url': (
-            'http://example.com/courses/course-v1:org+course+run/xqueue/5/'
-            'block-v1:org+course+run+type@problem+block@item_id/5.0'
+            'http://example.com/courses/course-v1:test_org+test_course+test_run/xqueue/5/'
+            'block-v1:test_org+test_course+test_run+type@problem+block@item_id/'
         ),
         'queue_name': 'default'
     })
@@ -42,21 +42,21 @@
     })
 
     student_item, student_answer, queue_name, grader, score = (
-        XQueueInterfaceSubmission().extract_item_data(header, payload)
+        xqueue_service.get_submission_params(header, payload)
     )
 
     assert student_item == {
         'item_id': (
-            'block-v1:org+course+run+type@problem+block@item_id'
+            'block-v1:test_org+test_course+test_run+type@mock_problem+block@ExampleProblem'
        ),
-        'item_type': 'problem',
-        'course_id': 'course-v1:org+course+run',
+        'item_type': 'mock_problem',
+        'course_id': 'course-v1:test_org+test_course+test_run',
         'student_id': 'student_id'
     }
     assert student_answer == 'student_answer'
     assert queue_name == 'default'
    assert grader == 'test.py'
-    assert score == 5.0
+    assert score == xqueue_service.block.max_score()  # Mocked max_score value
 
 
 @pytest.mark.django_db
@@ -68,8 +68,7 @@ def test_send_to_submission(mock_create_submission, xqueue_service):
     header = json.dumps({
         'lms_callback_url': (
             'http://example.com/courses/course-v1:test_org+test_course+test_run/xqueue/5/'
-            'block-v1:test_org+test_course+test_run+type@problem+block@item_id/'
-            '0.85'
+            'block-v1:test_org+test_course+test_run+type@mock_problem+block@ExampleProblem/'
         ),
     })
     body = json.dumps({
@@ -79,7 +78,7 @@
     })
 
     with patch('lms.djangoapps.courseware.models.StudentModule.objects.filter') as mock_filter:
-        mock_filter.return_value.first.return_value = Mock(grade=0.85)
+        mock_filter.return_value.first.return_value = Mock(grade=10)
 
         mock_create_submission.return_value = {'submission': 'mock_submission'}
 
@@ -91,15 +90,16 @@
         assert result['submission'] == 'mock_submission'
         mock_create_submission.assert_called_once_with(
             {
-                'item_id': 'block-v1:test_org+test_course+test_run+type@problem+block@item_id',
-                'item_type': 'problem',
+                'item_id': 'block-v1:test_org+test_course+test_run+type@mock_problem+block@ExampleProblem',
+                'item_type': 'mock_problem',
                 'course_id': 'course-v1:test_org+test_course+test_run',
                 'student_id': 'student_id'
             },
             'student_answer',
-            queue_name='default',
+            queue_name='default',
             grader='test.py',
-            score=0.85
+            score=xqueue_service.block.max_score(),  # Mocked max_score value
+            file=None
         )
@@ -112,7 +112,7 @@ def test_send_to_submission_with_missing_fields(mock_create_submission, xqueue_s
     header = json.dumps({
         'lms_callback_url': (
             'http://example.com/courses/course-v1:test_org+test_course+test_run/xqueue/5/'
-            'block@item_id/5.0'
+            'block@item_id/'
         )
     })
     body = json.dumps({
diff --git a/xmodule/capa/xqueue_interface.py b/xmodule/capa/xqueue_interface.py
index ef02950028e..a3d2bd7609b 100644
--- a/xmodule/capa/xqueue_interface.py
+++ b/xmodule/capa/xqueue_interface.py
@@ -106,11 +106,15 @@ class XQueueInterface:
     Interface to the external grading system
     """
 
-    def __init__(self, url: str, django_auth: Dict[str, str], requests_auth: Optional[HTTPBasicAuth] = None):
+    def __init__(self, url: str, django_auth: Dict[str, str],
+                 requests_auth: Optional[HTTPBasicAuth] = None,
+                 block: 'ProblemBlock' = None):
         self.url = url
         self.auth = django_auth
         self.session = requests.Session()
         self.session.auth = requests_auth
+        self.block = block
+        self.submission = XQueueInterfaceSubmission(self.block)
 
     def send_to_queue(self, header, body, files_to_upload=None):
         """
@@ -165,16 +169,17 @@ def _send_to_queue(self, header, body, files_to_upload):  # lint-amnesty, pylint
         for f in files_to_upload:
             files.update({f.name: f})
 
-        header_info = json.loads(header)
-        course_id = get_course_id(header_info['lms_callback_url'])
+        course_id = str(self.block.scope_ids.usage_id.context_key)
 
         if is_flag_active('send_to_submission_course.enable', course_id):
             # Use the new edx-submissions workflow
-            submission = XQueueInterfaceSubmission().send_to_submission(header, body, files)
+            submission = self.submission.send_to_submission(header, body, files)
             log.error(submission)
             return None, ''
         else:
-            return self._http_post(self.url + '/xqueue/submit/', payload, files=files)
+            return self._http_post(
+                self.url + '/xqueue/submit/', payload, files=files
+            )
 
     def _http_post(self, url, data, files=None):  # lint-amnesty, pylint: disable=missing-function-docstring
         try:
@@ -204,15 +209,16 @@ class XQueueService:
     """
 
     def __init__(self, block: 'ProblemBlock'):
-        #breakpoint()
         basic_auth = settings.XQUEUE_INTERFACE.get('basic_auth')
         requests_auth = HTTPBasicAuth(*basic_auth) if basic_auth else None
         self._interface = XQueueInterface(
-            settings.XQUEUE_INTERFACE['url'], settings.XQUEUE_INTERFACE['django_auth'], requests_auth
+            settings.XQUEUE_INTERFACE['url'], settings.XQUEUE_INTERFACE['django_auth'], requests_auth,
+            block=block
        )
 
         self._block = block
 
+
     @property
     def interface(self):
         """
@@ -220,36 +226,33 @@ def interface(self):
         """
         return self._interface
 
-    def construct_callback(self, dispatch: str = 'score_update') -> str:
+    def construct_callback(self, dispatch: str = "score_update") -> str:
         """
-        Return a fully qualified callback URL for external queueing system.
+        Return a fully qualified callback URL for the external queueing system.
""" - dispatch_callback = "callback_submission" + course_id = str(self._block.scope_ids.usage_id.context_key) + userid = str(self._block.scope_ids.user_id) + mod_id = str(self._block.scope_ids.usage_id) + + callback_type = ( + "callback_submission" + if is_flag_active("send_to_submission_course.enable", course_id) + else "xqueue_callback" + ) + relative_xqueue_callback_url = reverse( - dispatch_callback, - kwargs=dict( - course_id=str(self._block.scope_ids.usage_id.context_key), - userid=str(self._block.scope_ids.user_id), - mod_id=str(self._block.scope_ids.usage_id), - dispatch=dispatch, - )) - xqueue_callback_url_prefix = settings.XQUEUE_INTERFACE.get('callback_url', settings.LMS_ROOT_URL) - lms_callback_url = xqueue_callback_url_prefix + relative_xqueue_callback_url - course_id = get_course_id(lms_callback_url) - if is_flag_active('send_to_submission_course.enable', course_id): - return xqueue_callback_url_prefix + relative_xqueue_callback_url + str(self._block.max_score()) - else: - dispatch_callback = 'xqueue_callback' - relative_xqueue_callback_url = reverse( - dispatch_callback, - kwargs=dict( - course_id=str(self._block.scope_ids.usage_id.context_key), - userid=str(self._block.scope_ids.user_id), - mod_id=str(self._block.scope_ids.usage_id), - dispatch=dispatch, - )) - xqueue_callback_url_prefix = settings.XQUEUE_INTERFACE.get('callback_url', settings.LMS_ROOT_URL) - return xqueue_callback_url_prefix + relative_xqueue_callback_url + callback_type, + kwargs={ + "course_id": course_id, + "userid": userid, + "mod_id": mod_id, + "dispatch": dispatch, + }, + ) + + xqueue_callback_url_prefix = settings.XQUEUE_INTERFACE.get("callback_url", settings.LMS_ROOT_URL) + return f"{xqueue_callback_url_prefix}{relative_xqueue_callback_url}" + @property def default_queuename(self) -> str: diff --git a/xmodule/capa/xqueue_submission.py b/xmodule/capa/xqueue_submission.py index 414218f5cd5..3f128448485 100644 --- a/xmodule/capa/xqueue_submission.py +++ b/xmodule/capa/xqueue_submission.py @@ -5,95 +5,73 @@ import json import logging -import re -from opaque_keys.edx.keys import CourseKey + +from xmodule.capa.errors import ( + GetSubmissionParamsError, + JSONParsingError, + MissingKeyError, + ValidationError, + TypeErrorSubmission, + RuntimeErrorSubmission +) log = logging.getLogger(__name__) class XQueueInterfaceSubmission: """Interface to the external grading system.""" + def __init__(self, block): + self.block = block + def _parse_json(self, data, name): """Helper function to parse JSON safely.""" try: return json.loads(data) if isinstance(data, str) else data except json.JSONDecodeError as e: - raise ValueError(f"Error parsing {name}: {e}") from e - - def _extract_identifiers(self, callback_url): - """Extracts identifiers from the callback URL.""" - item_id_match = re.search(r'block@([^\/]+)', callback_url) - item_type_match = re.search(r'type@([^+]+)', callback_url) - course_id_match = re.search(r'(course-v1:[^\/]+)', callback_url) - max_score_match = re.search(r'(\d+(\.\d+)?)$', callback_url) - - if not all([item_id_match, item_type_match, course_id_match, max_score_match]): - raise ValueError("The callback_url does not contain the required information.") - - return ( - item_id_match.group(1), - item_type_match.group(1), - course_id_match.group(1), - float(max_score_match.group(1)) - ) - - def extract_item_data(self, header, payload): + raise JSONParsingError(name, str(e)) from e + + def get_submission_params(self, header, payload): """ Extracts student submission data from the given header and 
payload. """ - header = self._parse_json(header, "header") payload = self._parse_json(payload, "payload") - callback_url = header.get('lms_callback_url') queue_name = header.get('queue_name', 'default') - if not callback_url: - raise ValueError("The header does not contain 'lms_callback_url'.") - - item_id, item_type, course_id, max_score = self._extract_identifiers(callback_url) - - student_info = self._parse_json(payload["student_info"], "student_info") - - full_block_id = None + if not self.block: + raise GetSubmissionParamsError() - try: - full_block_id = ( - f"block-v1:{course_id.replace('course-v1:', '')}+type@{item_type}+block@{item_id}" - ) - except Exception as e: - raise ValueError( - f"Error creating BlockUsageLocator. Invalid ID: {full_block_id}, Error: {e}" - ) from e + course_id = str(self.block.scope_ids.usage_id.context_key) + item_type = self.block.scope_ids.block_type + block_id = self.block.scope_ids.def_id.block_id + score = self.block.max_score() - try: - course_key = CourseKey.from_string(course_id) - except Exception as e: - raise ValueError(f"Error creating CourseKey: {e}") from e + item_id = f"block-v1:{course_id.replace('course-v1:', '')}+type@{item_type}+block@{block_id}" try: grader_payload = self._parse_json(payload["grader_payload"], "grader_payload") grader = grader_payload.get("grader", '') except KeyError as e: - raise ValueError(f"Error in payload: {e}") from e + raise MissingKeyError("grader_payload") from e + student_info = self._parse_json(payload["student_info"], "student_info") student_id = student_info.get("anonymous_student_id") + if not student_id: - raise ValueError("The field 'anonymous_student_id' is missing from student_info.") + raise ValidationError("The field 'anonymous_student_id' is missing from student_info.") + + student_answer = payload.get("student_response") + if student_answer is None: + raise ValidationError("The field 'student_response' does not exist.") student_dict = { - 'item_id': full_block_id, + 'item_id': item_id, 'item_type': item_type, 'course_id': course_id, 'student_id': student_id } - student_answer = payload.get("student_response") - if student_answer is None: - raise ValueError("The field 'student_response' does not exist.") - - score = max_score - return student_dict, student_answer, queue_name, grader, score def send_to_submission(self, header, body, files_to_upload=None): @@ -103,25 +81,31 @@ def send_to_submission(self, header, body, files_to_upload=None): from submissions.api import create_submission try: - student_item, answer, queue_name, grader, score = self.extract_item_data(header, body) - return create_submission(student_item, answer, queue_name=queue_name, grader=grader, score=score) - - except json.JSONDecodeError as e: - log.error("JSON decoding error: %s", e) - return {"error": "Invalid JSON format"} + student_item, answer, queue_name, grader, score = self.get_submission_params(header, body) + return create_submission( + student_item, + answer, + queue_name=queue_name, + grader=grader, + score=score, + file=files_to_upload + ) + except JSONParsingError as e: + log.error("%s", e) + return {"error": str(e)} - except KeyError as e: - log.error("Missing key: %s", e) - return {"error": f"Missing key: {e}"} + except MissingKeyError as e: + log.error("%s", e) + return {"error": str(e)} - except ValueError as e: - log.error("Validation error: %s", e) - return {"error": f"Validation error: {e}"} + except ValidationError as e: + log.error("%s", e) + return {"error": str(e)} except TypeError as e: - log.error("Type 
error: %s", e) - return {"error": f"Type error: {e}"} + log.error("%s", e) + raise TypeErrorSubmission(str(e)) from e except RuntimeError as e: - log.error("Runtime error: %s", e) - return {"error": f"Runtime error: {e}"} + log.error("%s", e) + raise RuntimeErrorSubmission(str(e)) from e diff --git a/xmodule/docs/decisions/0005-send-data-to-edx-submission.rst b/xmodule/docs/decisions/0005-send-data-to-edx-submission.rst index 5e8d4536727..1cbbf3580ae 100644 --- a/xmodule/docs/decisions/0005-send-data-to-edx-submission.rst +++ b/xmodule/docs/decisions/0005-send-data-to-edx-submission.rst @@ -7,7 +7,7 @@ Status Accepted. -2025-02-21 +2025-02-27 Context ******* @@ -27,39 +27,13 @@ Key changes include: 3. **`construct_callback` Method Update**: Within the `XQueueService` class in `xqueue_interfaces.py`, the `construct_callback` method was modified. This method generates the callback URL that `XQueue` or `edx-submission` will use to return the evaluation results. The method now checks the state of the `send_to_submission_course.enable` *waffle flag* to determine whether the callback URL should point to the `edx-submission` handler (`callback_submission`) or to the original `XQueue` handler (`xqueue_callback`). - Additionally, the method now appends the `max_score` value to the callback URL when using `edx-submission`. This ensures that `xqueue_submission.py` can extract and pass the `max_score` value correctly to `create_submission`. - - **Updated Logic in `construct_callback`**: - .. code-block:: python - - if is_flag_active('send_to_submission_course.enable', course_id): - return f"{xqueue_callback_url_prefix}{relative_xqueue_callback_url}{self.block.max_score()}" - else: - return f"{xqueue_callback_url_prefix}{relative_xqueue_callback_url}" + The updated logic in `construct_callback` involves checking the state of the waffle flag and constructing the callback URL accordingly. If the flag is active for a given course, the callback URL is constructed for `edx-submission`; otherwise, it is constructed for `XQueue`. 4. **Implementation of `send_to_submission` in `xqueue_submission.py`**: The `send_to_submission` function was developed in the `xqueue_submission.py` file. This function is responsible for: - - **Parse Submission Data**: Extracts and processes relevant information from the student response, including identifiers such as `course_id`, `item_id`, `student_id`, and now `max_score`. - - - **Extraction of `max_score` from Callback URL**: The function was updated to extract `max_score` using a regex pattern from the callback URL. + - **Parsing Submission Data**: Extracts and processes relevant information from the student response, including identifiers such as `course_id`, `item_id`, `student_id`, and `max_score`. - .. code-block:: python - - def _extract_identifiers(self, callback_url): - max_score_match = re.search(r'(\d+(\.\d+)?)$', callback_url) - if max_score_match: - max_score = float(max_score_match.group(1)) - else: - raise ValueError("The callback URL does not contain max_score.") - return max_score - - **Interaction with `edx-submission`**: Uses the API provided by `edx-submission` to create a new submission record in the submissions database, ensuring that the student response is stored and processed appropriately with the correct `max_score`. - .. 
code-block:: python - - def send_to_submission(self, header, body, files_to_upload=None): - student_item, student_answer, queue_name, grader, max_score = self.extract_item_data(header, body) - return create_submission(student_item, student_answer, queue_name=queue_name, grader=grader, score=max_score) - Configuration for Xqueue-watcher: Prerequisites
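
Reviewer note (not part of the change set): the ADR above now describes the `construct_callback` flag check in prose only. The sketch below restates that logic for readability, mirroring the names already used in the `xqueue_interface.py` diff (`is_flag_active`, the block's `scope_ids`, and the `XQUEUE_INTERFACE` settings); the standalone function name `build_callback_url` is illustrative, and the snippet should be read as a sketch of the intended behavior, not as the authoritative implementation.

.. code-block:: python

   # Illustrative sketch only: mirrors the construct_callback change in xqueue_interface.py.
   from django.conf import settings
   from django.urls import reverse

   from xmodule.capa.xqueue_interface import is_flag_active  # assumed importable, as patched in the tests

   def build_callback_url(block, dispatch="score_update"):
       """Pick the callback handler based on the course-scoped waffle flag."""
       course_id = str(block.scope_ids.usage_id.context_key)
       callback_type = (
           "callback_submission"   # handled by the edx-submissions flow
           if is_flag_active("send_to_submission_course.enable", course_id)
           else "xqueue_callback"  # legacy XQueue handler
       )
       relative_url = reverse(
           callback_type,
           kwargs={
               "course_id": course_id,
               "userid": str(block.scope_ids.user_id),
               "mod_id": str(block.scope_ids.usage_id),
               "dispatch": dispatch,
           },
       )
       prefix = settings.XQUEUE_INTERFACE.get("callback_url", settings.LMS_ROOT_URL)
       return f"{prefix}{relative_url}"

In both branches the URL shape is identical; only the Django URL name (and therefore the receiving handler) changes, which is why `max_score` no longer needs to be appended to the callback URL and can instead be read from the block in `xqueue_submission.py`.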