diff --git a/.readthedocs.yaml b/.readthedocs.yaml index fdef59eb56b2..efd38bc796ac 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -10,6 +10,10 @@ sphinx: python: install: + # Need to install this to set the correct version of setuptools for now + # because it is needed by fs + # See https://github.com/openedx/openedx-platform/issues/38068 for details. + - requirements: "requirements/pip-tools.txt" - requirements: "requirements/edx/doc.txt" - method: pip path: . diff --git a/Makefile b/Makefile index 92a2e37b9aac..ed6d1f2b7a41 100644 --- a/Makefile +++ b/Makefile @@ -116,7 +116,7 @@ $(COMMON_CONSTRAINTS_TXT): printf "$(COMMON_CONSTRAINTS_TEMP_COMMENT)" | cat - $(@) > temp && mv temp $(@) compile-requirements: export CUSTOM_COMPILE_COMMAND=make upgrade -compile-requirements: pre-requirements $(COMMON_CONSTRAINTS_TXT) ## Re-compile *.in requirements to *.txt +compile-requirements: pre-requirements ## Re-compile *.in requirements to *.txt @# Bootstrapping: Rebuild pip and pip-tools first, and then install them @# so that if there are any failures we'll know now, rather than the next @# time someone tries to use the outputs. 
@@ -139,7 +139,7 @@ compile-requirements: pre-requirements $(COMMON_CONSTRAINTS_TXT) ## Re-compile * export REBUILD=''; \ done -upgrade: ## update the pip requirements files to use the latest releases satisfying our constraints +upgrade: $(COMMON_CONSTRAINTS_TXT) ## update the pip requirements files to use the latest releases satisfying our constraints $(MAKE) compile-requirements COMPILE_OPTS="--upgrade" upgrade-package: ## update just one package to the latest usable release diff --git a/cms/djangoapps/cms_user_tasks/signals.py b/cms/djangoapps/cms_user_tasks/signals.py index 40bfd5781825..e6ddba747d5f 100644 --- a/cms/djangoapps/cms_user_tasks/signals.py +++ b/cms/djangoapps/cms_user_tasks/signals.py @@ -12,6 +12,7 @@ from cms.djangoapps.contentstore.toggles import bypass_olx_failure_enabled from cms.djangoapps.contentstore.utils import course_import_olx_validation_is_enabled +from openedx.core.djangoapps.content_libraries.api import is_library_backup_task, is_library_restore_task from .tasks import send_task_complete_email @@ -64,6 +65,28 @@ def get_olx_validation_from_artifact(): if olx_artifact and not bypass_olx_failure_enabled(): return olx_artifact.text + def should_skip_end_of_task_email(task_name) -> bool: + """ + Studio tasks generally send an email when finished, but not always. + + Some tasks can last many minutes, e.g. course import/export. For these + tasks, there is a high chance that the user has navigated away and will + want to check back in later. Yet email notification is unnecessary and + distracting for things like the Library restore task, which is + relatively quick and cannot be resumed (i.e. if you navigate away, you + have to upload again). + + The task_name passed in will be lowercase. + """ + # We currently have to pattern match on the name to differentiate + # between tasks. A better long term solution would be to add a separate + # task type identifier field to Django User Tasks. 
+ return ( + is_library_content_update(task_name) or + is_library_backup_task(task_name) or + is_library_restore_task(task_name) + ) + status = kwargs['status'] # Only send email when the entire task is complete, should only send when @@ -72,7 +95,7 @@ def get_olx_validation_from_artifact(): task_name = status.name.lower() # Also suppress emails on library content XBlock updates (too much like spam) - if is_library_content_update(task_name): + if should_skip_end_of_task_email(task_name): LOGGER.info(f"Suppressing end-of-task email on task {task_name}") return diff --git a/cms/djangoapps/contentstore/api/tests/test_validation.py b/cms/djangoapps/contentstore/api/tests/test_validation.py index 4e0a9bbce666..4928d31dc1f2 100644 --- a/cms/djangoapps/contentstore/api/tests/test_validation.py +++ b/cms/djangoapps/contentstore/api/tests/test_validation.py @@ -2,9 +2,11 @@ Tests for the course import API views """ - +import factory from datetime import datetime +from django.conf import settings +import ddt from django.test.utils import override_settings from django.urls import reverse from rest_framework import status @@ -12,10 +14,13 @@ from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory, BlockFactory +from common.djangoapps.course_modes.models import CourseMode +from common.djangoapps.course_modes.tests.factories import CourseModeFactory from common.djangoapps.student.tests.factories import StaffFactory from common.djangoapps.student.tests.factories import UserFactory +@ddt.ddt @override_settings(PROCTORING_BACKENDS={'DEFAULT': 'proctortrack', 'proctortrack': {}}) class CourseValidationViewTest(SharedModuleStoreTestCase, APITestCase): """ @@ -82,39 +87,54 @@ def test_student_fails(self): resp = self.client.get(self.get_url(self.course_key)) self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN) - def test_staff_succeeds(self): - self.client.login(username=self.staff.username, 
password=self.password) - resp = self.client.get(self.get_url(self.course_key), {'all': 'true'}) - self.assertEqual(resp.status_code, status.HTTP_200_OK) - expected_data = { - 'assignments': { - 'total_number': 1, - 'total_visible': 1, - 'assignments_with_dates_before_start': [], - 'assignments_with_dates_after_end': [], - 'assignments_with_ora_dates_after_end': [], - 'assignments_with_ora_dates_before_start': [], - }, - 'dates': { - 'has_start_date': True, - 'has_end_date': False, - }, - 'updates': { - 'has_update': True, - }, - 'certificates': { - 'is_enabled': False, - 'is_activated': False, - 'has_certificate': False, - }, - 'grades': { - 'has_grading_policy': False, - 'sum_of_weights': 1.0, - }, - 'proctoring': { - 'needs_proctoring_escalation_email': True, - 'has_proctoring_escalation_email': True, - }, - 'is_self_paced': True, - } - self.assertDictEqual(resp.data, expected_data) + @ddt.data( + (False, False), + (True, False), + (False, True), + (True, True), + ) + @ddt.unpack + def test_staff_succeeds(self, certs_html_view, with_modes): + features = dict(settings.FEATURES, CERTIFICATES_HTML_VIEW=certs_html_view) + with override_settings(FEATURES=features): + if with_modes: + CourseModeFactory.create_batch( + 2, + course_id=self.course.id, + mode_slug=factory.Iterator([CourseMode.AUDIT, CourseMode.VERIFIED]), + ) + self.client.login(username=self.staff.username, password=self.password) + resp = self.client.get(self.get_url(self.course_key), {'all': 'true'}) + self.assertEqual(resp.status_code, status.HTTP_200_OK) + expected_data = { + 'assignments': { + 'total_number': 1, + 'total_visible': 1, + 'assignments_with_dates_before_start': [], + 'assignments_with_dates_after_end': [], + 'assignments_with_ora_dates_after_end': [], + 'assignments_with_ora_dates_before_start': [], + }, + 'dates': { + 'has_start_date': True, + 'has_end_date': False, + }, + 'updates': { + 'has_update': True, + }, + 'certificates': { + 'is_enabled': with_modes, + 'is_activated': False, + 
'has_certificate': False, + }, + 'grades': { + 'has_grading_policy': False, + 'sum_of_weights': 1.0, + }, + 'proctoring': { + 'needs_proctoring_escalation_email': True, + 'has_proctoring_escalation_email': True, + }, + 'is_self_paced': True, + } + self.assertDictEqual(resp.data, expected_data) diff --git a/cms/djangoapps/contentstore/models.py b/cms/djangoapps/contentstore/models.py index f5ee218f3e11..a4f2ce3c6119 100644 --- a/cms/djangoapps/contentstore/models.py +++ b/cms/djangoapps/contentstore/models.py @@ -7,12 +7,12 @@ from config_models.models import ConfigurationModel from django.db import models -from django.db.models import QuerySet, OuterRef, Case, When, Exists, Value, ExpressionWrapper -from django.db.models.fields import IntegerField, TextField, BooleanField +from django.db.models import Case, Exists, ExpressionWrapper, OuterRef, Q, QuerySet, Value, When +from django.db.models.fields import BooleanField, IntegerField, TextField from django.db.models.functions import Coalesce from django.db.models.lookups import GreaterThan from django.utils.translation import gettext_lazy as _ -from opaque_keys.edx.django.models import CourseKeyField, ContainerKeyField, UsageKeyField +from opaque_keys.edx.django.models import ContainerKeyField, CourseKeyField, UsageKeyField from opaque_keys.edx.keys import CourseKey, UsageKey from opaque_keys.edx.locator import LibraryContainerLocator from openedx_learning.api.authoring import get_published_version @@ -23,7 +23,6 @@ manual_date_time_field, ) - logger = logging.getLogger(__name__) @@ -391,7 +390,7 @@ def filter_links( cls.objects.filter(**link_filter).select_related(*RELATED_FIELDS), ) if ready_to_sync is not None: - result = result.filter(ready_to_sync=ready_to_sync) + result = result.filter(Q(ready_to_sync=ready_to_sync) | Q(ready_to_sync_from_children=ready_to_sync)) # Handle top-level parents logic if use_top_level_parents: @@ -436,6 +435,11 @@ def _annotate_query_with_ready_to_sync(cls, query_set: 
QuerySet["EntityLinkBase" ), then=1 ), + # If upstream block was deleted, set ready_to_sync = True + When( + Q(upstream_container__publishable_entity__published__version__version_num__isnull=True), + then=1 + ), default=0, output_field=models.IntegerField() ) @@ -457,6 +461,11 @@ def _annotate_query_with_ready_to_sync(cls, query_set: QuerySet["EntityLinkBase" ), then=1 ), + # If upstream block was deleted, set ready_to_sync = True + When( + Q(upstream_block__publishable_entity__published__version__version_num__isnull=True), + then=1 + ), default=0, output_field=models.IntegerField() ) diff --git a/cms/djangoapps/contentstore/rest_api/v1/serializers/home.py b/cms/djangoapps/contentstore/rest_api/v1/serializers/home.py index bbc45ddf9a37..8ee8cb035478 100644 --- a/cms/djangoapps/contentstore/rest_api/v1/serializers/home.py +++ b/cms/djangoapps/contentstore/rest_api/v1/serializers/home.py @@ -28,26 +28,11 @@ class LibraryViewSerializer(serializers.Serializer): org = serializers.CharField() number = serializers.CharField() can_edit = serializers.BooleanField() - is_migrated = serializers.SerializerMethodField() - migrated_to_title = serializers.CharField( - source="migrations__target__title", - required=False - ) - migrated_to_key = serializers.CharField( - source="migrations__target__key", - required=False - ) - migrated_to_collection_key = serializers.CharField( - source="migrations__target_collection__key", - required=False - ) - migrated_to_collection_title = serializers.CharField( - source="migrations__target_collection__title", - required=False - ) - - def get_is_migrated(self, obj): - return "migrations__target__key" in obj + is_migrated = serializers.BooleanField() + migrated_to_title = serializers.CharField(required=False) + migrated_to_key = serializers.CharField(required=False) + migrated_to_collection_key = serializers.CharField(required=False) + migrated_to_collection_title = serializers.CharField(required=False) class 
CourseHomeTabSerializer(serializers.Serializer): diff --git a/cms/djangoapps/contentstore/rest_api/v1/views/home.py b/cms/djangoapps/contentstore/rest_api/v1/views/home.py index a4e93de9caff..95723020c11f 100644 --- a/cms/djangoapps/contentstore/rest_api/v1/views/home.py +++ b/cms/djangoapps/contentstore/rest_api/v1/views/home.py @@ -236,7 +236,7 @@ def get(self, request: Request): "number": "CPSPR", "can_edit": true } - ], } + ], ``` """ diff --git a/cms/djangoapps/contentstore/rest_api/v1/views/tests/test_home.py b/cms/djangoapps/contentstore/rest_api/v1/views/tests/test_home.py index cd7592c46629..72d58fa00dfa 100644 --- a/cms/djangoapps/contentstore/rest_api/v1/views/tests/test_home.py +++ b/cms/djangoapps/contentstore/rest_api/v1/views/tests/test_home.py @@ -18,7 +18,6 @@ from cms.djangoapps.contentstore.tests.utils import CourseTestCase from cms.djangoapps.modulestore_migrator import api as migrator_api from cms.djangoapps.modulestore_migrator.data import CompositionLevel, RepeatHandlingStrategy -from cms.djangoapps.modulestore_migrator.tests.factories import ModulestoreSourceFactory from openedx.core.djangoapps.content.course_overviews.tests.factories import CourseOverviewFactory from openedx.core.djangoapps.content_libraries import api as lib_api @@ -253,8 +252,9 @@ class HomePageLibrariesViewTest(LibraryTestCase): def setUp(self): super().setUp() - # Create an additional legacy library + # Create two additional legacy libraries self.lib_key_1 = self._create_library(library="lib1") + self.lib_key_2 = self._create_library(library="lib2") self.organization = OrganizationFactory() # Create a new v2 library @@ -269,7 +269,6 @@ def setUp(self): library = lib_api.ContentLibrary.objects.get(slug=self.lib_key_v2.slug) learning_package = library.learning_package # Create a migration source for the legacy library - self.source = ModulestoreSourceFactory(key=self.lib_key_1) self.url = reverse("cms.djangoapps.contentstore:v1:libraries") # Create a collection to 
migrate this library to collection_key = "test-collection" @@ -280,20 +279,32 @@ def setUp(self): created_by=self.user.id, ) - # Migrate self.lib_key_1 to self.lib_key_v2 + # Migrate both lib_key_1 and lib_key_2 to v2 + # Only make lib_key_1 a "forwarding" migration. migrator_api.start_migration_to_library( user=self.user, - source_key=self.source.key, + source_key=self.lib_key_1, target_library_key=self.lib_key_v2, target_collection_slug=collection_key, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, + preserve_url_slugs=True, + forward_source_to_target=True, + ) + migrator_api.start_migration_to_library( + user=self.user, + source_key=self.lib_key_2, + target_library_key=self.lib_key_v2, + target_collection_slug=collection_key, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, preserve_url_slugs=True, forward_source_to_target=False, ) def test_home_page_libraries_response(self): - """Check successful response content""" + """Check successful response content""" + self.maxDiff = None response = self.client.get(self.url) expected_response = { @@ -322,6 +333,17 @@ def test_home_page_libraries_response(self): 'migrated_to_collection_key': 'test-collection', 'migrated_to_collection_title': 'Test Collection', }, + # Third library was migrated, but not with forwarding. + # So, it appears just like the unmigrated library. 
+ { + 'display_name': 'Test Library', + 'library_key': 'library-v1:org+lib2', + 'url': '/library/library-v1:org+lib2', + 'org': 'org', + 'number': 'lib2', + 'can_edit': True, + 'is_migrated': False, + }, ] } @@ -366,6 +388,15 @@ def test_home_page_libraries_response(self): 'can_edit': True, 'is_migrated': False, }, + { + 'display_name': 'Test Library', + 'library_key': 'library-v1:org+lib2', + 'url': '/library/library-v1:org+lib2', + 'org': 'org', + 'number': 'lib2', + 'can_edit': True, + 'is_migrated': False, + }, ], } diff --git a/cms/djangoapps/contentstore/rest_api/v2/views/tests/test_downstreams.py b/cms/djangoapps/contentstore/rest_api/v2/views/tests/test_downstreams.py index ad10e373cfc8..b33d980732fa 100644 --- a/cms/djangoapps/contentstore/rest_api/v2/views/tests/test_downstreams.py +++ b/cms/djangoapps/contentstore/rest_api/v2/views/tests/test_downstreams.py @@ -23,7 +23,7 @@ from common.djangoapps.student.tests.factories import UserFactory from openedx.core.djangoapps.content_libraries import api as lib_api from xmodule.modulestore.django import modulestore -from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase, ImmediateOnCommitMixin +from xmodule.modulestore.tests.django_utils import ImmediateOnCommitMixin, SharedModuleStoreTestCase from xmodule.modulestore.tests.factories import BlockFactory, CourseFactory from .. 
import downstreams as downstreams_views @@ -32,6 +32,7 @@ URL_PREFIX = '/api/libraries/v2/' URL_LIB_CREATE = URL_PREFIX URL_LIB_BLOCKS = URL_PREFIX + '{lib_key}/blocks/' +URL_LIB_BLOCK = URL_PREFIX + 'blocks/{block_key}/' URL_LIB_BLOCK_PUBLISH = URL_PREFIX + 'blocks/{block_key}/publish/' URL_LIB_BLOCK_OLX = URL_PREFIX + 'blocks/{block_key}/olx/' URL_LIB_CONTAINER = URL_PREFIX + 'containers/{container_key}/' # Get a container in this library @@ -277,6 +278,10 @@ def _create_container(self, lib_key, container_type, slug: str | None, display_n data["slug"] = slug return self._api('post', URL_LIB_CONTAINERS.format(lib_key=lib_key), data, expect_response) + def _delete_component(self, block_key, expect_response=200): + """ Delete the specified library block """ + return self._api('delete', URL_LIB_BLOCK.format(block_key=block_key), None, expect_response) + class SharedErrorTestCases(_BaseDownstreamViewTestMixin): """ @@ -1503,3 +1508,109 @@ def test_200_summary(self): 'last_published_at': self.now.strftime('%Y-%m-%dT%H:%M:%S.%fZ'), }] self.assertListEqual(data, expected) + + +class GetDownstreamDeletedUpstream( + _BaseDownstreamViewTestMixin, + ImmediateOnCommitMixin, + SharedModuleStoreTestCase, +): + """ + Test that parent container is marked ready_to_sync even when the only change is a deleted component under it + """ + def call_api( + self, + course_id: str | None = None, + ready_to_sync: bool | None = None, + upstream_key: str | None = None, + item_type: str | None = None, + use_top_level_parents: bool | None = None, + ): + data = {} + if course_id is not None: + data["course_id"] = str(course_id) + if ready_to_sync is not None: + data["ready_to_sync"] = str(ready_to_sync) + if upstream_key is not None: + data["upstream_key"] = str(upstream_key) + if item_type is not None: + data["item_type"] = str(item_type) + if use_top_level_parents is not None: + data["use_top_level_parents"] = str(use_top_level_parents) + return 
self.client.get("/api/contentstore/v2/downstreams/", data=data) + + def test_delete_component_should_be_ready_to_sync(self): + """ + Test deleting a component from library should mark the entire section container ready to sync + """ + # Create blocks + section_id = self._create_container(self.library_id, "section", "section-12", "Section 12")["id"] + subsection_id = self._create_container(self.library_id, "subsection", "subsection-12", "Subsection 12")["id"] + unit_id = self._create_container(self.library_id, "unit", "unit-12", "Unit 12")["id"] + video_id = self._add_block_to_library(self.library_id, "video", "video-bar-13")["id"] + section_key = ContainerKey.from_string(section_id) + subsection_key = ContainerKey.from_string(subsection_id) + unit_key = ContainerKey.from_string(unit_id) + video_key = LibraryUsageLocatorV2.from_string(video_id) + + # Set children + lib_api.update_container_children(section_key, [subsection_key], None) + lib_api.update_container_children(subsection_key, [unit_key], None) + lib_api.update_container_children(unit_key, [video_key], None) + self._publish_container(unit_id) + self._publish_container(subsection_id) + self._publish_container(section_id) + self._publish_library_block(video_id) + course = CourseFactory.create(display_name="Course New") + add_users(self.superuser, CourseStaffRole(course.id), self.course_user) + chapter = BlockFactory.create( + category='chapter', parent=course, upstream=section_id, upstream_version=2, + ) + sequential = BlockFactory.create( + category='sequential', + parent=chapter, + upstream=subsection_id, + upstream_version=2, + top_level_downstream_parent_key=get_block_key_string(chapter.usage_key), + ) + vertical = BlockFactory.create( + category='vertical', + parent=sequential, + upstream=unit_id, + upstream_version=2, + top_level_downstream_parent_key=get_block_key_string(chapter.usage_key), + ) + BlockFactory.create( + category='video', + parent=vertical, + upstream=video_id, + upstream_version=1, + 
top_level_downstream_parent_key=get_block_key_string(chapter.usage_key), + ) + self._delete_component(video_id) + self._publish_container(unit_id) + response = self.call_api(course_id=course.id, ready_to_sync=True, use_top_level_parents=True) + assert response.status_code == 200 + data = response.json()['results'] + assert len(data) == 1 + date_format = self.now.isoformat().split("+")[0] + 'Z' + expected_results = { + 'created': date_format, + 'downstream_context_key': str(course.id), + 'downstream_usage_key': str(chapter.usage_key), + 'downstream_customized': [], + 'id': 8, + 'ready_to_sync': False, + 'ready_to_sync_from_children': True, + 'top_level_parent_usage_key': None, + 'updated': date_format, + 'upstream_context_key': self.library_id, + 'upstream_context_title': self.library_title, + 'upstream_key': section_id, + 'upstream_type': 'container', + 'upstream_version': 2, + 'version_declined': None, + 'version_synced': 2, + } + + self.assertDictEqual(data[0], expected_results) diff --git a/cms/djangoapps/contentstore/tests/test_course_listing.py b/cms/djangoapps/contentstore/tests/test_course_listing.py index d256228228cb..990eff83c922 100644 --- a/cms/djangoapps/contentstore/tests/test_course_listing.py +++ b/cms/djangoapps/contentstore/tests/test_course_listing.py @@ -21,8 +21,10 @@ get_courses_accessible_to_user ) from common.djangoapps.course_action_state.models import CourseRerunState +from common.djangoapps.student.models.user import CourseAccessRole from common.djangoapps.student.roles import ( CourseInstructorRole, + CourseLimitedStaffRole, CourseStaffRole, GlobalStaff, OrgInstructorRole, @@ -176,6 +178,48 @@ def test_staff_course_listing(self): with self.assertNumQueries(2): list(_accessible_courses_summary_iter(self.request)) + def test_course_limited_staff_course_listing(self): + # Setup a new course + course_location = self.store.make_course_key('Org', 'CreatedCourse', 'Run') + CourseFactory.create( + org=course_location.org, + 
number=course_location.course, + run=course_location.run + ) + course = CourseOverviewFactory.create(id=course_location, org=course_location.org) + + # Add the user as a course_limited_staff on the course + CourseLimitedStaffRole(course.id).add_users(self.user) + self.assertTrue(CourseLimitedStaffRole(course.id).has_user(self.user)) + + # Fetch accessible courses list & verify their count + courses_list_by_staff, __ = get_courses_accessible_to_user(self.request) + + # Limited Course Staff should not be able to list courses in Studio + assert len(list(courses_list_by_staff)) == 0 + + def test_org_limited_staff_course_listing(self): + + # Setup a new course + course_location = self.store.make_course_key('Org', 'CreatedCourse', 'Run') + CourseFactory.create( + org=course_location.org, + number=course_location.course, + run=course_location.run + ) + course = CourseOverviewFactory.create(id=course_location, org=course_location.org) + + # Add a user as course_limited_staff on the org + # This is not possible using the course roles classes but is possible via Django admin so we + # insert a row into the model directly to test that scenario. + CourseAccessRole.objects.create(user=self.user, org=course_location.org, role=CourseLimitedStaffRole.ROLE) + + # Fetch accessible courses list & verify their count + courses_list_by_staff, __ = get_courses_accessible_to_user(self.request) + + # Limited Course Staff should not be able to list courses in Studio + assert len(list(courses_list_by_staff)) == 0 + def test_get_course_list_with_invalid_course_location(self): """ Test getting courses with invalid course location (course deleted from modulestore). 
diff --git a/cms/djangoapps/contentstore/utils.py b/cms/djangoapps/contentstore/utils.py index 78e41b7b1813..5506d8c33e41 100644 --- a/cms/djangoapps/contentstore/utils.py +++ b/cms/djangoapps/contentstore/utils.py @@ -57,7 +57,8 @@ ) from cms.djangoapps.models.settings.course_grading import CourseGradingModel from cms.djangoapps.models.settings.course_metadata import CourseMetadata -from cms.djangoapps.modulestore_migrator.api import get_migration_info +from cms.djangoapps.modulestore_migrator import api as migrator_api +from cms.djangoapps.modulestore_migrator.data import ModulestoreMigration from common.djangoapps.course_action_state.managers import CourseActionStateItemNotFoundError from common.djangoapps.course_action_state.models import CourseRerunState, CourseRerunUIStateManager from common.djangoapps.course_modes.models import CourseMode @@ -1578,13 +1579,12 @@ def request_response_format_is_json(request, response_format): def get_library_context(request, request_is_json=False): """ - Utils is used to get context of course home library tab. - It is used for both DRF and django views. + Utils is used to get context of course home library tab. Returned in DRF view. 
""" from cms.djangoapps.contentstore.views.course import ( _accessible_libraries_iter, - _format_library_for_view, _get_course_creator_status, + format_library_for_view, get_allowed_organizations, get_allowed_organizations_for_libraries, user_can_create_organizations, @@ -1596,21 +1596,25 @@ def get_library_context(request, request_is_json=False): user_can_create_library, ) + is_migrated: bool | None # None means: do not filter on is_migrated + if (is_migrated_param := request.GET.get('is_migrated')) is not None: + is_migrated = BooleanField().to_internal_value(is_migrated_param) + else: + is_migrated = None libraries = list(_accessible_libraries_iter(request.user) if libraries_v1_enabled() else []) - library_keys = [lib.location.library_key for lib in libraries] - migration_info = get_migration_info(library_keys) - is_migrated_filter = request.GET.get('is_migrated', None) + migration_info: dict[LibraryLocator, ModulestoreMigration | None] = { + lib.id: migrator_api.get_forwarding(lib.id) + for lib in libraries + } data = { 'libraries': [ - _format_library_for_view( + format_library_for_view( lib, request, - migrated_to=migration_info.get(lib.location.library_key) + migration=migration_info[lib.id], ) for lib in libraries - if is_migrated_filter is None or ( - BooleanField().to_internal_value(is_migrated_filter) == (lib.location.library_key in migration_info) - ) + if is_migrated is None or is_migrated == bool(migration_info[lib.id]) ] } @@ -1719,8 +1723,7 @@ def format_in_process_course_view(uca): def get_home_context(request, no_course=False): """ - Utils is used to get context of course home. - It is used for both DRF and django views. + Utils is used to get context of course home. Returned by DRF view. 
""" from cms.djangoapps.contentstore.views.course import ( diff --git a/cms/djangoapps/contentstore/views/certificate_manager.py b/cms/djangoapps/contentstore/views/certificate_manager.py index 429950477fdd..081afdcc0dd7 100644 --- a/cms/djangoapps/contentstore/views/certificate_manager.py +++ b/cms/djangoapps/contentstore/views/certificate_manager.py @@ -121,7 +121,7 @@ def is_activated(course): along with the certificates. """ is_active = False - certificates = None + certificates = [] if settings.FEATURES.get('CERTIFICATES_HTML_VIEW', False): certificates = CertificateManager.get_certificates(course) # we are assuming only one certificate in certificates collection. diff --git a/cms/djangoapps/contentstore/views/course.py b/cms/djangoapps/contentstore/views/course.py index 453e30e0aad0..681ea5f9fe9b 100644 --- a/cms/djangoapps/contentstore/views/course.py +++ b/cms/djangoapps/contentstore/views/course.py @@ -7,7 +7,7 @@ import random import re import string -from typing import Dict, NamedTuple, Optional +from typing import Dict import django.utils from ccx_keys.locator import CCXLocator @@ -44,6 +44,7 @@ from cms.djangoapps.models.settings.course_grading import CourseGradingModel from cms.djangoapps.models.settings.course_metadata import CourseMetadata from cms.djangoapps.models.settings.encoder import CourseSettingsEncoder +from cms.djangoapps.modulestore_migrator.data import ModulestoreMigration from cms.djangoapps.contentstore.api.views.utils import get_bool_param from common.djangoapps.course_action_state.managers import CourseActionStateItemNotFoundError from common.djangoapps.course_action_state.models import CourseRerunState, CourseRerunUIStateManager @@ -61,6 +62,7 @@ GlobalStaff, UserBasedRole, OrgStaffRole, + strict_role_checking, ) from common.djangoapps.util.json_request import JsonResponse, JsonResponseBadRequest, expect_json from common.djangoapps.util.string_utils import _has_non_ascii_characters @@ -536,7 +538,9 @@ def filter_ccx(course_access): 
return not isinstance(course_access.course_id, CCXLocator) instructor_courses = UserBasedRole(request.user, CourseInstructorRole.ROLE).courses_with_role() - staff_courses = UserBasedRole(request.user, CourseStaffRole.ROLE).courses_with_role() + with strict_role_checking(): + staff_courses = UserBasedRole(request.user, CourseStaffRole.ROLE).courses_with_role() + all_courses = list(filter(filter_ccx, instructor_courses | staff_courses)) courses_list = [] course_keys = {} @@ -671,11 +675,18 @@ def library_listing(request): ) -def _format_library_for_view(library, request, migrated_to: Optional[NamedTuple]): +def format_library_for_view(library, request, migration: ModulestoreMigration | None): """ Return a dict of the data which the view requires for each library """ - + migration_info = {} + if migration: + migration_info = { + 'migrated_to_key': migration.target_key, + 'migrated_to_title': migration.target_title, + 'migrated_to_collection_key': migration.target_collection_slug, + 'migrated_to_collection_title': migration.target_collection_title, + } return { 'display_name': library.display_name, 'library_key': str(library.location.library_key), @@ -683,7 +694,8 @@ def _format_library_for_view(library, request, migrated_to: Optional[NamedTuple] 'org': library.display_org_with_default, 'number': library.display_number_with_default, 'can_edit': has_studio_write_access(request.user, library.location.library_key), - **(migrated_to._asdict() if migrated_to is not None else {}), + 'is_migrated': migration is not None, + **migration_info, } @@ -1840,12 +1852,20 @@ def get_allowed_organizations_for_libraries(user): """ Helper method for returning the list of organizations for which the user is allowed to create libraries. """ + organizations_set = set() + + # This allows org-level staff to create libraries. We should re-evaluate + # whether this is necessary and try to normalize course and library creation + # authorization behavior. 
if settings.FEATURES.get('ENABLE_ORGANIZATION_STAFF_ACCESS_FOR_CONTENT_LIBRARIES', False): - return get_organizations_for_non_course_creators(user) - elif settings.FEATURES.get('ENABLE_CREATOR_GROUP', False): - return get_organizations(user) - else: - return [] + organizations_set.update(get_organizations_for_non_course_creators(user)) + + # This allows people in the course creator group for an org to create + # libraries, which mimics course behavior. + if settings.FEATURES.get('ENABLE_CREATOR_GROUP', False): + organizations_set.update(get_organizations(user)) + + return sorted(organizations_set) def user_can_create_organizations(user): diff --git a/cms/djangoapps/contentstore/views/tests/test_organizations.py b/cms/djangoapps/contentstore/views/tests/test_organizations.py index cf3a376f3461..d231e3adc507 100644 --- a/cms/djangoapps/contentstore/views/tests/test_organizations.py +++ b/cms/djangoapps/contentstore/views/tests/test_organizations.py @@ -3,12 +3,18 @@ import json +from django.conf import settings from django.test import TestCase +from django.test.utils import override_settings from django.urls import reverse from organizations.api import add_organization +from cms.djangoapps.course_creators.models import CourseCreator +from common.djangoapps.student.roles import OrgStaffRole from common.djangoapps.student.tests.factories import UserFactory +from ..course import get_allowed_organizations_for_libraries + class TestOrganizationListing(TestCase): """Verify Organization listing behavior.""" @@ -32,3 +38,96 @@ def test_organization_list(self): self.assertEqual(response.status_code, 200) org_names = json.loads(response.content.decode('utf-8')) self.assertEqual(org_names, self.org_short_names) + + +class TestOrganizationsForLibraries(TestCase): + """ + Verify who is allowed to create Libraries. + + This uses some low-level implementation details to set up course creator and + org staff data, which should be replaced by API calls. 
+ + The behavior of this call depends on two FEATURES toggles: + + * ENABLE_ORGANIZATION_STAFF_ACCESS_FOR_CONTENT_LIBRARIES + * ENABLE_CREATOR_GROUP + """ + + @classmethod + def setUpTestData(cls): + cls.library_author = UserFactory(is_staff=False) + cls.org_short_names = ["OrgStaffOrg", "CreatorOrg", "RandomOrg"] + cls.orgs = {} + for index, short_name in enumerate(cls.org_short_names): + cls.orgs[short_name] = add_organization(organization_data={ + 'name': 'Test Organization %s' % index, + 'short_name': short_name, + 'description': 'Testing Organization %s Description' % index, + }) + + # Our user is an org staff for OrgStaffOrg + OrgStaffRole("OrgStaffOrg").add_users(cls.library_author) + + # Our user is also a CourseCreator in CreatorOrg + creator = CourseCreator.objects.create( + user=cls.library_author, + state=CourseCreator.GRANTED, + all_organizations=False, + ) + # The following is because course_creators app logic assumes that all + # updates to CourseCreator go through the CourseCreatorAdmin. + # Specifically, CourseCreatorAdmin.save_model() attaches the current + # request.user to the model instance's .admin field, and then the + # course_creator_organizations_changed_callback() signal handler assumes + # creator.admin is present. I think that code could use some judicious + # refactoring, but I'm just writing this test as part of a last-minute + # Ulmo bug fix, and I don't want to add risk by refactoring something as + # critical-path as course_creators as part of this work. 
+ creator.admin = UserFactory(is_staff=True) + creator.organizations.add( + cls.orgs["CreatorOrg"]['id'] + ) + + @override_settings( + FEATURES={ + **settings.FEATURES, + 'ENABLE_ORGANIZATION_STAFF_ACCESS_FOR_CONTENT_LIBRARIES': False, + 'ENABLE_CREATOR_GROUP': False, + } + ) + def test_both_toggles_disabled(self): + allowed_orgs = get_allowed_organizations_for_libraries(self.library_author) + assert allowed_orgs == [] + + @override_settings( + FEATURES={ + **settings.FEATURES, + 'ENABLE_ORGANIZATION_STAFF_ACCESS_FOR_CONTENT_LIBRARIES': True, + 'ENABLE_CREATOR_GROUP': True, + } + ) + def test_both_toggles_enabled(self): + allowed_orgs = get_allowed_organizations_for_libraries(self.library_author) + assert allowed_orgs == ["CreatorOrg", "OrgStaffOrg"] + + @override_settings( + FEATURES={ + **settings.FEATURES, + 'ENABLE_ORGANIZATION_STAFF_ACCESS_FOR_CONTENT_LIBRARIES': True, + 'ENABLE_CREATOR_GROUP': False, + } + ) + def test_org_staff_enabled(self): + allowed_orgs = get_allowed_organizations_for_libraries(self.library_author) + assert allowed_orgs == ["OrgStaffOrg"] + + @override_settings( + FEATURES={ + **settings.FEATURES, + 'ENABLE_ORGANIZATION_STAFF_ACCESS_FOR_CONTENT_LIBRARIES': False, + 'ENABLE_CREATOR_GROUP': True, + } + ) + def test_creator_group_enabled(self): + allowed_orgs = get_allowed_organizations_for_libraries(self.library_author) + assert allowed_orgs == ["CreatorOrg"] diff --git a/cms/djangoapps/modulestore_migrator/admin.py b/cms/djangoapps/modulestore_migrator/admin.py index 8eef778531ac..c9a5c90256fd 100644 --- a/cms/djangoapps/modulestore_migrator/admin.py +++ b/cms/djangoapps/modulestore_migrator/admin.py @@ -147,8 +147,8 @@ def start_migration_task( source_key=source.key, target_library_key=target_library_key, target_collection_slug=target_collection_slug, - composition_level=form.cleaned_data['composition_level'], - repeat_handling_strategy=form.cleaned_data['repeat_handling_strategy'], + 
composition_level=CompositionLevel(form.cleaned_data['composition_level']), + repeat_handling_strategy=RepeatHandlingStrategy(form.cleaned_data['repeat_handling_strategy']), preserve_url_slugs=form.cleaned_data['preserve_url_slugs'], forward_source_to_target=form.cleaned_data['forward_to_target'], ) @@ -178,6 +178,7 @@ class ModulestoreBlockMigrationInline(admin.TabularInline): "source", "target", "change_log_record", + "unsupported_reason", ) list_display = ("id", *readonly_fields) diff --git a/cms/djangoapps/modulestore_migrator/api.py b/cms/djangoapps/modulestore_migrator/api.py deleted file mode 100644 index e16d3061c9d8..000000000000 --- a/cms/djangoapps/modulestore_migrator/api.py +++ /dev/null @@ -1,163 +0,0 @@ -""" -API for migration from modulestore to learning core -""" -from celery.result import AsyncResult -from opaque_keys import InvalidKeyError -from opaque_keys.edx.keys import CourseKey, LearningContextKey, UsageKey -from opaque_keys.edx.locator import LibraryLocator, LibraryLocatorV2, LibraryUsageLocatorV2 -from openedx_learning.api.authoring import get_collection -from openedx_learning.api.authoring_models import Component -from user_tasks.models import UserTaskStatus - -from openedx.core.djangoapps.content_libraries.api import get_library, library_component_usage_key -from openedx.core.types.user import AuthUser - -from . 
import tasks -from .models import ModulestoreBlockMigration, ModulestoreSource - -__all__ = ( - "start_migration_to_library", - "start_bulk_migration_to_library", - "is_successfully_migrated", - "get_migration_info", - "get_target_block_usage_keys", -) - - -def start_migration_to_library( - *, - user: AuthUser, - source_key: LearningContextKey, - target_library_key: LibraryLocatorV2, - target_collection_slug: str | None = None, - composition_level: str, - repeat_handling_strategy: str, - preserve_url_slugs: bool, - forward_source_to_target: bool, -) -> AsyncResult: - """ - Import a course or legacy library into a V2 library (or, a collection within a V2 library). - """ - source, _ = ModulestoreSource.objects.get_or_create(key=source_key) - target_library = get_library(target_library_key) - # get_library ensures that the library is connected to a learning package. - target_package_id: int = target_library.learning_package_id # type: ignore[assignment] - target_collection_id = None - - if target_collection_slug: - target_collection_id = get_collection(target_package_id, target_collection_slug).id - - return tasks.migrate_from_modulestore.delay( - user_id=user.id, - source_pk=source.id, - target_library_key=str(target_library_key), - target_collection_pk=target_collection_id, - composition_level=composition_level, - repeat_handling_strategy=repeat_handling_strategy, - preserve_url_slugs=preserve_url_slugs, - forward_source_to_target=forward_source_to_target, - ) - - -def start_bulk_migration_to_library( - *, - user: AuthUser, - source_key_list: list[LearningContextKey], - target_library_key: LibraryLocatorV2, - target_collection_slug_list: list[str | None] | None = None, - create_collections: bool = False, - composition_level: str, - repeat_handling_strategy: str, - preserve_url_slugs: bool, - forward_source_to_target: bool, -) -> AsyncResult: - """ - Import a list of courses or legacy libraries into a V2 library (or, a collections within a V2 library). 
- """ - target_library = get_library(target_library_key) - # get_library ensures that the library is connected to a learning package. - target_package_id: int = target_library.learning_package_id # type: ignore[assignment] - - sources_pks: list[int] = [] - for source_key in source_key_list: - source, _ = ModulestoreSource.objects.get_or_create(key=source_key) - sources_pks.append(source.id) - - target_collection_pks: list[int | None] = [] - if target_collection_slug_list: - for target_collection_slug in target_collection_slug_list: - if target_collection_slug: - target_collection_id = get_collection(target_package_id, target_collection_slug).id - target_collection_pks.append(target_collection_id) - else: - target_collection_pks.append(None) - - return tasks.bulk_migrate_from_modulestore.delay( - user_id=user.id, - sources_pks=sources_pks, - target_library_key=str(target_library_key), - target_collection_pks=target_collection_pks, - create_collections=create_collections, - composition_level=composition_level, - repeat_handling_strategy=repeat_handling_strategy, - preserve_url_slugs=preserve_url_slugs, - forward_source_to_target=forward_source_to_target, - ) - - -def is_successfully_migrated( - source_key: CourseKey | LibraryLocator, - source_version: str | None = None, -) -> bool: - """ - Check if the source course/library has been migrated successfully. 
- """ - filters = {"task_status__state": UserTaskStatus.SUCCEEDED} - if source_version is not None: - filters["source_version"] = source_version - return ModulestoreSource.objects.get_or_create(key=str(source_key))[0].migrations.filter(**filters).exists() - - -def get_migration_info(source_keys: list[CourseKey | LibraryLocator]) -> dict: - """ - Check if the source course/library has been migrated successfully and return target info - """ - return { - info.key: info - for info in ModulestoreSource.objects.filter( - migrations__task_status__state=UserTaskStatus.SUCCEEDED, - migrations__is_failed=False, - key__in=source_keys, - ) - .values_list( - 'migrations__target__key', - 'migrations__target__title', - 'migrations__target_collection__key', - 'migrations__target_collection__title', - 'key', - named=True, - ) - } - - -def get_target_block_usage_keys(source_key: CourseKey | LibraryLocator) -> dict[UsageKey, LibraryUsageLocatorV2 | None]: - """ - For given source_key, get a map of legacy block key and its new location in migrated v2 library. 
- """ - query_set = ModulestoreBlockMigration.objects.filter(overall_migration__source__key=source_key).select_related( - 'source', 'target__component__component_type', 'target__learning_package' - ) - - def construct_usage_key(lib_key_str: str, component: Component) -> LibraryUsageLocatorV2 | None: - try: - lib_key = LibraryLocatorV2.from_string(lib_key_str) - except InvalidKeyError: - return None - return library_component_usage_key(lib_key, component) - - # Use LibraryUsageLocatorV2 and construct usage key - return { - obj.source.key: construct_usage_key(obj.target.learning_package.key, obj.target.component) - for obj in query_set - if obj.source.key is not None - } diff --git a/cms/djangoapps/modulestore_migrator/api/__init__.py b/cms/djangoapps/modulestore_migrator/api/__init__.py new file mode 100644 index 000000000000..f480d92bf092 --- /dev/null +++ b/cms/djangoapps/modulestore_migrator/api/__init__.py @@ -0,0 +1,9 @@ + +""" +This is the public API for the modulestore_migrator. +""" + +# These wildcard imports are okay because these api modules declare __all__. 
+# pylint: disable=wildcard-import +from .read_api import * +from .write_api import * diff --git a/cms/djangoapps/modulestore_migrator/api/read_api.py b/cms/djangoapps/modulestore_migrator/api/read_api.py new file mode 100644 index 000000000000..893ef416d091 --- /dev/null +++ b/cms/djangoapps/modulestore_migrator/api/read_api.py @@ -0,0 +1,244 @@ +""" +API for reading information about previous migrations +""" +from __future__ import annotations + +import typing as t +from uuid import UUID + +from opaque_keys.edx.keys import UsageKey +from opaque_keys.edx.locator import ( + LibraryLocatorV2, LibraryUsageLocatorV2, LibraryContainerLocator +) +from openedx_learning.api.authoring import get_draft_version +from openedx_learning.api.authoring_models import ( + PublishableEntityVersion, PublishableEntity, DraftChangeLogRecord +) + +from openedx.core.djangoapps.content_libraries.api import ( + library_component_usage_key, library_container_locator +) + +from ..data import ( + SourceContextKey, ModulestoreMigration, ModulestoreBlockMigrationResult, + ModulestoreBlockMigrationSuccess, ModulestoreBlockMigrationFailure +) +from .. import models + + +__all__ = ( + 'get_forwarding', + 'is_forwarded', + 'get_forwarding_for_blocks', + 'get_migrations', + 'get_migration_blocks', +) + + +def get_forwarding_for_blocks(source_keys: t.Iterable[UsageKey]) -> dict[UsageKey, ModulestoreBlockMigrationSuccess]: + """ + Authoritatively determine how some Modulestore blocks have been migrated to Learning Core. + + Returns a mapping from source usage keys to block migration data objects. Each block migration object + holds the target usage key and title. If a source key is missing from the mapping, then it has not + been authoritatively migrated. 
+    """
+    sources = models.ModulestoreBlockSource.objects.filter(
+        key__in=[str(sk) for sk in source_keys]
+    ).select_related(
+        "forwarded__target__learning_package",
+        # For building component key
+        "forwarded__target__component__component_type",
+        # For building container key
+        "forwarded__target__container__section",
+        "forwarded__target__container__subsection",
+        "forwarded__target__container__unit",
+        # For determining title and version
+        "forwarded__change_log_record__new_version",
+    )
+    result = {}
+    for source in sources:
+        if source.forwarded and source.forwarded.target:
+            result[source.key] = _block_migration_success(
+                source_key=source.key,
+                target=source.forwarded.target,
+                change_log_record=source.forwarded.change_log_record,
+            )
+    return result
+
+
+def is_forwarded(source_key: SourceContextKey) -> bool:
+    """
+    Has this course or legacy library been authoritatively migrated to Learning Core,
+    such that references to the source course/library should be forwarded to the target library?
+    """
+    return get_forwarding(source_key) is not None
+
+
+def get_forwarding(source_key: SourceContextKey) -> ModulestoreMigration | None:
+    """
+    Authoritatively determine how some Modulestore course or legacy library has been migrated to Learning Core.
+
+    If no such successful migration exists, returns None.
+
+    Note: This function may return None for a course or legacy lib that *has* been migrated 1+ times.
+    This just means that those migrations were non-forwarding. In user parlance, that is,
+    they have been "imported" but not truly "migrated".
+ """ + try: + source = models.ModulestoreSource.objects.select_related( + # The following are used in _migration: + "forwarded__source", + "forwarded__target", + "forwarded__task_status", + "forwarded__target_collection", + ).get( + key=str(source_key) + ) + except models.ModulestoreSource.DoesNotExist: + return None + if not source.forwarded: + return None + if source.forwarded.is_failed: + return None + return _migration(source.forwarded) + + +def get_migrations( + source_key: SourceContextKey | None = None, + *, + target_key: LibraryLocatorV2 | None = None, + target_collection_slug: str | None = None, + task_uuid: UUID | None = None, + is_failed: bool | None = None, +) -> t.Generator[ModulestoreMigration]: + """ + Given some criteria, get all modulestore->LearningCore migrations. + + Returns an iterable, ordered from NEWEST to OLDEST. + + Please note: If you provide no filters, this will return an iterable across the whole + ModulestoreMigration table. Please paginate thoughtfully if you do that. + """ + migrations = models.ModulestoreMigration.objects.all().select_related( + "source", + "target", + "target_collection", + "task_status", + ) + if source_key: + migrations = migrations.filter(source__key=source_key) + if target_key: + migrations = migrations.filter(target__key=str(target_key)) + if target_collection_slug: + migrations = migrations.filter(target_collection__key=target_collection_slug) + if task_uuid: + migrations = migrations.filter(task_status__uuid=str(task_uuid)) + if is_failed is not None: + migrations = migrations.filter(is_failed=is_failed) + return ( + _migration(migration) + for migration in migrations.order_by("-id") # primary key is a proxy for newness + ) + + +def get_migration_blocks(migration_pk: int) -> dict[UsageKey, ModulestoreBlockMigrationResult]: + """ + Get details about the migrations of each individual block within a course/lib migration. 
+    """
+    return {
+        block_migration.source.key: _block_migration_result(block_migration)
+        for block_migration in models.ModulestoreBlockMigration.objects.filter(
+            overall_migration_id=migration_pk
+        ).select_related(
+            "source",
+            "target__learning_package",
+            # For building component key
+            "target__component__component_type",
+            # For building container key.
+            # (Hard-coding these exact 3 container types here is not a good pattern, but it's what is needed
+            # here in order to avoid additional SELECTs while determining the container type).
+            "target__container__section",
+            "target__container__subsection",
+            "target__container__unit",
+            # For determining title and version
+            "change_log_record__new_version",
+        )
+    }
+
+
+def _migration(m: models.ModulestoreMigration) -> ModulestoreMigration:
+    """
+    Build a migration dataclass from the database row
+    """
+    return ModulestoreMigration(
+        pk=m.id,
+        source_key=m.source.key,
+        target_key=LibraryLocatorV2.from_string(m.target.key),
+        target_title=m.target.title,
+        target_collection_slug=(m.target_collection.key if m.target_collection else None),
+        target_collection_title=(m.target_collection.title if m.target_collection else None),
+        is_failed=m.is_failed,
+        task_uuid=m.task_status.uuid,
+    )
+
+
+def _block_migration_result(m: models.ModulestoreBlockMigration) -> ModulestoreBlockMigrationResult:
+    """
+    Build an instance of the migration result (success/failure) dataclass from a database row
+    """
+    if m.target:
+        return _block_migration_success(
+            source_key=m.source.key,
+            target=m.target,
+            change_log_record=m.change_log_record,
+        )
+    return ModulestoreBlockMigrationFailure(
+        source_key=m.source.key,
+        unsupported_reason=(m.unsupported_reason or ""),
+    )
+
+
+def _block_migration_success(
+    source_key: UsageKey,
+    target: PublishableEntity,
+    change_log_record: DraftChangeLogRecord | None,
+) -> ModulestoreBlockMigrationSuccess:
+    """
+    Build an instance of the migration success dataclass
+    """
+    target_library_key = LibraryLocatorV2.from_string(target.learning_package.key)
+    target_key: LibraryUsageLocatorV2 | LibraryContainerLocator
+    if hasattr(target, "component"):
+        target_key = library_component_usage_key(target_library_key, target.component)
+    elif hasattr(target, "container"):
+        target_key = library_container_locator(target_library_key, target.container)
+    else:
+        raise ValueError(f"Entity is neither a container nor component: {target}")
+    # We expect that any successful BlockMigration (that is, one where `target is not None`)
+    # will also have a `change_log_record` with a non-None `new_version`. However, the data model
+    # does not guarantee that `change_log_record` nor `change_log_record.new_version` are non-
+    # None. So, just in case some bug in the modulestore_migrator or some manual modification of
+    # the database leads us to a situation where `target` is set but `change_log_record.new_version`
+    # is not, we have fallback behavior:
+    # * For target_title, use the latest draft's title, which is good enough, because the
+    #   title is just there to help users.
+    # * For target_version_num, just use None, because we don't want downstream code to make decisions
+    #   about syncing, etc based on incorrect version info.
+ target_version: PublishableEntityVersion | None = ( + change_log_record.new_version if change_log_record else None + ) + if target_version: + target_title = target_version.title + target_version_num = target_version.version_num + else: + latest_draft = get_draft_version(target) + target_title = latest_draft.title if latest_draft else "" + target_version_num = None + return ModulestoreBlockMigrationSuccess( + source_key=source_key, + target_entity_pk=target.id, + target_key=target_key, + target_title=target_title, + target_version_num=target_version_num, + ) diff --git a/cms/djangoapps/modulestore_migrator/api/write_api.py b/cms/djangoapps/modulestore_migrator/api/write_api.py new file mode 100644 index 000000000000..4bef6c952767 --- /dev/null +++ b/cms/djangoapps/modulestore_migrator/api/write_api.py @@ -0,0 +1,94 @@ +""" +API for kicking off new migrations +""" +from __future__ import annotations + +from celery.result import AsyncResult +from opaque_keys.edx.locator import LibraryLocatorV2 +from openedx_learning.api.authoring import get_collection + +from openedx.core.types.user import AuthUser +from openedx.core.djangoapps.content_libraries.api import get_library + +from ..data import SourceContextKey, CompositionLevel, RepeatHandlingStrategy +from .. import tasks, models + + +__all__ = ( + 'start_migration_to_library', + 'start_bulk_migration_to_library' +) + + +def start_migration_to_library( + *, + user: AuthUser, + source_key: SourceContextKey, + target_library_key: LibraryLocatorV2, + target_collection_slug: str | None = None, + create_collection: bool = False, + composition_level: CompositionLevel, + repeat_handling_strategy: RepeatHandlingStrategy, + preserve_url_slugs: bool, + forward_source_to_target: bool | None +) -> AsyncResult: + """ + Import a course or legacy library into a V2 library (or, a collection within a V2 library). 
+    """
+    return start_bulk_migration_to_library(
+        user=user,
+        source_key_list=[source_key],
+        target_library_key=target_library_key,
+        target_collection_slug_list=[target_collection_slug],
+        create_collections=create_collection,
+        composition_level=composition_level,
+        repeat_handling_strategy=repeat_handling_strategy,
+        preserve_url_slugs=preserve_url_slugs,
+        forward_source_to_target=forward_source_to_target,
+    )
+
+
+def start_bulk_migration_to_library(
+    *,
+    user: AuthUser,
+    source_key_list: list[SourceContextKey],
+    target_library_key: LibraryLocatorV2,
+    target_collection_slug_list: list[str | None] | None = None,
+    create_collections: bool = False,
+    composition_level: CompositionLevel,
+    repeat_handling_strategy: RepeatHandlingStrategy,
+    preserve_url_slugs: bool,
+    forward_source_to_target: bool | None,
+) -> AsyncResult:
+    """
+    Import a list of courses or legacy libraries into a V2 library (or, collections within a V2 library).
+    """
+    target_library = get_library(target_library_key)
+    # get_library ensures that the library is connected to a learning package.
+ target_package_id: int = target_library.learning_package_id # type: ignore[assignment] + + sources_pks: list[int] = [] + for source_key in source_key_list: + source, _ = models.ModulestoreSource.objects.get_or_create(key=str(source_key)) + sources_pks.append(source.id) + + target_collection_pks: list[int | None] = [] + if target_collection_slug_list: + for target_collection_slug in target_collection_slug_list: + if target_collection_slug: + target_collection_id = get_collection(target_package_id, target_collection_slug).id + target_collection_pks.append(target_collection_id) + else: + target_collection_pks.append(None) + + return tasks.bulk_migrate_from_modulestore.delay( + user_id=user.id, + sources_pks=sources_pks, + target_library_key=str(target_library_key), + target_collection_pks=target_collection_pks, + create_collections=create_collections, + composition_level=composition_level.value, + repeat_handling_strategy=repeat_handling_strategy.value, + preserve_url_slugs=preserve_url_slugs, + forward_source_to_target=forward_source_to_target, + ) diff --git a/cms/djangoapps/modulestore_migrator/data.py b/cms/djangoapps/modulestore_migrator/data.py index e42649557d67..529d4c78ad40 100644 --- a/cms/djangoapps/modulestore_migrator/data.py +++ b/cms/djangoapps/modulestore_migrator/data.py @@ -1,9 +1,23 @@ """ Value objects """ + from __future__ import annotations +import typing as t +from dataclasses import dataclass from enum import Enum +from uuid import UUID + +from django.utils.translation import gettext_lazy as _ +from opaque_keys.edx.keys import UsageKey +from opaque_keys.edx.locator import ( + CourseLocator, + LibraryContainerLocator, + LibraryLocator, + LibraryLocatorV2, + LibraryUsageLocatorV2, +) from openedx.core.djangoapps.content_libraries.api import ContainerType @@ -70,3 +84,51 @@ def default(cls) -> RepeatHandlingStrategy: Returns the default repeat handling strategy. 
""" return cls.Skip + + +SourceContextKey: t.TypeAlias = CourseLocator | LibraryLocator + + +@dataclass(frozen=True) +class ModulestoreMigration: + """ + Metadata on a migration of a course or legacy library to a v2 library in learning core. + """ + pk: int + source_key: SourceContextKey + target_key: LibraryLocatorV2 + target_title: str + target_collection_slug: str | None + target_collection_title: str | None + is_failed: bool + task_uuid: UUID # the UserTask which executed this migration + + +@dataclass(frozen=True) +class ModulestoreBlockMigrationResult: + """ + Base class for a modulestore block that was part of an attempted migration to learning core. + """ + source_key: UsageKey + is_failed: t.ClassVar[bool] + + +@dataclass(frozen=True) +class ModulestoreBlockMigrationSuccess(ModulestoreBlockMigrationResult): + """ + Info on a modulestore block which has been successfully migrated into an LC entity + """ + target_entity_pk: int + target_key: LibraryUsageLocatorV2 | LibraryContainerLocator + target_title: str + target_version_num: int | None + is_failed: t.ClassVar[bool] = False + + +@dataclass(frozen=True) +class ModulestoreBlockMigrationFailure(ModulestoreBlockMigrationResult): + """ + Info on a modulestore block which failed to be migrated into LC + """ + unsupported_reason: str + is_failed: t.ClassVar[bool] = True diff --git a/cms/djangoapps/modulestore_migrator/migrations/0004_alter_modulestoreblockmigration_target_squashed_0005_modulestoreblockmigration_unsupported_reason.py b/cms/djangoapps/modulestore_migrator/migrations/0004_alter_modulestoreblockmigration_target_squashed_0005_modulestoreblockmigration_unsupported_reason.py new file mode 100644 index 000000000000..f043e208dc35 --- /dev/null +++ b/cms/djangoapps/modulestore_migrator/migrations/0004_alter_modulestoreblockmigration_target_squashed_0005_modulestoreblockmigration_unsupported_reason.py @@ -0,0 +1,32 @@ +# Generated by Django 5.2.7 on 2025-11-26 06:35 + +import django.db.models.deletion 
+from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ('modulestore_migrator', '0003_modulestoremigration_is_failed'), + ('oel_publishing', '0008_alter_draftchangelogrecord_options_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='modulestoreblockmigration', + name='target', + field=models.ForeignKey( + blank=True, + help_text='The target entity of this block migration, set to null if it fails to migrate', + null=True, + on_delete=django.db.models.deletion.CASCADE, + to='oel_publishing.publishableentity', + ), + ), + migrations.AddField( + model_name='modulestoreblockmigration', + name='unsupported_reason', + field=models.TextField( + blank=True, help_text='Reason if the block is unsupported and target is set to null', null=True + ), + ), + ] diff --git a/cms/djangoapps/modulestore_migrator/migrations/0006_alter_modulestoreblocksource_forwarded_and_more.py b/cms/djangoapps/modulestore_migrator/migrations/0006_alter_modulestoreblocksource_forwarded_and_more.py new file mode 100644 index 000000000000..a8ee5a0eb673 --- /dev/null +++ b/cms/djangoapps/modulestore_migrator/migrations/0006_alter_modulestoreblocksource_forwarded_and_more.py @@ -0,0 +1,30 @@ +# Generated by Django 5.2.9 on 2025-12-14 15:33 + +import django.db.models.deletion +import opaque_keys.edx.django.models +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('modulestore_migrator', '0004_alter_modulestoreblockmigration_target_squashed_0005_modulestoreblockmigration_unsupported_reason'), + ] + + operations = [ + migrations.AlterField( + model_name='modulestoreblocksource', + name='forwarded', + field=models.OneToOneField(help_text='If set, the system will forward references of this block source over to the target of this block migration', null=True, on_delete=django.db.models.deletion.SET_NULL, to='modulestore_migrator.modulestoreblockmigration'), + ), + 
migrations.AlterField( + model_name='modulestoreblocksource', + name='key', + field=opaque_keys.edx.django.models.UsageKeyField(help_text='Original usage key of the XBlock that has been imported.', max_length=255, unique=True), + ), + migrations.AlterField( + model_name='modulestoresource', + name='forwarded', + field=models.OneToOneField(blank=True, help_text='If set, the system will forward references of this source over to the target of this migration', null=True, on_delete=django.db.models.deletion.SET_NULL, to='modulestore_migrator.modulestoremigration'), + ), + ] diff --git a/cms/djangoapps/modulestore_migrator/models.py b/cms/djangoapps/modulestore_migrator/models.py index 810333fc9be9..11f614c2ed03 100644 --- a/cms/djangoapps/modulestore_migrator/models.py +++ b/cms/djangoapps/modulestore_migrator/models.py @@ -6,16 +6,19 @@ from django.contrib.auth import get_user_model from django.db import models from django.utils.translation import gettext_lazy as _ -from user_tasks.models import UserTaskStatus - from model_utils.models import TimeStampedModel from opaque_keys.edx.django.models import ( LearningContextKeyField, UsageKeyField, ) from openedx_learning.api.authoring_models import ( - LearningPackage, PublishableEntity, Collection, DraftChangeLog, DraftChangeLogRecord + Collection, + DraftChangeLog, + DraftChangeLogRecord, + LearningPackage, + PublishableEntity, ) +from user_tasks.models import UserTaskStatus from .data import CompositionLevel, RepeatHandlingStrategy @@ -25,6 +28,23 @@ class ModulestoreSource(models.Model): """ A legacy learning context (course or library) which can be a source of a migration. + + One source can be associated with multiple (successful or unsuccessful) ModulestoreMigrations. + If a source has been migrated multiple times, then at most one of them can be considered the + "official" or "authoritative" migration; this is indicated by setting the `forwarded` field to + that ModulestoreMigration object. 
+ + Note that `forwarded` can be NULL even when 1+ migrations have happened for this source. This just + means that none of them were authoritative. In other words, they were all "imports"/"copies" rather + than true "migrations". + + In practice, as of Ulmo: + * The `forwarded` field is used to decide how to update legacy library_content references. + * When using the Libraries Migration UI in Studio, `forwarded` is always set to the first + successful ModulestoreMigration. + * When using the REST API directly, the default is to use the same behavior as the UI, but + clients can also explicitly specify the `forward_source_to_target` boolean param in order to + control whether `forwarded` is set to any given migration. """ key = LearningContextKeyField( max_length=255, @@ -37,7 +57,6 @@ class ModulestoreSource(models.Model): blank=True, on_delete=models.SET_NULL, help_text=_('If set, the system will forward references of this source over to the target of this migration'), - related_name="forwards", ) def __str__(self): @@ -160,6 +179,9 @@ def __repr__(self): class ModulestoreBlockSource(TimeStampedModel): """ A legacy block usage (in a course or library) which can be a source of a block migration. + + The semantics of `forwarded` directly mirror those of `ModulestoreSource.forwarded`. Please see + that class's docstring for details. 
""" overall_source = models.ForeignKey( ModulestoreSource, @@ -168,6 +190,7 @@ class ModulestoreBlockSource(TimeStampedModel): ) key = UsageKeyField( max_length=255, + unique=True, help_text=_('Original usage key of the XBlock that has been imported.'), ) forwarded = models.OneToOneField( @@ -175,11 +198,10 @@ class ModulestoreBlockSource(TimeStampedModel): null=True, on_delete=models.SET_NULL, help_text=_( - 'If set, the system will forward references of this block source over to the target of this block migration' + 'If set, the system will forward references of this block source over to the ' + 'target of this block migration' ), - related_name="forwards", ) - unique_together = [("overall_source", "key")] def __str__(self): return f"{self.__class__.__name__}('{self.key}')" @@ -210,6 +232,9 @@ class ModulestoreBlockMigration(TimeStampedModel): target = models.ForeignKey( PublishableEntity, on_delete=models.CASCADE, + help_text=_('The target entity of this block migration, set to null if it fails to migrate'), + null=True, + blank=True, ) change_log_record = models.OneToOneField( DraftChangeLogRecord, @@ -218,10 +243,16 @@ class ModulestoreBlockMigration(TimeStampedModel): null=True, on_delete=models.SET_NULL, ) + unsupported_reason = models.TextField( + null=True, + blank=True, + help_text=_('Reason if the block is unsupported and target is set to null'), + ) class Meta: unique_together = [ ('overall_migration', 'source'), + # By default defining a unique index on a nullable column will only enforce unicity of non-null values. 
('overall_migration', 'target'), ] diff --git a/cms/djangoapps/modulestore_migrator/rest_api/v1/serializers.py b/cms/djangoapps/modulestore_migrator/rest_api/v1/serializers.py index 73180791191f..643d94d2250c 100644 --- a/cms/djangoapps/modulestore_migrator/rest_api/v1/serializers.py +++ b/cms/djangoapps/modulestore_migrator/rest_api/v1/serializers.py @@ -5,11 +5,25 @@ from opaque_keys import InvalidKeyError from opaque_keys.edx.keys import LearningContextKey from opaque_keys.edx.locator import LibraryLocatorV2 +from openedx_learning.api.authoring_models import Collection from rest_framework import serializers +from user_tasks.models import UserTaskStatus from user_tasks.serializers import StatusSerializer from cms.djangoapps.modulestore_migrator.data import CompositionLevel, RepeatHandlingStrategy -from cms.djangoapps.modulestore_migrator.models import ModulestoreMigration +from cms.djangoapps.modulestore_migrator.models import ( + ModulestoreMigration, + ModulestoreSource, +) + + +class LibraryMigrationCollectionSerializer(serializers.ModelSerializer): + """ + Serializer for the target collection of a library migration. + """ + class Meta: + model = Collection + fields = ["key", "title"] class ModulestoreMigrationSerializer(serializers.Serializer): @@ -40,7 +54,7 @@ class ModulestoreMigrationSerializer(serializers.Serializer): preserve_url_slugs = serializers.BooleanField( help_text="If true, current slugs will be preserved.", required=False, - default=True, + default=False, ) target_collection_slug = serializers.CharField( help_text="The target collection slug within the library to import into. 
Optional.", @@ -48,10 +62,20 @@ class ModulestoreMigrationSerializer(serializers.Serializer): allow_blank=True, default=None, ) + create_collection = serializers.BooleanField( + help_text=( + "If true and `target_collection_slug` is not set, " + "create the collections in the library where the import will be made" + ), + required=False, + default=False, + ) + target_collection = LibraryMigrationCollectionSerializer(required=False) forward_source_to_target = serializers.BooleanField( help_text="Forward references of this block source over to the target of this block migration.", required=False, - default=False, + allow_null=True, + default=None, # Note: "None" means "unspecified" ) is_failed = serializers.BooleanField( help_text="It is true if this migration is failed", @@ -173,3 +197,95 @@ def get_fields(self): fields = super().get_fields() fields.pop('name', None) return fields + + +class MigrationInfoSerializer(serializers.Serializer): + """ + Serializer for the migration info + """ + + source_key = serializers.CharField() + target_key = serializers.CharField() + target_title = serializers.CharField() + target_collection_key = serializers.CharField( + source="target_collection_slug", + allow_null=True + ) + target_collection_title = serializers.CharField( + allow_null=True + ) + + +class MigrationInfoResponseSerializer(serializers.Serializer): + """ + Serializer for the migrations info view response + """ + def to_representation(self, instance): + return { + str(key): MigrationInfoSerializer(value, many=True).data + for key, value in instance.items() + } + + +class LibraryMigrationCourseSourceSerializer(serializers.ModelSerializer): + """ + Serializer for the source course of a library migration. 
+ """ + display_name = serializers.SerializerMethodField() + + class Meta: + model = ModulestoreSource + fields = ['key', 'display_name'] + + def get_display_name(self, obj): + """ + Return the display name of the source course + """ + return self.context["course_names"].get(str(obj.key), None) + + +class LibraryMigrationCourseSerializer(serializers.ModelSerializer): + """ + Serializer for the course or legacylibrary migrations to V2 library. + """ + task_uuid = serializers.UUIDField(source='task_status.uuid', read_only=True) + source = LibraryMigrationCourseSourceSerializer() # type: ignore[assignment] + target_collection = LibraryMigrationCollectionSerializer(required=False) + state = serializers.SerializerMethodField() + progress = serializers.SerializerMethodField() + + class Meta: + model = ModulestoreMigration + fields = [ + 'task_uuid', + 'source', + 'target_collection', + 'state', + 'progress', + ] + + def get_state(self, obj: ModulestoreMigration): + """ + Return the state of the migration. + """ + if obj.is_failed or obj.task_status.state in [UserTaskStatus.FAILED, UserTaskStatus.CANCELED]: + return UserTaskStatus.FAILED + elif obj.task_status.state == UserTaskStatus.SUCCEEDED: + return UserTaskStatus.SUCCEEDED + + return UserTaskStatus.IN_PROGRESS + + def get_progress(self, obj: ModulestoreMigration): + """ + Return the progress of the migration. + """ + return obj.task_status.completed_steps / obj.task_status.total_steps + + +class BlockMigrationInfoSerializer(serializers.Serializer): + """ + Serializer for the block migration info. 
+ """ + source_key = serializers.CharField() + target_key = serializers.CharField(allow_null=True) + unsupported_reason = serializers.CharField(allow_null=True) diff --git a/cms/djangoapps/modulestore_migrator/rest_api/v1/urls.py b/cms/djangoapps/modulestore_migrator/rest_api/v1/urls.py index 7f66dc5f6dd6..4d2c92b6af04 100644 --- a/cms/djangoapps/modulestore_migrator/rest_api/v1/urls.py +++ b/cms/djangoapps/modulestore_migrator/rest_api/v1/urls.py @@ -1,9 +1,12 @@ """ Course to Library Import API v1 URLs. """ - from rest_framework.routers import SimpleRouter -from .views import MigrationViewSet, BulkMigrationViewSet + +from .views import ( + BulkMigrationViewSet, + MigrationViewSet, +) ROUTER = SimpleRouter() ROUTER.register(r'migrations', MigrationViewSet, basename='migrations') diff --git a/cms/djangoapps/modulestore_migrator/rest_api/v1/views.py b/cms/djangoapps/modulestore_migrator/rest_api/v1/views.py index f2b231c5c1b2..f84f87a23c85 100644 --- a/cms/djangoapps/modulestore_migrator/rest_api/v1/views.py +++ b/cms/djangoapps/modulestore_migrator/rest_api/v1/views.py @@ -6,22 +6,27 @@ import edx_api_doc_tools as apidocs from edx_rest_framework_extensions.auth.jwt.authentication import JwtAuthentication from edx_rest_framework_extensions.auth.session.authentication import SessionAuthenticationAllowInactiveUser -from rest_framework.permissions import IsAdminUser -from rest_framework.response import Response from rest_framework import status +from rest_framework.decorators import action +from rest_framework.exceptions import PermissionDenied +from rest_framework.response import Response from user_tasks.models import UserTaskStatus from user_tasks.views import StatusViewSet -from cms.djangoapps.modulestore_migrator.api import start_migration_to_library, start_bulk_migration_to_library +from cms.djangoapps.modulestore_migrator import api as migrator_api +from common.djangoapps.student import auth +from openedx.core.djangoapps.content_libraries import api as lib_api 
from openedx.core.lib.api.authentication import BearerAuthenticationAllowInactiveUser +from ...data import ( + CompositionLevel, RepeatHandlingStrategy, +) from .serializers import ( - StatusWithModulestoreMigrationsSerializer, - ModulestoreMigrationSerializer, BulkModulestoreMigrationSerializer, + ModulestoreMigrationSerializer, + StatusWithModulestoreMigrationsSerializer, ) - log = logging.getLogger(__name__) @@ -51,21 +56,11 @@ See `POST /api/modulestore_migrator/v1/migrations` for details on its schema. """, ) -@apidocs.schema_for( - "cancel", - """ - Cancel a particular migration or bulk-migration task. - - The response is a migration task status object. - See `POST /api/modulestore_migrator/v1/migrations` for details on its schema. - """, -) class MigrationViewSet(StatusViewSet): """ JSON HTTP API to create and check on ModuleStore-to-Learning-Core migration tasks. """ - permission_classes = (IsAdminUser,) authentication_classes = ( BearerAuthenticationAllowInactiveUser, JwtAuthentication, @@ -77,14 +72,39 @@ class MigrationViewSet(StatusViewSet): # Instead, users can POST to /cancel to cancel running tasks. http_method_names = ["get", "post"] + lookup_field = "uuid" + def get_queryset(self): """ Override the default queryset to filter by the migration event and user. """ return StatusViewSet.queryset.filter( - migrations__isnull=False, user=self.request.user + migrations__isnull=False, + # The filter for `user` here is essentially the auth strategy for the /list and /retreive + # endpoints. Basically: you can view migrations if and only if you started them. + # Future devs: If you ever refactor this view to remove the user filter, be sure to enforce + # permissions some other way. + user=self.request.user ).distinct().order_by("-created") + @apidocs.schema() + @action(detail=True, methods=['post']) + def cancel(self, request, *args, **kwargs): + """ + Cancel a particular migration or bulk-migration task. + + The response is a migration task status object. 
+ See `POST /api/modulestore_migrator/v1/migrations` for details on its schema. + + This endpoint is currently reserved for site-wide administrators. + """ + # TODO: This should check some sort of "allowed to cancel/migrations" permission + # rather than directly looking at the GlobalStaff role. + # https://github.com/openedx/edx-platform/issues/37791 + if not request.user.is_staff: + raise PermissionDenied("Only site administrators can cancel migration tasks.") + return super().cancel(request, *args, **kwargs) + @apidocs.schema( body=ModulestoreMigrationSerializer, responses={ @@ -94,28 +114,29 @@ def get_queryset(self): ) def create(self, request, *args, **kwargs): """ - Transfer content from course or legacy library into a content library. - - This begins a migration task to copy content from a ModuleStore-based **source** context - to Learning Core-based **target** context. The valid **source** contexts are: - - * A course. - * A legacy content library. - - The valid **target** contexts are: + Begin a transfer of content from course or legacy library into a content library. - * A content library. - * A collection within a content library. + Required parameters: + * A **source** key, which identifies the course or legacy library containing the items be migrated. + * A **target** key, which identifies the content library to which items will be migrated. - Other options: + Optional parameters: + * The **target_collection_slug**, which identifies an *existing* collection within the target that + should hold the migrated items. If not specified, items will be added to the target library without + any collection, unless: + * If this source was previously migrated to a collection and the **repeat_handling_strategy** (described + below) is not set to *fork*, then that same collection will be re-used. 
+ * If **create_collection** is specified as *true*, then the items will be added to a new collection, with + a name and slug based on the source's title, but not conflicting with any existing collection. * The **composition_level** (*component*, *unit*, *subsection*, *section*) indicates the highest level of hierarchy to be transferred. Default is *component*. To maximally preserve the source structure, specify *section*. * The **repeat_handling_strategy** specifies how the system should handle source items which have - previously been migrated to the target. Specify *skip* to prefer the existing target item, specify - *update* to update the existing target item with the latest source content, or specify *fork* to create - a new target item with the source content. Default is *skip*. + previously been migrated to the target. + * Specify *skip* to prefer the existing target item. This is the default. + * Specify *update* to update the existing target item with the latest source content. + * Specify *fork* to create a new target item with the source content. * Specify **preserve_url_slugs** as *true* in order to use the source-provided block IDs (a.k.a. "URL slugs", "url_names"). Otherwise, the system will use each source item's title to auto-generate an ID in the target context. @@ -130,12 +151,17 @@ def create(self, request, *args, **kwargs): content library. * **Example**: Specify *false* if you are copying course content into a content library, but do not want to persist a link between the source source content and destination library contenet. + * **Defaults** to *false* if the source has already been mapped to a target by a successful migration, + and defaults to *true* if not. In other words, by default, establish the mapping only if it wouldn't + override an existing mapping. 
Example request: ```json { "source": "course-v1:MyOrganization+MyCourse+MyRun", - "target": "lib:MyOrganization:MyUlmoLibrary", + "target": "lib-collection:MyOrganization:MyUlmoLibrary", + "target_collection_slug": "MyCollection", + "create_collection": true, "composition_level": "unit", "repeat_handling_strategy": "update", "preserve_url_slugs": true @@ -183,25 +209,32 @@ def create(self, request, *args, **kwargs): ] } ``` + + This API requires that the requester have author access on both the source and target. """ serializer_data = ModulestoreMigrationSerializer(data=request.data) serializer_data.is_valid(raise_exception=True) validated_data = serializer_data.validated_data - - task = start_migration_to_library( + if not auth.has_studio_write_access(request.user, validated_data['source']): + raise PermissionDenied("Requester is not an author on the source.") + lib_api.require_permission_for_library_key( + validated_data['target'], + request.user, + lib_api.permissions.CAN_EDIT_THIS_CONTENT_LIBRARY, + ) + task = migrator_api.start_migration_to_library( user=request.user, source_key=validated_data['source'], target_library_key=validated_data['target'], target_collection_slug=validated_data['target_collection_slug'], - composition_level=validated_data['composition_level'], - repeat_handling_strategy=validated_data['repeat_handling_strategy'], + composition_level=CompositionLevel(validated_data['composition_level']), + create_collection=validated_data['create_collection'], + repeat_handling_strategy=RepeatHandlingStrategy(validated_data['repeat_handling_strategy']), preserve_url_slugs=validated_data['preserve_url_slugs'], forward_source_to_target=validated_data['forward_source_to_target'], ) - task_status = UserTaskStatus.objects.get(task_id=task.id) serializer = self.get_serializer(task_status) - return Response(serializer.data, status=status.HTTP_201_CREATED) @@ -210,7 +243,6 @@ class BulkMigrationViewSet(StatusViewSet): JSON HTTP API to bulk-create 
ModuleStore-to-Learning-Core migration tasks. """ - permission_classes = (IsAdminUser,) authentication_classes = ( BearerAuthenticationAllowInactiveUser, JwtAuthentication, @@ -295,19 +327,30 @@ def create(self, request, *args, **kwargs): ] } ``` + + This API requires that the requester have author access on both the source and target. """ serializer_data = BulkModulestoreMigrationSerializer(data=request.data) serializer_data.is_valid(raise_exception=True) validated_data = serializer_data.validated_data - - task = start_bulk_migration_to_library( + for source_key in validated_data['sources']: + if not auth.has_studio_write_access(request.user, source_key): + raise PermissionDenied( + f"Requester is not an author on the source: {source_key}. No migrations performed." + ) + lib_api.require_permission_for_library_key( + validated_data['target'], + request.user, + lib_api.permissions.CAN_EDIT_THIS_CONTENT_LIBRARY, + ) + task = migrator_api.start_bulk_migration_to_library( user=request.user, source_key_list=validated_data['sources'], target_library_key=validated_data['target'], target_collection_slug_list=validated_data['target_collection_slug_list'], create_collections=validated_data['create_collections'], - composition_level=validated_data['composition_level'], - repeat_handling_strategy=validated_data['repeat_handling_strategy'], + composition_level=CompositionLevel(validated_data['composition_level']), + repeat_handling_strategy=RepeatHandlingStrategy(validated_data['repeat_handling_strategy']), preserve_url_slugs=validated_data['preserve_url_slugs'], forward_source_to_target=validated_data['forward_source_to_target'], ) diff --git a/cms/djangoapps/modulestore_migrator/tasks.py b/cms/djangoapps/modulestore_migrator/tasks.py index 040501185339..e0b0b0fe2ed2 100644 --- a/cms/djangoapps/modulestore_migrator/tasks.py +++ b/cms/djangoapps/modulestore_migrator/tasks.py @@ -9,24 +9,26 @@ from dataclasses import dataclass from datetime import datetime, timezone from enum 
import Enum -from itertools import groupby +from gettext import ngettext from celery import shared_task from celery.utils.log import get_task_logger from django.core.exceptions import ObjectDoesNotExist -from django.utils.text import slugify from django.db import transaction +from django.utils.text import slugify +from django.utils.translation import gettext_lazy as _ from edx_django_utils.monitoring import set_code_owner_attribute_from_module from lxml import etree from lxml.etree import _ElementTree as XmlTree from opaque_keys import InvalidKeyError -from opaque_keys.edx.keys import CourseKey, UsageKey +from opaque_keys.edx.keys import UsageKey from opaque_keys.edx.locator import ( + BlockUsageLocator, CourseLocator, LibraryContainerLocator, LibraryLocator, LibraryLocatorV2, - LibraryUsageLocatorV2 + LibraryUsageLocatorV2, ) from openedx_learning.api import authoring as authoring_api from openedx_learning.api.authoring_models import ( @@ -35,23 +37,24 @@ ComponentType, LearningPackage, PublishableEntity, - PublishableEntityVersion + PublishableEntityVersion, ) from user_tasks.tasks import UserTask, UserTaskStatus from xblock.core import XBlock -from django.utils.translation import gettext_lazy as _ +from xblock.plugin import PluginMissingError from common.djangoapps.split_modulestore_django.models import SplitModulestoreCourseIndex -from common.djangoapps.util.date_utils import strftime_localized, DEFAULT_DATE_TIME_FORMAT +from common.djangoapps.util.date_utils import DEFAULT_DATE_TIME_FORMAT, strftime_localized from openedx.core.djangoapps.content_libraries import api as libraries_api from openedx.core.djangoapps.content_libraries.api import ContainerType, get_library from openedx.core.djangoapps.content_staging import api as staging_api from xmodule.modulestore import exceptions as modulestore_exceptions from xmodule.modulestore.django import modulestore +from . 
import models, data from .constants import CONTENT_STAGING_PURPOSE_TEMPLATE -from .data import CompositionLevel, RepeatHandlingStrategy -from .models import ModulestoreBlockMigration, ModulestoreBlockSource, ModulestoreMigration, ModulestoreSource +from .data import CompositionLevel, RepeatHandlingStrategy, SourceContextKey +from .api.read_api import get_migrations, get_migration_blocks log = get_task_logger(__name__) @@ -78,20 +81,6 @@ class MigrationStep(Enum): BULK_MIGRATION_PREFIX = 'Migrating legacy content' -class _MigrationTask(UserTask): - """ - Base class for migrate_to_modulestore - """ - - @staticmethod - def calculate_total_steps(arguments_dict): - """ - Get number of in-progress steps in importing process, as shown in the UI. - """ - # We subtract the BULK_MIGRATION_PREFIX - return len(list(MigrationStep)) - 1 - - class _BulkMigrationTask(UserTask): """ Base class for bulk_migrate_from_modulestore @@ -125,12 +114,15 @@ class _MigrationContext: """ Context for the migration process. """ - existing_source_to_target_keys: dict[ # Note: It's intended to be mutable to reflect changes during migration. - UsageKey, list[PublishableEntity] - ] + # Fields that get mutated as we migrate blocks + used_component_keys: set[LibraryUsageLocatorV2] + used_container_slugs: set[str] + + # Fields that remain constant + previous_block_migrations: dict[UsageKey, data.ModulestoreBlockMigrationResult] target_package_id: int target_library_key: LibraryLocatorV2 - source_context_key: CourseKey # Note: This includes legacy LibraryLocators, which are sneakily CourseKeys. 
+ source_context_key: SourceContextKey content_by_filename: dict[str, int] composition_level: CompositionLevel repeat_handling_strategy: RepeatHandlingStrategy @@ -138,37 +130,6 @@ class _MigrationContext: created_by: int created_at: datetime - def is_already_migrated(self, source_key: UsageKey) -> bool: - return source_key in self.existing_source_to_target_keys - - def get_existing_target(self, source_key: UsageKey) -> PublishableEntity: - """ - Get the target entity for a given source key. - - If the source key is already migrated, return the FIRST target entity. - If the source key is not found, raise a KeyError. - """ - if source_key not in self.existing_source_to_target_keys: - raise KeyError(f"Source key {source_key} not found in existing source to target keys") - - # NOTE: This is a list of PublishableEntities, but we always return the first one. - return self.existing_source_to_target_keys[source_key][0] - - def add_migration(self, source_key: UsageKey, target: PublishableEntity) -> None: - """Update the context with a new migration (keeps it current)""" - if source_key not in self.existing_source_to_target_keys: - self.existing_source_to_target_keys[source_key] = [target] - else: - self.existing_source_to_target_keys[source_key].append(target) - - def get_existing_target_entity_keys(self, base_key: str) -> set[str]: - return set( - publishable_entity.key - for publishable_entity_list in self.existing_source_to_target_keys.values() - for publishable_entity in publishable_entity_list - if publishable_entity.key.startswith(base_key) - ) - @property def should_skip_strategy(self) -> bool: """ @@ -196,10 +157,11 @@ class _MigrationSourceData: """ Data related to a ModulestoreSource """ - source: ModulestoreSource - source_root_usage_key: UsageKey + source: models.ModulestoreSource + source_root_usage_key: BlockUsageLocator source_version: str | None - migration: ModulestoreMigration + migration: models.ModulestoreMigration + previous_migration: 
data.ModulestoreMigration | None def _validate_input( @@ -208,6 +170,7 @@ def _validate_input( repeat_handling_strategy: str, preserve_url_slugs: bool, composition_level: str, + target_library_key: LibraryLocatorV2, target_package: LearningPackage, target_collection: Collection | None, ) -> _MigrationSourceData | None: @@ -215,7 +178,7 @@ def _validate_input( Validates and build the source data related to `source_pk` """ try: - source = ModulestoreSource.objects.get(pk=source_pk) + source = models.ModulestoreSource.objects.get(pk=source_pk) except (ObjectDoesNotExist) as exc: status.fail(str(exc)) return None @@ -235,7 +198,17 @@ def _validate_input( ) return None - migration = ModulestoreMigration.objects.create( + # Find the latest successful migration that occurred, if any. + # We're careful to do this before creating the new ModulestoreMigration object, + # otherwise we would just end up grabbing that by one accident. + # ( mypy gets confused by how use next(...) here ) + previous_migration = next( # type: ignore[call-overload] + get_migrations( + source.key, target_key=target_library_key, is_failed=False + ), + None, # default + ) + migration = models.ModulestoreMigration.objects.create( source=source, source_version=source_version, composition_level=composition_level, @@ -245,17 +218,17 @@ def _validate_input( target_collection=target_collection, task_status=status, ) - return _MigrationSourceData( source=source, source_root_usage_key=source_root_usage_key, source_version=source_version, migration=migration, + previous_migration=previous_migration, ) def _cancel_old_tasks( - source_list: list[ModulestoreSource], + source_list: list[models.ModulestoreSource], status: UserTaskStatus, target_package: LearningPackage, migration_ids_to_exclude: list[int], @@ -264,7 +237,7 @@ def _cancel_old_tasks( Cancel all migration tasks related to the user and the source list """ # In order to prevent a user from accidentally starting a bunch of identical import tasks... 
- migrations_to_cancel = ModulestoreMigration.objects.filter( + migrations_to_cancel = models.ModulestoreMigration.objects.filter( # get all Migration tasks by this user with the same sources and target task_status__user=status.user, source__in=source_list, @@ -302,7 +275,7 @@ def _load_xblock( return xblock -def _import_assets(migration: ModulestoreMigration) -> dict[str, int]: +def _import_assets(migration: models.ModulestoreMigration) -> dict[str, int]: """ Import the assets of the staged content to the migration target """ @@ -333,7 +306,6 @@ def _import_assets(migration: ModulestoreMigration) -> dict[str, int]: def _import_structure( - migration: ModulestoreMigration, source_data: _MigrationSourceData, target_library: libraries_api.ContentLibraryMetadata, content_by_filename: dict[str, int], @@ -368,21 +340,25 @@ def _import_structure( represents the mapping between the legacy root node and its newly created Learning Core equivalent. """ - # "key" is locally unique across all PublishableEntities within - # a given LearningPackage. - # We use this mapping to ensure that we don't create duplicate - # PublishableEntities during the migration process for a given LearningPackage. 
- existing_source_to_target_keys: dict[UsageKey, list[PublishableEntity]] = {} - modulestore_blocks = ( - ModulestoreBlockMigration.objects.filter(overall_migration__target=migration.target.id).order_by("source__key") - ) - existing_source_to_target_keys = { - source_key: list(block.target for block in group) for source_key, group in groupby( - modulestore_blocks, key=lambda x: x.source.key) - } - + migration = source_data.migration migration_context = _MigrationContext( - existing_source_to_target_keys=existing_source_to_target_keys, + used_component_keys=set( + LibraryUsageLocatorV2(target_library.key, block_type, block_id) # type: ignore[abstract] + for block_type, block_id + in authoring_api.get_components(migration.target.pk).values_list( + "component_type__name", "local_key" + ) + ), + used_container_slugs=set( + authoring_api.get_containers( + migration.target.pk + ).values_list("publishable_entity__key", flat=True) + ), + previous_block_migrations=( + get_migration_blocks(source_data.previous_migration.pk) + if source_data.previous_migration + else {} + ), target_package_id=migration.target.pk, target_library_key=target_library.key, source_context_key=source_data.source_root_usage_key.course_key, @@ -393,7 +369,6 @@ def _import_structure( created_by=status.user_id, created_at=datetime.now(timezone.utc), ) - with authoring_api.bulk_draft_changes_for(migration.target.id) as change_log: root_migrated_node = _migrate_node( context=migration_context, @@ -403,11 +378,11 @@ def _import_structure( return change_log, root_migrated_node -def _forwarding_content(source_data: _MigrationSourceData) -> None: +def _forward_content(source_data: _MigrationSourceData) -> None: """ Forwarding legacy content to migrated content """ - block_migrations = ModulestoreBlockMigration.objects.filter(overall_migration=source_data.migration) + block_migrations = models.ModulestoreBlockMigration.objects.filter(overall_migration=source_data.migration) block_sources_to_block_migrations = 
{ block_migration.source: block_migration for block_migration in block_migrations } @@ -419,7 +394,7 @@ def _forwarding_content(source_data: _MigrationSourceData) -> None: source_data.source.save() -def _populate_collection(user_id: int, migration: ModulestoreMigration) -> None: +def _populate_collection(user_id: int, migration: models.ModulestoreMigration) -> None: """ Assigning imported items to the specified collection in the migration """ @@ -427,7 +402,7 @@ def _populate_collection(user_id: int, migration: ModulestoreMigration) -> None: return block_target_pks: list[int] = list( - ModulestoreBlockMigration.objects.filter( + models.ModulestoreBlockMigration.objects.filter( overall_migration=migration ).values_list('target_id', flat=True) ) @@ -450,7 +425,7 @@ def _create_collection(library_key: LibraryLocatorV2, title: str) -> Collection: The same is true for the title. """ key = slugify(title) - collection = None + collection: Collection | None = None attempt = 0 created_at = strftime_localized(datetime.now(timezone.utc), DEFAULT_DATE_TIME_FORMAT) description = f"{_('This collection contains content migrated from a legacy library on')}: {created_at}" @@ -465,7 +440,7 @@ def _create_collection(library_key: LibraryLocatorV2, title: str) -> Collection: title=f"{title}{f'_{attempt}' if attempt > 0 else ''}", description=description, ) - except libraries_api.LibraryCollectionAlreadyExists as e: + except libraries_api.LibraryCollectionAlreadyExists: attempt += 1 return collection @@ -477,186 +452,12 @@ def _set_migrations_to_fail(source_data_list: list[_MigrationSourceData]): for source_data in source_data_list: source_data.migration.is_failed = True - ModulestoreMigration.objects.bulk_update( + models.ModulestoreMigration.objects.bulk_update( [x.migration for x in source_data_list], ["is_failed"], ) -@shared_task(base=_MigrationTask, bind=True) -# Note: The decorator @set_code_owner_attribute cannot be used here because the UserTaskMixin -# does stack inspection 
and can't handle additional decorators. -def migrate_from_modulestore( - self: _MigrationTask, - *, - user_id: int, - source_pk: int, - target_library_key: str, - target_collection_pk: int | None, - repeat_handling_strategy: str, - preserve_url_slugs: bool, - composition_level: str, - forward_source_to_target: bool, -) -> None: - """ - Import a single course or legacy library from modulestore into a V2 legacy library. - - This task performs the end-to-end migration for one legacy source (course or library), - including staging, parsing OLX, importing assets and structure, and assigning the - migrated content to the specified target library and collection. - - A new `UserTaskStatus` entry is created for each invocation of this task, meaning - that each migration runs independently with its own progress tracking and final - success or failure state. - - If the migration encounters an unrecoverable error at any step (for example, invalid - OLX, missing assets, or database constraints), the task is marked as **failed** and - the partial results are rolled back as necessary. The migration state can be queried - through the REST API endpoint `/api/modulestore_migrator/v1/migrations//`. - - Args: - self (_MigrationTask): - The Celery task instance that wraps the user task logic. - user_id (int): - The ID of the user initiating the migration. - source_pk (int): - Primary key of the modulestore source to migrate. - target_library_key (str): - Key of the target V2 library that will receive the imported content. - target_collection_pk (int | None): - Optional ID of a target collection to which imported content will be assigned. - repeat_handling_strategy (str): - Strategy for handling repeated imports (e.g., "skip", "update"). - preserve_url_slugs (bool): - Whether to preserve original XBlock URL slugs during import. - composition_level (str): - The structural level to migrate (e.g., component, unit, or section). 
- forward_source_to_target (bool): - Whether to forward legacy content references to the migrated content after import. - - See Also: - - `bulk_migrate_from_modulestore`: Multi-source batch migration equivalent. - - API docs: `/api/cms/v1/migrations/` for REST behavior and responses. - """ - - # pylint: disable=too-many-statements - # This is a large function, but breaking it up futher would probably not - # make it any easier to understand. - - set_code_owner_attribute_from_module(__name__) - status: UserTaskStatus = self.status - - # Validating input - status.set_state(MigrationStep.VALIDATING_INPUT.value) - try: - target_library = get_library(LibraryLocatorV2.from_string(target_library_key)) - if target_library.learning_package_id is None: - raise ValueError("Target library has no associated learning package.") - - target_package = LearningPackage.objects.get(pk=target_library.learning_package_id) - target_collection = Collection.objects.get(pk=target_collection_pk) if target_collection_pk else None - except (ObjectDoesNotExist, InvalidKeyError) as exc: - status.fail(str(exc)) - return - - source_data = _validate_input( - status, - source_pk, - repeat_handling_strategy, - preserve_url_slugs, - composition_level, - target_package, - target_collection, - ) - if source_data is None: - # Fail - return - - migration = source_data.migration - status.increment_completed_steps() - - try: - # Cancelling old tasks - status.set_state(MigrationStep.CANCELLING_OLD.value) - _cancel_old_tasks([source_data.source], status, target_package, [migration.id]) - status.increment_completed_steps() - - # Loading `legacy_root` - status.set_state(MigrationStep.LOADING) - legacy_root = _load_xblock(status, source_data.source_root_usage_key) - if legacy_root is None: - # Fail - _set_migrations_to_fail([source_data]) - return - status.increment_completed_steps() - - # Staging legacy block - status.set_state(MigrationStep.STAGING.value) - staged_content = 
staging_api.stage_xblock_temporarily( - block=legacy_root, - user_id=status.user.pk, - purpose=CONTENT_STAGING_PURPOSE_TEMPLATE.format(source_key=source_pk), - ) - migration.staged_content = staged_content - status.increment_completed_steps() - - # Parsing OLX - status.set_state(MigrationStep.PARSING.value) - parser = etree.XMLParser(strip_cdata=False) - try: - root_node = etree.fromstring(staged_content.olx, parser=parser) - except etree.ParseError as exc: - status.fail(f"Failed to parse source OLX (from staged content with id = {staged_content.id}): {exc}") - _set_migrations_to_fail([source_data]) - return - status.increment_completed_steps() - - # Importing assets of the legacy block - status.set_state(MigrationStep.IMPORTING_ASSETS.value) - content_by_filename = _import_assets(migration) - status.increment_completed_steps() - - # Importing structure of the legacy block - status.set_state(MigrationStep.IMPORTING_STRUCTURE.value) - change_log, root_migrated_node = _import_structure( - migration, - source_data, - target_library, - content_by_filename, - root_node, - status, - ) - migration.change_log = change_log - status.increment_completed_steps() - - status.set_state(MigrationStep.UNSTAGING.value) - staged_content.delete() - status.increment_completed_steps() - - _create_migration_artifacts_incrementally( - root_migrated_node=root_migrated_node, - source=source_data.source, - migration=migration, - status=status, - ) - status.increment_completed_steps() - - # Forwarding legacy content to migrated content - status.set_state(MigrationStep.FORWARDING.value) - if forward_source_to_target: - _forwarding_content(source_data) - status.increment_completed_steps() - - # Populating the collection - status.set_state(MigrationStep.POPULATING_COLLECTION.value) - if target_collection: - _populate_collection(user_id, migration) - status.increment_completed_steps() - except Exception as exc: # pylint: disable=broad-exception-caught - _set_migrations_to_fail([source_data]) - 
status.fail(str(exc)) - - @shared_task(base=_BulkMigrationTask, bind=True) # Note: The decorator @set_code_owner_attribute cannot be used here because the UserTaskMixin # does stack inspection and can't handle additional decorators. @@ -671,19 +472,14 @@ def bulk_migrate_from_modulestore( repeat_handling_strategy: str, preserve_url_slugs: bool, composition_level: str, - forward_source_to_target: bool, + forward_source_to_target: bool | None, ) -> None: """ Import multiple legacy courses or libraries into a single V2 library. - This task performs the same logical steps as `migrate_from_modulestore`, but allows - batching several migrations together under **one single user task** (`UserTaskStatus`). - - Unlike running `migrate_from_modulestore` in a loop (which would create multiple - independent Celery tasks and separate statuses), the bulk migration maintains - **one unified status record** that tracks progress across all included sources. - This simplifies monitoring, since the client only needs to observe one task state. - + The bulk migration maintains **one unified status record** that tracks progress across + all included sources. This simplifies monitoring, since the client only needs to observe + one task state. Each source item (course or library) still creates its own `ModulestoreMigration` database record, but all of them share the same parent task (`UserTaskStatus`). If any sub-migration fails (for example, due to invalid OLX or missing assets), @@ -708,8 +504,10 @@ def bulk_migrate_from_modulestore( Whether to preserve existing XBlock URL slugs during import. composition_level (str): Composition level at which content should be imported (e.g. course, section). - forward_source_to_target (bool): + forward_source_to_target (bool | None) Whether to forward legacy content to its migrated equivalent after import. + If unspecified (None), then forward legacy content for a source if and only + if it's that source's first migration. 
See Also: - `migrate_from_modulestore`: Single-source migration equivalent. @@ -752,13 +550,13 @@ def bulk_migrate_from_modulestore( repeat_handling_strategy, preserve_url_slugs, composition_level, + target_library_locator, target_package, target_collection_list[i] if target_collection_list else None, ) if source_data is None: # Fail return - source_data_list.append(source_data) status.increment_completed_steps() @@ -825,14 +623,14 @@ def bulk_migrate_from_modulestore( f"{MigrationStep.IMPORTING_STRUCTURE.value}" ) change_log, root_migrated_node = _import_structure( - source_data.migration, - source_data, - target_library, - content_by_filename, - root_node, - status, + source_data=source_data, + target_library=target_library, + content_by_filename=content_by_filename, + root_node=root_node, + status=status, ) source_data.migration.change_log = change_log + source_data.migration.save() # @@TODO keep or nah? status.increment_completed_steps() status.set_state( @@ -849,7 +647,8 @@ def bulk_migrate_from_modulestore( source_pk=source_pk, ) status.increment_completed_steps() - except: # pylint: disable=bare-except + except Exception as _exc: # pylint: disable=broad-exception-caught + log.exception("Failed: {source_data.migration}") # Mark this library as failed, migration of other libraries can continue # If this case occurs and the migration ends without any further issues, # the bulk migration status is success, @@ -858,73 +657,60 @@ def bulk_migrate_from_modulestore( # Forwarding legacy content to migrated content status.set_state(MigrationStep.FORWARDING.value) - if forward_source_to_target: - for source_data in source_data_list: - if not source_data.migration.is_failed: - _forwarding_content(source_data) + for source_data in source_data_list: + if forward_source_to_target is False: + continue # Explicitly requested not to forward. + if forward_source_to_target is None and source_data.source.forwarded: + # Unspecified whether or not to forward. 
+ # So, forward iff there was no previous existing successful migration with forwarding. + continue + if source_data.migration.is_failed: + # Don't forward failed migrations. + continue + _forward_content(source_data) status.increment_completed_steps() # Populating collections status.set_state(MigrationStep.POPULATING_COLLECTION.value) - - # Used to check if the source has a previous migration in a V2 library collection - # It is placed here to avoid the circular import - from .api import get_migration_info for i, source_data in enumerate(source_data_list): migration = source_data.migration if migration.is_failed: continue - - title = legacy_root_list[i].display_name + if migration.target_collection is None and not create_collections: + continue if migration.target_collection is None: - if not create_collections: - continue - - source_key = source_data.source.key - - if migration.repeat_handling_strategy == RepeatHandlingStrategy.Fork.value: - # Create a new collection when it is Fork - migration.target_collection = _create_collection(target_library_locator, title) - else: - # It is Skip or Update - # We need to verify if there is a previous migration with collection - # TODO: This only fetches the latest migration, if different migrations have been done - # on different V2 libraries, this could break the logic. - previous_migration = get_migration_info([source_key]) - if ( - source_key in previous_migration - and previous_migration[source_key].migrations__target_collection__key - ): - # Has previous migration with collection - try: - # Get the previous collection - previous_collection = authoring_api.get_collection( - target_package.id, - previous_migration[source_key].migrations__target_collection__key, - ) - - migration.target_collection = previous_collection - except Collection.DoesNotExist: - # The collection no longer exists or is being migrated to a different library. 
- # In that case, create a new collection independent of strategy - migration.target_collection = _create_collection(target_library_locator, title) - else: - # Create collection and save in migration - migration.target_collection = _create_collection(target_library_locator, title) - + existing_collection_to_use: Collection | None = None + # For Fork strategy: Create an new collection every time. + # For Update and Skip strategies: Update an existing collection if possible. + if migration.repeat_handling_strategy != RepeatHandlingStrategy.Fork.value: + if source_data.previous_migration: + if previous_collection_slug := source_data.previous_migration.target_collection_slug: + try: + existing_collection_to_use = authoring_api.get_collection( + target_package.id, previous_collection_slug + ) + except Collection.DoesNotExist: + # Collection no longer exists. + pass + migration.target_collection = ( + existing_collection_to_use or + _create_collection(library_key=target_library_locator, title=legacy_root_list[i].display_name) + ) _populate_collection(user_id, migration) - - ModulestoreMigration.objects.bulk_update( + models.ModulestoreMigration.objects.bulk_update( [x.migration for x in source_data_list], ["target_collection", "is_failed"], ) status.increment_completed_steps() except Exception as exc: # pylint: disable=broad-exception-caught # If there is an exception in this block, all migrations fail. - _set_migrations_to_fail(source_data_list) + log.exception("Modulestore migrations failed") status.fail(str(exc)) +SourceToTarget = tuple[UsageKey, PublishableEntityVersion | None, str | None] + + @dataclass(frozen=True) class _MigratedNode: """ @@ -934,10 +720,10 @@ class _MigratedNode: This happens, particularly, if the node is above the requested composition level but has descendents which are at or below that level. 
""" - source_to_target: tuple[UsageKey, PublishableEntityVersion] | None + source_to_target: SourceToTarget | None children: list[_MigratedNode] - def all_source_to_target_pairs(self) -> t.Iterable[tuple[UsageKey, PublishableEntityVersion]]: + def all_source_to_target_pairs(self) -> t.Iterable[SourceToTarget]: """ Get all source_key->target_ver pairs via a pre-order traversal. """ @@ -995,13 +781,13 @@ def _migrate_node( ) for source_node_child in source_node.getchildren() ] - source_to_target: tuple[UsageKey, PublishableEntityVersion] | None = None + source_to_target: SourceToTarget | None = None if should_migrate_node: source_olx = etree.tostring(source_node).decode('utf-8') if source_block_id := source_node.get('url_name'): source_key: UsageKey = context.source_context_key.make_usage_key(source_node.tag, source_block_id) title = source_node.get('display_name', source_block_id) - target_entity_version = ( + target_entity_version, reason = ( _migrate_container( context=context, source_key=source_key, @@ -1010,7 +796,7 @@ def _migrate_node( children=[ migrated_child.source_to_target[1] for migrated_child in migrated_children if - migrated_child.source_to_target + migrated_child.source_to_target and migrated_child.source_to_target[1] ], ) if container_type else @@ -1021,9 +807,18 @@ def _migrate_node( title=title, ) ) - if target_entity_version: - source_to_target = (source_key, target_entity_version) - context.add_migration(source_key, target_entity_version.entity) + if container_type is None and target_entity_version is None and reason is not None: + # Currently, components with children are not supported + children_length = len(source_node.getchildren()) + if children_length: + reason += ( + ngettext( + ' It has {count} children block.', + ' It has {count} children blocks.', + children_length, + ) + ).format(count=children_length) + source_to_target = (source_key, target_entity_version, reason) else: log.warning( f"Cannot migrate node from 
{context.source_context_key} to {context.target_library_key} " @@ -1039,12 +834,14 @@ def _migrate_container( container_type: ContainerType, title: str, children: list[PublishableEntityVersion], -) -> PublishableEntityVersion: +) -> tuple[PublishableEntityVersion, str | None]: """ Create, update, or replace a container in a library based on a source key and children. (We assume that the destination is a library rather than some other future kind of learning - package, but let's keep than an internal assumption.) + package, but let's keep than an internal assumption.) + For now this returns None value for unsupported_reason as second value of tuple as we + don't have any concrete condition where a container cannot be imported/migrated. """ target_key = _get_distinct_target_container_key( context, @@ -1076,8 +873,9 @@ def _migrate_container( return PublishableEntityVersion.objects.get( entity_id=container.container_pk, version_num=container.draft_version_num, - ) - return authoring_api.create_next_container_version( + ), None + + container_publishable_entity_version = authoring_api.create_next_container_version( container.container_pk, title=title, entity_rows=[ @@ -1089,6 +887,18 @@ def _migrate_container( container_version_cls=container_type.container_model_classes[1], ).publishable_entity_version + # Publish the container + # Call post publish events synchronously to avoid + # an error when calling `wait_for_post_publish_events` + # inside a celery task. 
+ libraries_api.publish_container_changes( + container.container_key, + context.created_by, + call_post_publish_events_sync=True, + ) + context.used_container_slugs.add(container.container_key.container_id) + return container_publishable_entity_version, None + def _migrate_component( *, @@ -1096,7 +906,7 @@ def _migrate_component( source_key: UsageKey, olx: str, title: str, -) -> PublishableEntityVersion | None: +) -> tuple[PublishableEntityVersion | None, str | None]: """ Create, update, or replace a component in a library based on a source key and OLX. @@ -1129,7 +939,10 @@ def _migrate_component( ) except libraries_api.IncompatibleTypesError as e: log.error(f"Error validating block for library {context.target_library_key}: {e}") - return None + return None, str(e) + except PluginMissingError as e: + log.error(f"Block type not supported in {context.target_library_key}: {e}") + return None, f"Invalid block type: {e}" component = authoring_api.create_component( context.target_package_id, component_type=component_type, @@ -1140,7 +953,7 @@ def _migrate_component( # Component existed and we do not replace it and it is not deleted previously if component_existed and not component_deleted and context.should_skip_strategy: - return component.versioning.draft.publishable_entity_version + return component.versioning.draft.publishable_entity_version, None # If component existed and was deleted or we have to replace the current version # Create the new component version for it @@ -1153,7 +966,14 @@ def _migrate_component( authoring_api.create_component_version_content( component_version.pk, content_pk, key=new_path ) - return component_version.publishable_entity_version + + # Publish the component + libraries_api.publish_component_changes(target_key, context.created_by) + context.used_component_keys.add(target_key) + return component_version.publishable_entity_version, None + + +_MAX_UNIQUE_SLUG_ATTEMPTS = 1000 def _get_distinct_target_container_key( @@ -1163,39 +983,36 @@ 
def _get_distinct_target_container_key( title: str, ) -> LibraryContainerLocator: """ - Find a unique key for block_id by appending a unique identifier if necessary. - - Args: - context (_MigrationContext): The migration context. - source_key (UsageKey): The source key. - container_type (ContainerType): The container type. - title (str): The title. - - Returns: - LibraryContainerLocator: The target container key. + Figure out the appropriate target container for this structural block. """ - # Check if we already processed this block and we are not forking. If we are forking, we will - # want a new target key. - if context.is_already_migrated(source_key) and not context.should_fork_strategy: - existing_version = context.get_existing_target(source_key) - - return LibraryContainerLocator( - context.target_library_key, - container_type.value, - existing_version.key - ) + # If we're not forking, then check if this block was part of our past migration. + # (If we are forking, we will always want a new target key). 
+ if not context.should_fork_strategy: + if previous_block_migration := context.previous_block_migrations.get(source_key): + if isinstance(previous_block_migration, data.ModulestoreBlockMigrationSuccess): + if isinstance(previous_block_migration.target_key, LibraryContainerLocator): + return previous_block_migration.target_key # Generate new unique block ID base_slug = ( source_key.block_id if context.preserve_url_slugs else (slugify(title) or source_key.block_id) ) - unique_slug = _find_unique_slug(context, base_slug) - - return LibraryContainerLocator( - context.target_library_key, - container_type.value, - unique_slug + # Use base base slug if available + if base_slug not in context.used_container_slugs: + return LibraryContainerLocator( + context.target_library_key, container_type.value, base_slug + ) + # Try numbered variations until we find one that doesn't exist + for i in range(1, _MAX_UNIQUE_SLUG_ATTEMPTS + 1): + candidate_slug = f"{base_slug}_{i}" + if candidate_slug not in context.used_container_slugs: + return LibraryContainerLocator( + context.target_library_key, container_type.value, candidate_slug + ) + # It would be extremely unlikely for us to run out of attempts + raise RuntimeError( + f"Unable to find unique slug after {_MAX_UNIQUE_SLUG_ATTEMPTS} attempts for base: {base_slug}" ) @@ -1206,97 +1023,43 @@ def _get_distinct_target_usage_key( title: str, ) -> LibraryUsageLocatorV2: """ - Find a unique key for block_id by appending a unique identifier if necessary. - - Args: - context: The migration context - source_key: The original usage key from the source - component_type: The component type string - olx: The OLX content of the component - - Returns: - A unique LibraryUsageLocatorV2 for the target - - Raises: - ValueError: If source_key is invalid + Figure out the appropriate target component for this block. """ - # Check if we already processed this block and we are not forking. If we are forking, we will - # want a new target key. 
- if context.is_already_migrated(source_key) and not context.should_fork_strategy: - log.debug(f"Block {source_key} already exists, reusing first existing target") - existing_target = context.get_existing_target(source_key) - block_id = existing_target.component.local_key - - # mypy thinks LibraryUsageLocatorV2 is abstract. It's not. - return LibraryUsageLocatorV2( # type: ignore[abstract] - context.target_library_key, - source_key.block_type, - block_id - ) - + # If we're not forking, then check if this block was part of our past migration. + # (If we are forking, we will always want a new target key). + if not context.should_fork_strategy: + if previous_block_migration := context.previous_block_migrations.get(source_key): + if isinstance(previous_block_migration, data.ModulestoreBlockMigrationSuccess): + if isinstance(previous_block_migration.target_key, LibraryUsageLocatorV2): + return previous_block_migration.target_key # Generate new unique block ID base_slug = ( source_key.block_id if context.preserve_url_slugs else (slugify(title) or source_key.block_id) ) - unique_slug = _find_unique_slug(context, base_slug, component_type) - - # mypy thinks LibraryUsageLocatorV2 is abstract. It's not. - return LibraryUsageLocatorV2( # type: ignore[abstract] - context.target_library_key, - source_key.block_type, - unique_slug + # Use base base slug if available + base_key = LibraryUsageLocatorV2( # type: ignore[abstract] + context.target_library_key, component_type.name, base_slug ) - - -def _find_unique_slug( - context: _MigrationContext, - base_slug: str, - component_type: ComponentType | None = None, - max_attempts: int = 1000 -) -> str: - """ - Find a unique slug by appending incrementing numbers if necessary. - Using batch querying to avoid multiple database roundtrips. 
- - Args: - component_type: The component type to check against - base_slug: The base slug to make unique - max_attempts: Maximum number of attempts to prevent infinite loops - - Returns: - A unique slug string - - Raises: - RuntimeError: If unable to find unique slug within max_attempts - """ - if not component_type: - base_key = base_slug - else: - base_key = f"{component_type}:{base_slug}" - - existing_publishable_entity_keys = context.get_existing_target_entity_keys(base_key) - - # Check if base slug is available - if base_key not in existing_publishable_entity_keys: - return base_slug - + if base_key not in context.used_component_keys: + return base_key # Try numbered variations until we find one that doesn't exist - for i in range(1, max_attempts + 1): + for i in range(1, _MAX_UNIQUE_SLUG_ATTEMPTS + 1): candidate_slug = f"{base_slug}_{i}" - candidate_key = f"{component_type}:{candidate_slug}" if component_type else candidate_slug - - if candidate_key not in existing_publishable_entity_keys: - return candidate_slug - - raise RuntimeError(f"Unable to find unique slug after {max_attempts} attempts for base: {base_slug}") + candidate_key = LibraryUsageLocatorV2( # type: ignore[abstract] + context.target_library_key, component_type.name, candidate_slug + ) + if candidate_key not in context.used_component_keys: + return candidate_key + # It would be extremely unlikely for us to run out of attempts + raise RuntimeError(f"Unable to find unique slug after {_MAX_UNIQUE_SLUG_ATTEMPTS} attempts for base: {base_slug}") def _create_migration_artifacts_incrementally( root_migrated_node: _MigratedNode, - source: ModulestoreSource, - migration: ModulestoreMigration, + source: models.ModulestoreSource, + migration: models.ModulestoreMigration, status: UserTaskStatus, source_pk: int | None = None, ) -> None: @@ -1307,17 +1070,34 @@ def _create_migration_artifacts_incrementally( total_nodes = len(nodes) processed = 0 - for source_usage_key, target_version in 
root_migrated_node.all_source_to_target_pairs(): - block_source, _ = ModulestoreBlockSource.objects.get_or_create( + # Load a mapping from each modified entity's primary key + # to the primary key of the changelog record that captures its modification. + # This will not include any blocks whose migration failed to create a target entity. + entity_pks_to_change_log_record_pks: dict[int, int] = dict( + migration.change_log.records.values_list("entity_id", "id") + ) if migration.change_log else {} + + for source_usage_key, target_version, unsupported_reason in root_migrated_node.all_source_to_target_pairs(): + block_source, _ = models.ModulestoreBlockSource.objects.get_or_create( overall_source=source, key=source_usage_key ) - - ModulestoreBlockMigration.objects.create( - overall_migration=migration, - source=block_source, - target_id=target_version.entity_id, - ) + # target_entity_pk should be None iff the block migration failed + target_entity_pk: int | None = target_version.entity_id if target_version else None + + change_log_record_pk = entity_pks_to_change_log_record_pks.get(target_entity_pk) if target_entity_pk else None + # Only create a migration artifact for this source block if: + # (a) we have a record of a change occuring, or + # (b) it failed. + # If neither a nor b are true, then this source block was skipped. 
+ if change_log_record_pk or unsupported_reason: + models.ModulestoreBlockMigration.objects.create( + overall_migration=migration, + source=block_source, + target_id=target_entity_pk, + change_log_record_id=change_log_record_pk, + unsupported_reason=unsupported_reason, + ) processed += 1 if processed % 10 == 0 or processed == total_nodes: diff --git a/cms/djangoapps/modulestore_migrator/tests/test_api.py b/cms/djangoapps/modulestore_migrator/tests/test_api.py index d208ff85e375..c9e2fc3b587b 100644 --- a/cms/djangoapps/modulestore_migrator/tests/test_api.py +++ b/cms/djangoapps/modulestore_migrator/tests/test_api.py @@ -3,11 +3,10 @@ """ import pytest -from opaque_keys.edx.locator import LibraryLocatorV2 +from opaque_keys.edx.locator import LibraryLocator, LibraryLocatorV2 from openedx_learning.api import authoring as authoring_api from organizations.tests.factories import OrganizationFactory -from cms.djangoapps.contentstore.tests.test_libraries import LibraryTestCase from cms.djangoapps.modulestore_migrator import api from cms.djangoapps.modulestore_migrator.data import CompositionLevel, RepeatHandlingStrategy from cms.djangoapps.modulestore_migrator.models import ModulestoreMigration @@ -15,32 +14,50 @@ from common.djangoapps.student.tests.factories import UserFactory from openedx.core.djangoapps.content_libraries import api as lib_api +from xmodule.modulestore.tests.utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import BlockFactory, LibraryFactory @pytest.mark.django_db -class TestModulestoreMigratorAPI(LibraryTestCase): +class TestModulestoreMigratorAPI(ModuleStoreTestCase): """ Test cases for the modulestore migrator API. 
""" def setUp(self): super().setUp() - - self.organization = OrganizationFactory() - self.lib_key_v2 = LibraryLocatorV2.from_string( - f"lib:{self.organization.short_name}:test-key" - ) - lib_api.create_library( - org=self.organization, - slug=self.lib_key_v2.slug, - title="Test Library", - ) - self.library_v2 = lib_api.ContentLibrary.objects.get(slug=self.lib_key_v2.slug) - self.learning_package = self.library_v2.learning_package - self.blocks = [] - for _ in range(3): - self.blocks.append(self._add_simple_content_block().usage_key) + self.user = UserFactory(password=self.user_password, is_staff=True) + self.organization = OrganizationFactory(name="My Org", short_name="myorg") + self.lib_key_v1 = LibraryLocator.from_string("library-v1:myorg+old") + LibraryFactory.create(org="myorg", library="old", display_name="Old Library", modulestore=self.store) + self.lib_key_v2_1 = LibraryLocatorV2.from_string("lib:myorg:1") + self.lib_key_v2_2 = LibraryLocatorV2.from_string("lib:myorg:2") + lib_api.create_library(org=self.organization, slug="1", title="Test Library 1") + lib_api.create_library(org=self.organization, slug="2", title="Test Library 2") + self.library_v2_1 = lib_api.ContentLibrary.objects.get(slug="1") + self.library_v2_2 = lib_api.ContentLibrary.objects.get(slug="2") + self.learning_package = self.library_v2_1.learning_package + self.learning_package_2 = self.library_v2_2.learning_package + self.source_unit_keys = [ + BlockFactory.create( + display_name=f"Unit {c}", + category="vertical", + location=self.lib_key_v1.make_usage_key("vertical", c), + parent_location=self.lib_key_v1.make_usage_key("library", "library"), + user_id=self.user.id, publish_item=False, + ).usage_key for c in ["X", "Y", "Z"] + ] + self.source_html_keys = [ + BlockFactory.create( + display_name=f"HTML {c}", + category="html", + location=self.lib_key_v1.make_usage_key("html", c), + parent_location=self.lib_key_v1.make_usage_key("vertical", c), + user_id=self.user.id, publish_item=False, + 
).usage_key for c in ["X", "Y", "Z"] + ] + # We load this last so that it has an updated list of children. + self.lib_v1 = self.store.get_library(self.lib_key_v1) def test_start_migration_to_library(self): """ @@ -52,10 +69,10 @@ def test_start_migration_to_library(self): api.start_migration_to_library( user=user, source_key=source.key, - target_library_key=self.library_v2.library_key, + target_library_key=self.library_v2_1.library_key, target_collection_slug=None, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, preserve_url_slugs=True, forward_source_to_target=False, ) @@ -81,10 +98,10 @@ def test_start_bulk_migration_to_library(self): api.start_bulk_migration_to_library( user=user, source_key_list=[source.key, source_2.key], - target_library_key=self.library_v2.library_key, + target_library_key=self.library_v2_1.library_key, target_collection_slug_list=None, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, preserve_url_slugs=True, forward_source_to_target=False, ) @@ -128,10 +145,10 @@ def test_start_migration_to_library_with_collection(self): api.start_migration_to_library( user=user, source_key=source.key, - target_library_key=self.library_v2.library_key, + target_library_key=self.library_v2_1.library_key, target_collection_slug=collection_key, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, preserve_url_slugs=True, forward_source_to_target=False, ) @@ -157,9 +174,9 @@ def test_start_migration_to_library_with_strategy_skip(self): 
api.start_migration_to_library( user=user, source_key=source.key, - target_library_key=self.library_v2.library_key, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + target_library_key=self.library_v2_1.library_key, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, preserve_url_slugs=True, forward_source_to_target=False, ) @@ -167,7 +184,7 @@ def test_start_migration_to_library_with_strategy_skip(self): modulestoremigration = ModulestoreMigration.objects.get() assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Skip.value - migrated_components = lib_api.get_library_components(self.library_v2.library_key) + migrated_components = lib_api.get_library_components(self.library_v2_1.library_key) assert len(migrated_components) == 1 # Update the block, changing its name @@ -178,9 +195,9 @@ def test_start_migration_to_library_with_strategy_skip(self): api.start_migration_to_library( user=user, source_key=source.key, - target_library_key=self.library_v2.library_key, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + target_library_key=self.library_v2_1.library_key, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, preserve_url_slugs=True, forward_source_to_target=False, ) @@ -189,11 +206,11 @@ def test_start_migration_to_library_with_strategy_skip(self): assert modulestoremigration is not None assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Skip.value - migrated_components_fork = lib_api.get_library_components(self.library_v2.library_key) + migrated_components_fork = lib_api.get_library_components(self.library_v2_1.library_key) assert len(migrated_components_fork) == 1 component = lib_api.LibraryXBlockMetadata.from_component( - self.library_v2.library_key, 
migrated_components_fork[0] + self.library_v2_1.library_key, migrated_components_fork[0] ) assert component.display_name == "Original Block" @@ -215,9 +232,9 @@ def test_start_migration_to_library_with_strategy_update(self): api.start_migration_to_library( user=user, source_key=source.key, - target_library_key=self.library_v2.library_key, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + target_library_key=self.library_v2_1.library_key, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, preserve_url_slugs=True, forward_source_to_target=False, ) @@ -225,7 +242,7 @@ def test_start_migration_to_library_with_strategy_update(self): modulestoremigration = ModulestoreMigration.objects.get() assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Skip.value - migrated_components = lib_api.get_library_components(self.library_v2.library_key) + migrated_components = lib_api.get_library_components(self.library_v2_1.library_key) assert len(migrated_components) == 1 # Update the block, changing its name @@ -236,9 +253,9 @@ def test_start_migration_to_library_with_strategy_update(self): api.start_migration_to_library( user=user, source_key=source.key, - target_library_key=self.library_v2.library_key, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Update.value, + target_library_key=self.library_v2_1.library_key, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Update, preserve_url_slugs=True, forward_source_to_target=False, ) @@ -247,11 +264,11 @@ def test_start_migration_to_library_with_strategy_update(self): assert modulestoremigration is not None assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Update.value - migrated_components_fork = lib_api.get_library_components(self.library_v2.library_key) + 
migrated_components_fork = lib_api.get_library_components(self.library_v2_1.library_key) assert len(migrated_components_fork) == 1 component = lib_api.LibraryXBlockMetadata.from_component( - self.library_v2.library_key, migrated_components_fork[0] + self.library_v2_1.library_key, migrated_components_fork[0] ) assert component.display_name == "Updated Block" @@ -273,9 +290,9 @@ def test_start_migration_to_library_with_strategy_forking(self): api.start_migration_to_library( user=user, source_key=source.key, - target_library_key=self.library_v2.library_key, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + target_library_key=self.library_v2_1.library_key, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, preserve_url_slugs=True, forward_source_to_target=False, ) @@ -283,7 +300,7 @@ def test_start_migration_to_library_with_strategy_forking(self): modulestoremigration = ModulestoreMigration.objects.get() assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Skip.value - migrated_components = lib_api.get_library_components(self.library_v2.library_key) + migrated_components = lib_api.get_library_components(self.library_v2_1.library_key) assert len(migrated_components) == 1 # Update the block, changing its name @@ -294,9 +311,9 @@ def test_start_migration_to_library_with_strategy_forking(self): api.start_migration_to_library( user=user, source_key=source.key, - target_library_key=self.library_v2.library_key, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Fork.value, + target_library_key=self.library_v2_1.library_key, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Fork, preserve_url_slugs=True, forward_source_to_target=False, ) @@ -305,16 +322,16 @@ def test_start_migration_to_library_with_strategy_forking(self): assert 
modulestoremigration is not None assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Fork.value - migrated_components_fork = lib_api.get_library_components(self.library_v2.library_key) + migrated_components_fork = lib_api.get_library_components(self.library_v2_1.library_key) assert len(migrated_components_fork) == 2 first_component = lib_api.LibraryXBlockMetadata.from_component( - self.library_v2.library_key, migrated_components_fork[0] + self.library_v2_1.library_key, migrated_components_fork[0] ) assert first_component.display_name == "Original Block" second_component = lib_api.LibraryXBlockMetadata.from_component( - self.library_v2.library_key, migrated_components_fork[1] + self.library_v2_1.library_key, migrated_components_fork[1] ) assert second_component.display_name == "Updated Block" @@ -326,9 +343,9 @@ def test_start_migration_to_library_with_strategy_forking(self): api.start_migration_to_library( user=user, source_key=source.key, - target_library_key=self.library_v2.library_key, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Fork.value, + target_library_key=self.library_v2_1.library_key, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Fork, preserve_url_slugs=True, forward_source_to_target=False, ) @@ -337,74 +354,215 @@ def test_start_migration_to_library_with_strategy_forking(self): assert modulestoremigration is not None assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Fork.value - migrated_components_fork = lib_api.get_library_components(self.library_v2.library_key) + migrated_components_fork = lib_api.get_library_components(self.library_v2_1.library_key) assert len(migrated_components_fork) == 3 first_component = lib_api.LibraryXBlockMetadata.from_component( - self.library_v2.library_key, migrated_components_fork[0] + self.library_v2_1.library_key, migrated_components_fork[0] ) assert 
first_component.display_name == "Original Block" second_component = lib_api.LibraryXBlockMetadata.from_component( - self.library_v2.library_key, migrated_components_fork[1] + self.library_v2_1.library_key, migrated_components_fork[1] ) assert second_component.display_name == "Updated Block" third_component = lib_api.LibraryXBlockMetadata.from_component( - self.library_v2.library_key, migrated_components_fork[2] + self.library_v2_1.library_key, migrated_components_fork[2] ) assert third_component.display_name == "Updated Block Again" - def test_get_migration_info(self): + def test_migration_api_for_various_scenarios(self): """ - Test that the API can retrieve migration info. + Test that get_migrations, get_block_migrations, forward_context, and forward_block + behave as expected throughout a convoluted series of intertwined migrations. + + Also, ensure that each of the aforementioned api functions only performs 1 query each. """ + # pylint: disable=too-many-statements user = UserFactory() - collection_key = "test-collection" + all_source_usage_keys = {*self.source_html_keys, *self.source_unit_keys} + all_source_usage_key_strs = {str(sk) for sk in all_source_usage_keys} + + # In this test, we will be migrating self.lib_v1 a total of 6 times. + # We will migrate it to each collection (A, B, and C) twice. + + # Lib 1 has Collection A and Collection B + # Lib 2 has Collection C authoring_api.create_collection( learning_package_id=self.learning_package.id, - key=collection_key, - title="Test Collection", + key="test-collection-1a", + title="Test Collection A in Lib 1", + created_by=user.id, + ) + authoring_api.create_collection( + learning_package_id=self.learning_package.id, + key="test-collection-1b", + title="Test Collection B in Lib 1", + created_by=user.id, + ) + authoring_api.create_collection( + learning_package_id=self.learning_package_2.id, + key="test-collection-2c", + title="Test Collection C in Lib 2", created_by=user.id, ) + # No migrations have happened. 
+ # Everything should return None / empty. + assert not list(api.get_migrations(self.lib_key_v1)) + assert not api.get_forwarding(source_key=self.lib_key_v1) + assert not api.get_forwarding_for_blocks(all_source_usage_keys) + + # FOUR MIGRATIONS! + # * Migrate to Lib1.CollA + # * Migrate to Lib1.CollB using FORK strategy + # * Migrate to Lib1.CollA using UPDATE strategy + # * Migrate to Lib2.CollC + # Note: None of these are forwarding migrations! api.start_migration_to_library( user=user, - source_key=self.lib_key, - target_library_key=self.library_v2.library_key, - target_collection_slug=collection_key, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + source_key=self.lib_key_v1, + target_library_key=self.lib_key_v2_1, + target_collection_slug="test-collection-1a", + composition_level=CompositionLevel.Unit, + # repeat_handling_strategy here is arbitrary, as there will be no repeats. + repeat_handling_strategy=RepeatHandlingStrategy.Skip, preserve_url_slugs=True, - forward_source_to_target=True, + forward_source_to_target=False, + ) + api.start_migration_to_library( + user=user, + source_key=self.lib_key_v1, + target_library_key=self.lib_key_v2_1, + target_collection_slug="test-collection-1b", + composition_level=CompositionLevel.Unit, + # this will create a 2nd copy of every block + repeat_handling_strategy=RepeatHandlingStrategy.Fork, + preserve_url_slugs=True, + forward_source_to_target=False, + ) + api.start_migration_to_library( + user=user, + source_key=self.lib_key_v1, + target_library_key=self.lib_key_v2_1, + target_collection_slug="test-collection-1a", + composition_level=CompositionLevel.Unit, + # this will update the 2nd copies, but put them in the same collection as the first copies + repeat_handling_strategy=RepeatHandlingStrategy.Update, + preserve_url_slugs=True, + forward_source_to_target=False, ) + api.start_migration_to_library( + user=user, + source_key=self.lib_key_v1, + 
target_library_key=self.lib_key_v2_2, + target_collection_slug="test-collection-2c", + composition_level=CompositionLevel.Unit, + # repeat_handling_strategy here is arbitrary, as there will be no repeats. + repeat_handling_strategy=RepeatHandlingStrategy.Skip, + preserve_url_slugs=True, + forward_source_to_target=False, + ) + # get_migrations returns in reverse chronological order with self.assertNumQueries(1): - result = api.get_migration_info([self.lib_key]) - row = result.get(self.lib_key) - assert row is not None - assert row.migrations__target__key == str(self.lib_key_v2) - assert row.migrations__target__title == "Test Library" - assert row.migrations__target_collection__key == collection_key - assert row.migrations__target_collection__title == "Test Collection" - - def test_get_target_block_usage_keys(self): - """ - Test that the API can get the list of target block usage keys for a given library. - """ - user = UserFactory() - + migration_2c_i, migration_1a_ii, migration_1b_i, migration_1a_i = api.get_migrations(self.lib_key_v1) + assert not migration_1a_i.is_failed + assert not migration_1b_i.is_failed + assert not migration_1a_ii.is_failed + assert not migration_2c_i.is_failed + # Confirm that the metadata came back correctly. + assert migration_1a_i.target_key == self.lib_key_v2_1 + assert migration_1a_i.target_title == "Test Library 1" + assert migration_1a_i.target_collection_slug == "test-collection-1a" + assert migration_1a_i.target_collection_title == "Test Collection A in Lib 1" + assert migration_2c_i.target_key == self.lib_key_v2_2 + assert migration_2c_i.target_title == "Test Library 2" + assert migration_2c_i.target_collection_slug == "test-collection-2c" + assert migration_2c_i.target_collection_title == "Test Collection C in Lib 2" + # Call get_migration_blocks on each of the four migrations. Convert the mapping + # from UsageKey->BlockMigrationResult into str->str just so we can assert things more easily. 
+ with self.assertNumQueries(1): + mappings_1a_i = { + str(sk): str(bm.target_key) for sk, bm in api.get_migration_blocks(migration_1a_i.pk).items() + } + mappings_1b_i = { + str(sk): str(bm.target_key) for sk, bm in api.get_migration_blocks(migration_1b_i.pk).items() + } + mappings_1a_ii = { + str(sk): str(bm.target_key) for sk, bm in api.get_migration_blocks(migration_1a_ii.pk).items() + } + mappings_2c_i = { + str(sk): str(bm.target_key) for sk, bm in api.get_migration_blocks(migration_2c_i.pk).items() + } + # Each migration should have migrated every source block. + assert set(mappings_1a_i.keys()) == all_source_usage_key_strs + assert set(mappings_1b_i.keys()) == all_source_usage_key_strs + assert set(mappings_1a_ii.keys()) == all_source_usage_key_strs + assert set(mappings_2c_i.keys()) == all_source_usage_key_strs + # Because the migration to Lib1.CollB used FORK, we expect that there is nothing in + # common between it and the prior migration to Lib1.CollA. + assert not (set(mappings_1a_i.values()) & set(mappings_1b_i.values())) + # Because the second migration to Lib1.CollA used UPDATE, we expect that it + # will have all the same mappings as the prior migration to Lib1.CollB. + # This is a little counterintuitive, since the migrations targeted different collections, + # but the rule that the migrator follows is "UPDATE uses the block from the most recent migration". + assert mappings_1b_i == mappings_1a_ii + # Since forward_source_to_target=False, we have had no authoritative migration yet. + assert api.get_forwarding(self.lib_key_v1) is None + assert not api.get_forwarding_for_blocks(all_source_usage_keys) + + # ANOTHER MIGRATION! 
+ # * Migrate to Lib2.CollC using UPDATE strategy + # Note: This *is* a forwarding migration api.start_migration_to_library( user=user, - source_key=self.lib_key, - target_library_key=self.library_v2.library_key, - target_collection_slug=None, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + source_key=self.lib_key_v1, + target_library_key=self.lib_key_v2_2, + target_collection_slug="test-collection-2c", + composition_level=CompositionLevel.Unit, + repeat_handling_strategy=RepeatHandlingStrategy.Update, preserve_url_slugs=True, forward_source_to_target=True, ) + migration_2c_ii, _2c_i, _1a_ii, _1b_i, migration_1a_i_reloaded = api.get_migrations(self.lib_key_v1) + assert migration_1a_i_reloaded.pk == migration_1a_i.pk + assert not migration_2c_ii.is_failed + # Our source lib should now forward to Lib2. + with self.assertNumQueries(1): + forwarded = api.get_forwarding(self.lib_key_v1) + assert forwarded.target_key == self.lib_key_v2_2 + assert forwarded.target_collection_slug == "test-collection-2c" + assert forwarded.pk == migration_2c_ii.pk + # Our source lib's blocks should now forward to ones in Lib2. with self.assertNumQueries(1): - result = api.get_target_block_usage_keys(self.lib_key) - for key in self.blocks: - assert result.get(key) is not None + forwarded_blocks = api.get_forwarding_for_blocks(all_source_usage_keys) + assert forwarded_blocks[self.source_html_keys[1]].target_key.context_key == self.lib_key_v2_2 + assert forwarded_blocks[self.source_unit_keys[1]].target_key.context_key == self.lib_key_v2_2 + + # FINAL MIGRATION! + # * Migrate to Lib1.CollB using UPDATE strategy + # Note: This *is* a forwarding migration, and should supplant the previous + # migration for forwarding purposes. 
+ api.start_migration_to_library( + user=user, + source_key=self.lib_key_v1, + target_library_key=self.lib_key_v2_1, + target_collection_slug="test-collection-1b", + composition_level=CompositionLevel.Unit, + repeat_handling_strategy=RepeatHandlingStrategy.Update, + preserve_url_slugs=True, + forward_source_to_target=True, + ) + migration_1b_ii, _2c_ii, _2c_i, _1a_ii, _1b_i, _1a_i = api.get_migrations(self.lib_key_v1) + assert not migration_1b_ii.is_failed + # Our source lib should now forward to Lib1. + forwarded = api.get_forwarding(self.lib_key_v1) + assert forwarded.target_key == self.lib_key_v2_1 + assert forwarded.target_collection_slug == "test-collection-1b" + assert forwarded.pk == migration_1b_ii.pk + # Our source lib should now forward to Lib1. + forwarded_blocks = api.get_forwarding_for_blocks(all_source_usage_keys) + assert forwarded_blocks[self.source_html_keys[1]].target_key.context_key == self.lib_key_v2_1 + assert forwarded_blocks[self.source_unit_keys[1]].target_key.context_key == self.lib_key_v2_1 diff --git a/cms/djangoapps/modulestore_migrator/tests/test_rest_api.py b/cms/djangoapps/modulestore_migrator/tests/test_rest_api.py new file mode 100644 index 000000000000..4a7f842902db --- /dev/null +++ b/cms/djangoapps/modulestore_migrator/tests/test_rest_api.py @@ -0,0 +1,822 @@ +""" +Unit tests for the modulestore_migrator REST API v1 views. + +These tests focus on validation, HTTP status codes, and serialization/deserialization. +Business logic is mocked out. 
+""" +from unittest.mock import MagicMock, patch +from uuid import uuid4 + +from django.contrib.auth import get_user_model +from django.test import TestCase +from opaque_keys.edx.locator import CourseLocator +from organizations.tests.factories import OrganizationFactory +from rest_framework import status +from rest_framework.exceptions import PermissionDenied +from rest_framework.test import APIRequestFactory, force_authenticate +from user_tasks.models import UserTaskStatus + +from cms.djangoapps.modulestore_migrator.models import ( + ModulestoreMigration as ModulestoreMigrationModel, + ModulestoreSource, +) +from cms.djangoapps.modulestore_migrator.rest_api.v1.views import ( + BulkMigrationViewSet, + MigrationViewSet, +) +from openedx.core.djangoapps.content_libraries import api as lib_api + + +User = get_user_model() + + +class TestMigrationViewSetCreate(TestCase): + """ + Test the MigrationViewSet.create() endpoint. + + Focus: validation, return codes, serialization/deserialization. + """ + + def setUp(self): + """Set up test fixtures.""" + self.factory = APIRequestFactory() + self.view = MigrationViewSet.as_view({'post': 'create'}) + + # Create test user + self.user = User.objects.create_user( + username='testuser', + email='testuser@test.com', + password='password' + ) + + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.migrator_api') + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.lib_api') + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth') + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.UserTaskStatus') + def test_create_migration_success_with_minimal_data( + self, mock_user_task_status, mock_auth, mock_lib_api, mock_migrator_api + ): + """ + Test successful migration creation with minimal required fields. 
+ + Validates: + - 201 status code is returned + - Response contains expected serialized fields + - Request data is properly deserialized + - Permission checks are performed for both source and target + """ + mock_auth.has_studio_write_access.return_value = True + mock_lib_api.require_permission_for_library_key.return_value = None + + mock_task = MagicMock(autospec=True) + mock_task.id = 'test-task-id' + mock_migrator_api.start_migration_to_library.return_value = mock_task + + mock_task_status = MagicMock(autospec=True) + mock_task_status.uuid = uuid4() + mock_task_status.state = 'Pending' + mock_task_status.state_text = 'Pending' + mock_task_status.completed_steps = 0 + mock_task_status.total_steps = 10 + mock_task_status.attempts = 1 + mock_task_status.created = '2025-01-01T00:00:00Z' + mock_task_status.modified = '2025-01-01T00:00:00Z' + mock_task_status.artifacts = [] + mock_task_status.migrations.all.return_value = [] + + mock_user_task_status.objects.get.return_value = mock_task_status + + request_data = { + 'source': 'course-v1:TestOrg+TestCourse+TestRun', + 'target': 'lib:TestOrg:TestLibrary', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + + response = self.view(request) + + assert response.status_code == status.HTTP_201_CREATED + + assert 'uuid' in response.data + assert 'state' in response.data + assert 'state_text' in response.data + assert 'completed_steps' in response.data + assert 'total_steps' in response.data + assert 'parameters' in response.data + + mock_auth.has_studio_write_access.assert_called_once() + mock_lib_api.require_permission_for_library_key.assert_called_once() + + mock_migrator_api.start_migration_to_library.assert_called_once() + call_kwargs = mock_migrator_api.start_migration_to_library.call_args[1] + assert call_kwargs['user'] == self.user + assert str(call_kwargs['source_key']) == 
'course-v1:TestOrg+TestCourse+TestRun' + assert str(call_kwargs['target_library_key']) == 'lib:TestOrg:TestLibrary' + + def test_create_migration_invalid_source_key(self): + """ + Test that invalid source key returns 400 Bad Request. + + Validates: + - 400 status code is returned + - Error message mentions validation failure + """ + request_data = { + 'source': 'not-a-valid-key', + 'target': 'lib:TestOrg:TestLibrary', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + + response = self.view(request) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert 'source' in response.data + + def test_create_migration_invalid_target_key(self): + """ + Test that invalid target library key returns 400 Bad Request. + + Validates: + - 400 status code is returned + - Error message mentions target validation failure + """ + request_data = { + 'source': 'course-v1:TestOrg+TestCourse+TestRun', + 'target': 'not-a-valid-library-key', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + + response = self.view(request) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert 'target' in response.data + + def test_create_migration_missing_required_fields(self): + """ + Test that missing required fields returns 400 Bad Request. 
+ + Validates: + - 400 status code is returned when source is missing + - 400 status code is returned when target is missing + """ + request_data = { + 'target': 'lib:TestOrg:TestLibrary', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + response = self.view(request) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert 'source' in response.data + + request_data = { + 'source': 'course-v1:TestOrg+TestCourse+TestRun', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + response = self.view(request) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert 'target' in response.data + + def test_create_migration_unauthenticated_user(self): + """ + Test that unauthenticated requests return 401 Unauthorized. + + Validates: + - 401 status code for unauthenticated requests + """ + request_data = { + 'source': 'course-v1:TestOrg+TestCourse+TestRun', + 'target': 'lib:TestOrg:TestLibrary', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + # Note: No force_authenticate call + + response = self.view(request) + + assert response.status_code in [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN] + + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth') + def test_create_migration_without_source_author_access(self, mock_auth): + """ + Test that users without author access to source cannot create migrations. 
+ + Validates: + - 403 Forbidden status code when user lacks author access to source + """ + mock_auth.has_studio_write_access.return_value = False + + request_data = { + 'source': 'course-v1:TestOrg+TestCourse+TestRun', + 'target': 'lib:TestOrg:TestLibrary', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + + response = self.view(request) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.lib_api') + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth') + def test_create_migration_without_target_write_access(self, mock_auth, mock_lib_api): + """ + Test that users without write access to target cannot create migrations. + + Validates: + - 403 Forbidden status code when user lacks write access to target library + """ + mock_auth.has_studio_write_access.return_value = True + mock_lib_api.require_permission_for_library_key.side_effect = PermissionDenied( + "User lacks permission to manage content in this library" + ) + + request_data = { + 'source': 'course-v1:TestOrg+TestCourse+TestRun', + 'target': 'lib:TestOrg:TestLibrary', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + + response = self.view(request) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.migrator_api') + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.lib_api') + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth') + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.UserTaskStatus') + def test_create_migration_with_optional_fields( + self, mock_user_task_status, mock_auth, mock_lib_api, mock_migrator_api + ): + """ + Test migration creation with all optional 
fields provided. + + Validates: + - Optional fields are properly deserialized + - Default values are not used when explicit values provided + """ + mock_auth.has_studio_write_access.return_value = True + mock_lib_api.require_permission_for_library_key.return_value = None + + mock_task = MagicMock(autospec=True) + mock_task.id = 'test-task-id' + mock_migrator_api.start_migration_to_library.return_value = mock_task + + mock_task_status = MagicMock(autospec=True) + mock_task_status.uuid = uuid4() + mock_task_status.state = 'Pending' + mock_task_status.state_text = 'Pending' + mock_task_status.completed_steps = 0 + mock_task_status.total_steps = 10 + mock_task_status.attempts = 1 + mock_task_status.created = '2025-01-01T00:00:00Z' + mock_task_status.modified = '2025-01-01T00:00:00Z' + mock_task_status.artifacts = [] + mock_task_status.migrations.all.return_value = [] + + mock_user_task_status.objects.get.return_value = mock_task_status + + request_data = { + 'source': 'course-v1:TestOrg+TestCourse+TestRun', + 'target': 'lib:TestOrg:TestLibrary', + 'target_collection_slug': 'my-collection', + 'composition_level': 'unit', + 'repeat_handling_strategy': 'update', + 'preserve_url_slugs': False, + 'forward_source_to_target': True, + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + + response = self.view(request) + + assert response.status_code == status.HTTP_201_CREATED + + mock_migrator_api.start_migration_to_library.assert_called_once() + call_kwargs = mock_migrator_api.start_migration_to_library.call_args[1] + assert call_kwargs['target_collection_slug'] == 'my-collection' + # CompositionLevel and RepeatHandlingStrategy are enums + assert call_kwargs['composition_level'].value == 'unit' + assert call_kwargs['repeat_handling_strategy'].value == 'update' + assert call_kwargs['preserve_url_slugs'] is False + assert call_kwargs['forward_source_to_target'] is 
True + + def test_create_migration_invalid_composition_level(self): + """ + Test that invalid composition_level returns 400 Bad Request. + + Validates: + - 400 status code for invalid enum value + """ + request_data = { + 'source': 'course-v1:TestOrg+TestCourse+TestRun', + 'target': 'lib:TestOrg:TestLibrary', + 'composition_level': 'invalid_level', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + + response = self.view(request) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert 'composition_level' in response.data + + def test_create_migration_invalid_repeat_handling_strategy(self): + """ + Test that invalid repeat_handling_strategy returns 400 Bad Request. + + Validates: + - 400 status code for invalid enum value + """ + request_data = { + 'source': 'course-v1:TestOrg+TestCourse+TestRun', + 'target': 'lib:TestOrg:TestLibrary', + 'repeat_handling_strategy': 'invalid_strategy', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/migrations/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + + response = self.view(request) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert 'repeat_handling_strategy' in response.data + + +class TestMigrationViewSetList(TestCase): + """ + Test the MigrationViewSet.list() endpoint. + + Focus: validation, return codes, serialization/deserialization. 
+ """ + + def setUp(self): + """Set up test fixtures.""" + self.factory = APIRequestFactory() + self.view = MigrationViewSet.as_view({'get': 'list'}) + + self.user = User.objects.create_user( + username='testuser', + email='testuser@test.com', + password='password' + ) + self.other_user = User.objects.create_user( + username='otheruser', + email='otheruser@test.com', + password='password' + ) + + def test_list_migrations_success(self): + """ + Test successful listing of migrations for the authenticated user. + + Validates: + - 200 status code is returned + - Response contains list of migrations + - Only user's own migrations are returned (other users' migrations filtered out) + """ + org = OrganizationFactory(short_name="TestOrg", name="Test Org") + source_key = CourseLocator.from_string('course-v1:TestOrg+TestCourse+TestRun') + source = ModulestoreSource.objects.create(key=str(source_key)) + target = lib_api.create_library(org=org, slug="TestLib", title="Test Target Lib") + user_task_status = UserTaskStatus.objects.create( + user=self.user, + task_id='user-task-id', + task_class='test.Task', + name='User Migration', + total_steps=10, + completed_steps=10, + ) + other_task_status = UserTaskStatus.objects.create( + user=self.other_user, + task_id='other-task-id', + task_class='test.Task', + name='Other Migration', + total_steps=5, + completed_steps=5, + ) + ModulestoreMigrationModel.objects.create( + task_status=user_task_status, + source=source, + target_id=target.learning_package_id, + ) + ModulestoreMigrationModel.objects.create( + task_status=other_task_status, + source=source, + target_id=target.learning_package_id, + ) + + request = self.factory.get('/api/modulestore_migrator/v1/migrations/') + force_authenticate(request, user=self.user) + response = self.view(request) + + assert response.status_code == status.HTTP_200_OK + results = response.data['results'] + assert len(results) == 1 + assert results[0]['uuid'] == str(user_task_status.uuid) + + def 
test_list_migrations_unauthenticated(self): + """ + Test that unauthenticated requests return 401 Unauthorized. + + Validates: + - 401 status code for unauthenticated requests + """ + request = self.factory.get('/api/modulestore_migrator/v1/migrations/') + + response = self.view(request) + + assert response.status_code == status.HTTP_401_UNAUTHORIZED + + +class TestMigrationViewSetRetrieve(TestCase): + """ + Test the MigrationViewSet.retrieve() endpoint. + + Focus: validation, return codes, serialization/deserialization. + """ + + def setUp(self): + """Set up test fixtures.""" + self.factory = APIRequestFactory() + self.view = MigrationViewSet.as_view({'get': 'retrieve'}) + + self.user = User.objects.create_user( + username='testuser', + email='testuser@test.com', + password='password' + ) + + def test_retrieve_migration_success(self): + """ + Test successful retrieval of a specific migration by UUID. + + Validates: + - 200 status code is returned + - Response contains migration details + """ + org = OrganizationFactory(short_name="TestOrg", name="Test Org") + source_key = CourseLocator.from_string('course-v1:TestOrg+TestCourse+TestRun') + source = ModulestoreSource.objects.create(key=str(source_key)) + target = lib_api.create_library(org=org, slug="TestLib", title="Test Target Lib") + user_task_status = UserTaskStatus.objects.create( + user=self.user, + task_id='user-task-id', + task_class='test.Task', + name='User Migration', + total_steps=10, + completed_steps=10, + ) + ModulestoreMigrationModel.objects.create( + task_status=user_task_status, + source=source, + target_id=target.learning_package_id, + ) + + request = self.factory.get(f'/api/modulestore_migrator/v1/migrations/{user_task_status.uuid}/') + force_authenticate(request, user=self.user) + response = self.view(request, uuid=str(user_task_status.uuid)) + + assert response.status_code == status.HTTP_200_OK + assert response.data['uuid'] == str(user_task_status.uuid) + assert 'parameters' in response.data + 
+ def test_retrieve_migration_other_user(self): + """ + Test that users cannot retrieve migrations created by other users. + + Validates: + - 404 status code when attempting to retrieve another user's migration + - Users are isolated to their own migrations + """ + other_user = User.objects.create_user( + username='otheruser', + email='other@test.com', + password='password' + ) + org = OrganizationFactory(short_name="TestOrg", name="Test Org") + source_key = CourseLocator.from_string('course-v1:TestOrg+TestCourse+TestRun') + source = ModulestoreSource.objects.create(key=str(source_key)) + target = lib_api.create_library(org=org, slug="TestLib", title="Test Target Lib") + other_task_status = UserTaskStatus.objects.create( + user=other_user, + task_id='other-task-id', + task_class='test.Task', + name='Other Migration', + total_steps=10, + completed_steps=10, + ) + ModulestoreMigrationModel.objects.create( + task_status=other_task_status, + source=source, + target_id=target.learning_package_id, + ) + + request = self.factory.get(f'/api/modulestore_migrator/v1/migrations/{other_task_status.uuid}/') + force_authenticate(request, user=self.user) + response = self.view(request, uuid=str(other_task_status.uuid)) + + assert response.status_code == status.HTTP_404_NOT_FOUND + + def test_retrieve_migration_unauthenticated(self): + """ + Test that unauthenticated requests return 401 Unauthorized. + + Validates: + - 401 status code for unauthenticated requests + """ + task_uuid = uuid4() + request = self.factory.get(f'/api/modulestore_migrator/v1/migrations/{task_uuid}/') + + response = self.view(request, uuid=str(task_uuid)) + + assert response.status_code == status.HTTP_401_UNAUTHORIZED + + +class TestMigrationViewSetCancel(TestCase): + """ + Test the MigrationViewSet.cancel() endpoint. + + Focus: validation, return codes, authorization. 
+ """ + + def setUp(self): + """Set up test fixtures.""" + self.factory = APIRequestFactory() + self.view = MigrationViewSet.as_view({'post': 'cancel'}) + + self.staff_user = User.objects.create_user( + username='staffuser', + email='staff@test.com', + password='password', + is_staff=True + ) + self.regular_user = User.objects.create_user( + username='regularuser', + email='regular@test.com', + password='password', + is_staff=False + ) + + def test_cancel_migration_as_staff(self): + """ + Test that staff users can cancel migrations. + + Validates: + - Staff users can successfully cancel migrations + - UserTaskStatus.cancel is called + """ + org = OrganizationFactory(short_name="TestOrg", name="Test Org") + source_key = CourseLocator.from_string('course-v1:TestOrg+TestCourse+TestRun') + source = ModulestoreSource.objects.create(key=str(source_key)) + target = lib_api.create_library(org=org, slug="TestLib", title="Test Target Lib") + user_task_status = UserTaskStatus.objects.create( + user=self.staff_user, + task_id='staff-task-id', + task_class='test.Task', + name='Staff Migration', + total_steps=10, + completed_steps=5, + ) + ModulestoreMigrationModel.objects.create( + task_status=user_task_status, + source=source, + target_id=target.learning_package_id, + ) + + with patch.object(UserTaskStatus, 'cancel') as mock_cancel: + request = self.factory.post( + f'/api/modulestore_migrator/v1/migrations/{user_task_status.uuid}/cancel/' + ) + force_authenticate(request, user=self.staff_user) + response = self.view(request, uuid=str(user_task_status.uuid)) + + assert response.status_code == status.HTTP_200_OK + mock_cancel.assert_called_once() + + def test_cancel_migration_not_found(self): + """ + Test that attempting to cancel a non-existent migration returns 404. 
+ + Validates: + - 404 status code when migration UUID does not exist + """ + nonexistent_uuid = uuid4() + request = self.factory.post( + f'/api/modulestore_migrator/v1/migrations/{nonexistent_uuid}/cancel/' + ) + force_authenticate(request, user=self.staff_user) + + response = self.view(request, uuid=str(nonexistent_uuid)) + + assert response.status_code == status.HTTP_404_NOT_FOUND + + def test_cancel_migration_as_non_staff(self): + """ + Test that non-staff users cannot cancel migrations. + + Validates: + - 403 Forbidden status code for non-staff users + """ + task_uuid = uuid4() + request = self.factory.post( + f'/api/modulestore_migrator/v1/migrations/{task_uuid}/cancel/' + ) + force_authenticate(request, user=self.regular_user) + + response = self.view(request, uuid=str(task_uuid)) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + def test_cancel_migration_unauthenticated(self): + """ + Test that unauthenticated users cannot cancel migrations. + """ + task_uuid = uuid4() + request = self.factory.post( + f'/api/modulestore_migrator/v1/migrations/{task_uuid}/cancel/' + ) + + response = self.view(request, uuid=str(task_uuid)) + + assert response.status_code == status.HTTP_401_UNAUTHORIZED + + +class TestBulkMigrationViewSetCreate(TestCase): + """ + Test the BulkMigrationViewSet.create() endpoint. + + Focus: validation, return codes, serialization/deserialization. 
+ """ + + def setUp(self): + """Set up test fixtures.""" + self.factory = APIRequestFactory() + self.view = BulkMigrationViewSet.as_view({'post': 'create'}) + + self.user = User.objects.create_user( + username='testuser', + email='testuser@test.com', + password='password' + ) + + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.migrator_api') + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.lib_api') + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth') + @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.UserTaskStatus') + def test_create_bulk_migration_success( + self, mock_user_task_status, mock_auth, mock_lib_api, mock_migrator_api + ): + """ + Test successful bulk migration creation with multiple sources. + + Validates: + - 201 status code is returned + - Response contains expected serialized fields + - Multiple sources are properly deserialized + """ + mock_auth.has_studio_write_access.return_value = True + mock_lib_api.require_permission_for_library_key.return_value = None + + mock_task = MagicMock(autospec=True) + mock_task.id = 'test-task-id' + mock_migrator_api.start_bulk_migration_to_library.return_value = mock_task + + mock_task_status = MagicMock(autospec=True) + mock_task_status.uuid = uuid4() + mock_task_status.state = 'Pending' + mock_task_status.state_text = 'Pending' + mock_task_status.completed_steps = 0 + mock_task_status.total_steps = 10 + mock_task_status.attempts = 1 + mock_task_status.created = '2025-01-01T00:00:00Z' + mock_task_status.modified = '2025-01-01T00:00:00Z' + mock_task_status.artifacts = [] + mock_task_status.migrations.all.return_value = [] + + mock_user_task_status.objects.get.return_value = mock_task_status + + request_data = { + 'sources': [ + 'course-v1:TestOrg+TestCourse1+Run1', + 'course-v1:TestOrg+TestCourse2+Run2' + ], + 'target': 'lib:TestOrg:TestLibrary', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/bulk_migration/', + data=request_data, + 
format='json' + ) + force_authenticate(request, user=self.user) + + response = self.view(request) + + assert response.status_code == status.HTTP_201_CREATED + assert 'uuid' in response.data + assert 'parameters' in response.data + + mock_migrator_api.start_bulk_migration_to_library.assert_called_once() + call_kwargs = mock_migrator_api.start_bulk_migration_to_library.call_args[1] + assert call_kwargs['source_key_list'] == [ + CourseLocator.from_string('course-v1:TestOrg+TestCourse1+Run1'), + CourseLocator.from_string('course-v1:TestOrg+TestCourse2+Run2'), + ] + assert call_kwargs['target_collection_slug_list'] is None + assert call_kwargs['create_collections'] is False + # CompositionLevel and RepeatHandlingStrategy are enums + assert call_kwargs['composition_level'].value == 'component' + assert call_kwargs['repeat_handling_strategy'].value == 'skip' + assert call_kwargs['preserve_url_slugs'] is False + assert call_kwargs['forward_source_to_target'] is None + + def test_create_bulk_migration_invalid_source_key(self): + """ + Test that invalid source key in list returns 400 Bad Request. + + Validates: + - 400 status code when one or more sources are invalid + """ + request_data = { + 'sources': ['not-a-valid-key', 'course-v1:TestOrg+TestCourse+TestRun'], + 'target': 'lib:TestOrg:TestLibrary', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/bulk_migration/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + + response = self.view(request) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert 'sources' in response.data + + def test_create_bulk_migration_missing_sources(self): + """ + Test that missing sources field returns 400 Bad Request. 
+ + Validates: + - 400 status code when sources is missing + """ + request_data = { + 'target': 'lib:TestOrg:TestLibrary', + } + request = self.factory.post( + '/api/modulestore_migrator/v1/bulk_migration/', + data=request_data, + format='json' + ) + force_authenticate(request, user=self.user) + response = self.view(request) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert 'sources' in response.data diff --git a/cms/djangoapps/modulestore_migrator/tests/test_tasks.py b/cms/djangoapps/modulestore_migrator/tests/test_tasks.py index afd422bf04ef..02ac7c6f2bbc 100644 --- a/cms/djangoapps/modulestore_migrator/tests/test_tasks.py +++ b/cms/djangoapps/modulestore_migrator/tests/test_tasks.py @@ -3,44 +3,45 @@ """ from unittest.mock import Mock, patch + import ddt from django.utils import timezone from lxml import etree from opaque_keys.edx.keys import CourseKey from opaque_keys.edx.locator import LibraryLocator, LibraryLocatorV2 -from openedx_learning.api.authoring_models import Collection, PublishableEntityVersion from openedx_learning.api import authoring as authoring_api +from openedx_learning.api.authoring_models import Collection, PublishableEntityVersion from organizations.tests.factories import OrganizationFactory from user_tasks.models import UserTaskArtifact from user_tasks.tasks import UserTaskStatus -from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase -from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory -from common.djangoapps.student.tests.factories import UserFactory from cms.djangoapps.modulestore_migrator.data import CompositionLevel, RepeatHandlingStrategy from cms.djangoapps.modulestore_migrator.models import ( ModulestoreMigration, ModulestoreSource, ) from cms.djangoapps.modulestore_migrator.tasks import ( + MigrationStep, + _BulkMigrationTask, _migrate_component, _migrate_container, _migrate_node, _MigratedNode, _MigrationContext, - _MigrationTask, - _BulkMigrationTask, - 
migrate_from_modulestore, bulk_migrate_from_modulestore, - MigrationStep, ) +from common.djangoapps.student.tests.factories import UserFactory from openedx.core.djangoapps.content_libraries import api as lib_api +from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase +from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory, BlockFactory + +from .. import api as migrator_api @ddt.ddt class TestMigrateFromModulestore(ModuleStoreTestCase): """ - Test the migrate_from_modulestore task + Test the bulk_migrate_from_modulestore task """ def setUp(self): @@ -100,6 +101,28 @@ def setUp(self): title="Test Collection 2", ) + def _make_migration_context(self, **kwargs) -> _MigrationContext: + """ + Builds a _MigrationContext object with default values, overridable with kwargs + """ + return _MigrationContext( + **{ + "used_component_keys": set(), + "used_container_slugs": set(), + "previous_block_migrations": {}, + "target_package_id": self.learning_package.id, + "target_library_key": self.library.library_key, + "source_context_key": self.course.id, + "content_by_filename": {}, + "composition_level": CompositionLevel.Unit, + "repeat_handling_strategy": RepeatHandlingStrategy.Skip, + "preserve_url_slugs": True, + "created_at": timezone.now(), + "created_by": self.user.id, + **kwargs, + }, + ) + def _get_task_status_fail_message(self, status): """ Helper method to get the failure message from a UserTaskStatus object. 
@@ -113,18 +136,7 @@ def test_migrate_node_wiki_tag(self): Test _migrate_node ignores wiki tags """ wiki_node = etree.fromstring("") - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) + context = self._make_migration_context() result = _migrate_node( context=context, @@ -143,19 +155,7 @@ def test_migrate_node_course_root(self): '' "" ) - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - + context = self._make_migration_context() result = _migrate_node( context=context, source_node=course_node, @@ -175,18 +175,7 @@ def test_migrate_node_library_root(self): '' "" ) - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) + context = self._make_migration_context() result = _migrate_node( context=context, source_node=library_node, @@ -215,19 +204,7 @@ def test_migrate_node_container_composition_level( container_node = etree.fromstring( f'<{tag_name} url_name="test_{tag_name}" display_name="Test {tag_name.title()}" />' 
) - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=composition_level, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - + context = self._make_migration_context(composition_level=composition_level) result = _migrate_node( context=context, source_node=container_node, @@ -235,9 +212,10 @@ def test_migrate_node_container_composition_level( if should_migrate: self.assertIsNotNone(result.source_to_target) - source_key, _ = result.source_to_target + source_key, _, reason = result.source_to_target self.assertEqual(source_key.block_type, tag_name) self.assertEqual(source_key.block_id, f"test_{tag_name}") + self.assertIsNone(reason) else: self.assertIsNone(result.source_to_target) @@ -248,25 +226,40 @@ def test_migrate_node_without_url_name(self): node_without_url_name = etree.fromstring( '' ) - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, + context = self._make_migration_context() + result = _migrate_node( + context=context, + source_node=node_without_url_name, ) + self.assertIsNone(result.source_to_target) + self.assertEqual(len(result.children), 0) + + def test_migrate_node_with_children_components(self): + """ + Test _migrate_node handles nodes with children components + """ + node_without_url_name = etree.fromstring(''' + + + + + ''') + context = self._make_migration_context() result = _migrate_node( context=context, 
source_node=node_without_url_name, ) - self.assertIsNone(result.source_to_target) + self.assertEqual( + result.source_to_target, + ( + self.course.id.make_usage_key('library_content', 'test_library_content'), + None, + 'The "library_content" XBlock (ID: "test_library_content") has children, ' + 'so it not supported in content libraries. It has 2 children blocks.', + ), + ) self.assertEqual(len(result.children), 0) def test_migrated_node_all_source_to_target_pairs(self): @@ -281,11 +274,11 @@ def test_migrated_node_all_source_to_target_pairs(self): key2 = self.course.id.make_usage_key("problem", "problem2") key3 = self.course.id.make_usage_key("problem", "problem3") - child_node = _MigratedNode(source_to_target=(key3, mock_version3), children=[]) + child_node = _MigratedNode(source_to_target=(key3, mock_version3, None), children=[]) parent_node = _MigratedNode( - source_to_target=(key1, mock_version1), + source_to_target=(key1, mock_version1, None), children=[ - _MigratedNode(source_to_target=(key2, mock_version2), children=[]), + _MigratedNode(source_to_target=(key2, mock_version2, None), children=[]), child_node, ], ) @@ -297,27 +290,6 @@ def test_migrated_node_all_source_to_target_pairs(self): self.assertEqual(pairs[1][0], key2) self.assertEqual(pairs[2][0], key3) - def test_migrate_from_modulestore_invalid_source(self): - """ - Test migrate_from_modulestore with invalid source - """ - task = migrate_from_modulestore.apply_async( - kwargs={ - "user_id": self.user.id, - "source_pk": 999999, # Non-existent source - "target_library_key": str(self.lib_key), - "target_collection_pk": self.collection.id, - "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, - "preserve_url_slugs": True, - "composition_level": CompositionLevel.Unit.value, - "forward_source_to_target": False, - } - ) - - status = UserTaskStatus.objects.get(task_id=task.id) - self.assertEqual(status.state, UserTaskStatus.FAILED) - self.assertEqual(self._get_task_status_fail_message(status), 
"ModulestoreSource matching query does not exist.") - def test_bulk_migrate_invalid_sources(self): """ Test bulk_migrate_from_modulestore with invalid source @@ -339,31 +311,6 @@ def test_bulk_migrate_invalid_sources(self): self.assertEqual(status.state, UserTaskStatus.FAILED) self.assertEqual(self._get_task_status_fail_message(status), "ModulestoreSource matching query does not exist.") - def test_migrate_from_modulestore_invalid_collection(self): - """ - Test migrate_from_modulestore with invalid collection - """ - source = ModulestoreSource.objects.create( - key=self.course.id, - ) - - task = migrate_from_modulestore.apply_async( - kwargs={ - "user_id": self.user.id, - "source_pk": source.id, - "target_library_key": str(self.lib_key), - "target_collection_pk": 999999, # Non-existent collection - "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, - "preserve_url_slugs": True, - "composition_level": CompositionLevel.Unit.value, - "forward_source_to_target": False, - } - ) - - status = UserTaskStatus.objects.get(task_id=task.id) - self.assertEqual(status.state, UserTaskStatus.FAILED) - self.assertEqual(self._get_task_status_fail_message(status), "Collection matching query does not exist.") - def test_bulk_migrate_invalid_collection(self): """ Test bulk_migrate_from_modulestore with invalid collection @@ -389,14 +336,6 @@ def test_bulk_migrate_invalid_collection(self): self.assertEqual(status.state, UserTaskStatus.FAILED) self.assertEqual(self._get_task_status_fail_message(status), "Collection matching query does not exist.") - def test_migration_task_calculate_total_steps(self): - """ - Test _MigrationTask.calculate_total_steps returns correct count - """ - total_steps = _MigrationTask.calculate_total_steps({}) - expected_steps = len(list(MigrationStep)) - 1 - self.assertEqual(total_steps, expected_steps) - def test_bulk_migration_task_calculate_total_steps(self): """ Test _BulkMigrationTask.calculate_total_steps returns correct count @@ -413,26 +352,15 
@@ def test_migrate_component_success(self): """ source_key = self.course.id.make_usage_key("problem", "test_problem") olx = '' - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - - result = _migrate_component( + context = self._make_migration_context() + result, reason = _migrate_component( context=context, source_key=source_key, olx=olx, title="test_problem" ) + self.assertIsNone(reason) self.assertIsNotNone(result) self.assertIsInstance(result, PublishableEntityVersion) @@ -440,6 +368,35 @@ def test_migrate_component_success(self): "problem", result.componentversion.component.component_type.name ) + # The component is published + self.assertFalse(result.componentversion.component.versioning.has_unpublished_changes) + + def test_migrate_component_failure(self): + """ + Test _migrate_component fails to import component with children + """ + source_key = self.course.id.make_usage_key("library_content", "test_library_content") + olx = ''' + + + + + ''' + context = self._make_migration_context() + result, reason = _migrate_component( + context=context, + source_key=source_key, + olx=olx, + title="test_library content" + ) + + self.assertIsNone(result) + self.assertEqual( + reason, + 'The "library_content" XBlock (ID: "test_library_content") has children,' + ' so it not supported in content libraries.', + ) + def test_migrate_component_with_static_content(self): """ Test _migrate_component with static file content @@ -455,19 +412,8 @@ def test_migrate_component_with_static_content(self): created=timezone.now(), ) content_by_filename = {"test_image.png": test_content.id} - context = _MigrationContext( - 
existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename=content_by_filename, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - result = _migrate_component( + context = self._make_migration_context(content_by_filename=content_by_filename) + result, reason = _migrate_component( context=context, source_key=source_key, olx=olx, @@ -475,6 +421,7 @@ def test_migrate_component_with_static_content(self): ) self.assertIsNotNone(result) + self.assertIsNone(reason) component_content = result.componentversion.componentversioncontent_set.filter( key="static/test_image.png" @@ -482,159 +429,204 @@ def test_migrate_component_with_static_content(self): self.assertIsNotNone(component_content) self.assertEqual(component_content.content_id, test_content.id) - def test_migrate_component_replace_existing_false(self): + def test_migrate_skip_repeats(self): """ - Test _migrate_component with replace_existing=False returns existing component + Test that, when requested, the migration will Skip blocks that have previously been migrated + + Tests with both a container and a component """ - source_key = self.course.id.make_usage_key("problem", "existing_problem") - olx = '' - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) + source = ModulestoreSource.objects.create(key=self.course.id) - first_result = _migrate_component( - context=context, - source_key=source_key, - 
olx=olx, - title="test_problem" + # Create a legacy lib with 2 blocks and migrate it + source_html = BlockFactory.create( + category="html", + display_name="Test HTML for Skip", + parent_location=self.course.usage_key, + user_id=self.user.id, + publish_item=False + ) + source_unit = BlockFactory.create( + category="vertical", + display_name="Test Unit for Skip", + parent_location=self.course.usage_key, + user_id=self.user.id, + publish_item=False + ) + bulk_migrate_from_modulestore.apply_async( + kwargs={ + "user_id": self.user.id, + "sources_pks": [source.id], + "target_library_key": str(self.lib_key), + "target_collection_pks": [], + "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, # arbitrary + "preserve_url_slugs": True, + "composition_level": CompositionLevel.Unit.value, + "forward_source_to_target": False, + } ) - context.existing_source_to_target_keys[source_key] = [first_result.entity] - - second_result = _migrate_component( - context=context, - source_key=source_key, - olx='', - title="updated_problem" + # Update both blocks, and add a new one. Then migrate again. 
+ source_html.display_name = "Test HTML for Skip - Source Updated" + source_html.save() + self.store.update_item(source_html, self.user.id) + source_unit.display_name = "Test Unit for Skip - Source Updated" + source_unit.save() + self.store.update_item(source_unit, self.user.id) + source_html_new = BlockFactory.create( + category="html", + display_name="Test HTML New", + parent_location=self.course.usage_key, + user_id=self.user.id, + publish_item=False + ) + bulk_migrate_from_modulestore.apply_async( + kwargs={ + "user_id": self.user.id, + "sources_pks": [source.id], + "target_library_key": str(self.lib_key), + "target_collection_pks": [], + "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, # <-- important + "preserve_url_slugs": True, + "composition_level": CompositionLevel.Unit.value, + "forward_source_to_target": False, + } ) - self.assertEqual(first_result.entity_id, second_result.entity_id) - self.assertEqual(first_result.version_num, second_result.version_num) + # The first migration's info includes the initial two blocks. + migration_1, migration_0 = list(migrator_api.get_migrations(source_key=source.key)) + mappings_0 = migrator_api.get_migration_blocks(migration_0.pk) + assert set(mappings_0) == {source_html.usage_key, source_unit.usage_key} + assert mappings_0[source_html.usage_key].target_title == "Test HTML for Skip" + assert mappings_0[source_unit.usage_key].target_title == "Test Unit for Skip" + + # The next migration's info includes the newly-added block, + # but not the edited blocks, because we chose Skip. + mappings_1 = migrator_api.get_migration_blocks(migration_1.pk) + assert set(mappings_1) == {source_html_new.usage_key} + assert mappings_1[source_html_new.usage_key].target_title == "Test HTML New" def test_migrate_component_same_title(self): """ - Test _migrate_component for two components with the same title + Test a migration with two components of the same title, when updating. 
- Using preserve_url_slugs=False to create a new component with - a different URL slug based on the component's Title. + We expect that both blocks will be migrated to target components with usage keys + based on the shared title, but disambiguated by a _1 suffix. """ - source_key_1 = self.course.id.make_usage_key("problem", "existing_problem_1") - source_key_2 = self.course.id.make_usage_key("problem", "existing_problem_2") - olx = '' - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=False, - created_at=timezone.now(), - created_by=self.user.id, - ) - - first_result = _migrate_component( - context=context, - source_key=source_key_1, - olx=olx, - title="test_problem" - ) - - context.existing_source_to_target_keys[source_key_1] = [first_result.entity] - - second_result = _migrate_component( - context=context, - source_key=source_key_2, - olx=olx, - title="test_problem" + source = ModulestoreSource.objects.create(key=self.course.id) + source_key_1 = self.course.id.make_usage_key("html", "existing_html_1") + source_key_2 = self.course.id.make_usage_key("html", "existing_html_2") + BlockFactory.create( + category="html", + display_name="Test HTML Same Title", + location=source_key_1, + parent_location=self.course.usage_key, + user_id=self.user.id, + publish_item=False + ) + BlockFactory.create( + category="html", + display_name="Test HTML Same Title", + location=source_key_2, + parent_location=self.course.usage_key, + user_id=self.user.id, + publish_item=False + ) + bulk_migrate_from_modulestore.apply_async( + kwargs={ + "user_id": self.user.id, + "sources_pks": [source.id], + "target_library_key": str(self.lib_key), + "target_collection_pks": [], + "repeat_handling_strategy": 
RepeatHandlingStrategy.Skip.value, + "preserve_url_slugs": False, + "composition_level": CompositionLevel.Unit.value, + "forward_source_to_target": False, + } ) + migrations = list(migrator_api.get_migrations(source_key=source.key)) + assert len(migrations) == 1 + mappings = migrator_api.get_migration_blocks(migrations[0].pk) + assert (html_migration_1 := mappings.get(source_key_1)) + assert (block_migration_2 := mappings.get(source_key_2)) + assert html_migration_1.target_title == "Test HTML Same Title" + assert block_migration_2.target_title == "Test HTML Same Title" + assert str(html_migration_1.target_key) == "lb:testorg:test-key:html:test-html-same-title" + assert str(block_migration_2.target_key) == "lb:testorg:test-key:html:test-html-same-title_1" - self.assertNotEqual(first_result.entity_id, second_result.entity_id) - self.assertNotEqual(first_result.entity.key, second_result.entity.key) - - def test_migrate_component_replace_existing_true(self): - """ - Test _migrate_component with replace_existing=True creates new version + def test_migrate_update_repeats(self): """ - source_key = self.course.id.make_usage_key("problem", "replaceable_problem") - original_olx = '' - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Update, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) + Test that, when requested, the migration will update blocks that have previously been migrated - first_result = _migrate_component( - context=context, - source_key=source_key, - olx=original_olx, - title="original" + Tests with both a container and a component + """ + source = ModulestoreSource.objects.create(key=self.course.id) + source_html = BlockFactory.create( + category="html", + 
display_name="Test HTML for Update", + parent_location=self.course.usage_key, + user_id=self.user.id, + publish_item=False + ) + source_unit = BlockFactory.create( + category="vertical", + display_name="Test Unit for Update", + parent_location=self.course.usage_key, + user_id=self.user.id, + publish_item=False + ) + bulk_migrate_from_modulestore.apply_async( + kwargs={ + "user_id": self.user.id, + "sources_pks": [source.id], + "target_library_key": str(self.lib_key), + "target_collection_pks": [], + # (the value of repeat_handling_strategy here doesn't matter for this test) + "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, + "preserve_url_slugs": True, + "composition_level": CompositionLevel.Unit.value, + "forward_source_to_target": False, + } ) - - context.existing_source_to_target_keys[source_key] = [first_result.entity] - - updated_olx = '' - second_result = _migrate_component( - context=context, - source_key=source_key, - olx=updated_olx, - title="updated" + source_html.display_name = "Test HTML for Update - Source Updated" + source_html.save() + self.store.update_item(source_html, self.user.id) + source_unit.display_name = "Test Unit for Update - Source Updated" + source_unit.save() + self.store.update_item(source_unit, self.user.id) + bulk_migrate_from_modulestore.apply_async( + kwargs={ + "user_id": self.user.id, + "sources_pks": [source.id], + "target_library_key": str(self.lib_key), + "target_collection_pks": [], + "repeat_handling_strategy": RepeatHandlingStrategy.Update.value, + "preserve_url_slugs": True, + "composition_level": CompositionLevel.Unit.value, + "forward_source_to_target": False, + } ) - - self.assertEqual(first_result.entity_id, second_result.entity_id) - self.assertNotEqual(first_result.version_num, second_result.version_num) - - def test_migrate_component_different_block_types(self): - """ - Test _migrate_component with different block types - """ - block_types = ["problem", "html", "video", "discussion"] - - for 
block_type in block_types: - source_key = self.course.id.make_usage_key(block_type, f"test_{block_type}") - olx = f'<{block_type} display_name="Test {block_type.title()}">' - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - - result = _migrate_component( - context=context, - source_key=source_key, - olx=olx, - title="test" - ) - - self.assertIsNotNone(result, f"Failed to migrate {block_type}") - - self.assertEqual( - block_type, result.componentversion.component.component_type.name - ) + migration_1, migration_0 = list(migrator_api.get_migrations(source_key=source.key)) + mappings_0 = migrator_api.get_migration_blocks(migration_0.pk) + mappings_1 = migrator_api.get_migration_blocks(migration_1.pk) + assert (html_migration_0 := mappings_0.get(source_html.usage_key)) + assert (unit_migration_0 := mappings_0.get(source_unit.usage_key)) + assert (html_migration_1 := mappings_1.get(source_html.usage_key)) + assert (unit_migration_1 := mappings_1.get(source_unit.usage_key)) + + # The targets of both migrations are the same + assert str(html_migration_0.target_key) == "lb:testorg:test-key:html:Test_HTML_for_Update" + assert str(html_migration_1.target_key) == "lb:testorg:test-key:html:Test_HTML_for_Update" + assert html_migration_0.target_entity_pk == html_migration_1.target_entity_pk + assert str(unit_migration_0.target_key) == "lct:testorg:test-key:unit:Test_Unit_for_Update" + assert unit_migration_0.target_entity_pk == unit_migration_1.target_entity_pk + + # And because we specified Update, the targets were updated on the 2nd migration + assert html_migration_0.target_title == "Test HTML for Update" + assert 
unit_migration_0.target_title == "Test Unit for Update" + assert html_migration_1.target_title == "Test HTML for Update - Source Updated" + assert unit_migration_1.target_title == "Test Unit for Update - Source Updated" + assert html_migration_0.target_version_num == html_migration_1.target_version_num - 1 + assert unit_migration_0.target_version_num == unit_migration_1.target_version_num - 1 def test_migrate_component_content_filename_not_in_olx(self): """ @@ -663,20 +655,11 @@ def test_migrate_component_content_filename_not_in_olx(self): "referenced.png": referenced_content.id, "unreferenced.png": unreferenced_content.id, } - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, + context = self._make_migration_context( content_by_filename=content_by_filename, - composition_level=CompositionLevel.Unit, repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, ) - - result = _migrate_component( + result, reason = _migrate_component( context=context, source_key=source_key, olx=olx, @@ -684,6 +667,7 @@ def test_migrate_component_content_filename_not_in_olx(self): ) self.assertIsNotNone(result) + self.assertIsNone(reason) referenced_content_exists = ( result.componentversion.componentversioncontent_set.filter( @@ -706,20 +690,8 @@ def test_migrate_component_library_source_key(self): library_key = LibraryLocator(org="TestOrg", library="TestLibrary") source_key = library_key.make_usage_key("problem", "library_problem") olx = '' - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - 
preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - - result = _migrate_component( + context = self._make_migration_context() + result, reason = _migrate_component( context=context, source_key=source_key, olx=olx, @@ -727,61 +699,12 @@ def test_migrate_component_library_source_key(self): ) self.assertIsNotNone(result) + self.assertIsNone(reason) self.assertEqual( "problem", result.componentversion.component.component_type.name ) - def test_migrate_component_duplicate_content_integrity_error(self): - """ - Test _migrate_component handles IntegrityError when content already exists - """ - source_key = self.course.id.make_usage_key( - "problem", "test_problem_duplicate_content" - ) - olx = '

See image: duplicate.png

' - - media_type = authoring_api.get_or_create_media_type("image/png") - test_content = authoring_api.get_or_create_file_content( - self.learning_package.id, - media_type.id, - data=b"test_image_data", - created=timezone.now(), - ) - content_by_filename = {"duplicate.png": test_content.id} - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename=content_by_filename, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Update, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - - first_result = _migrate_component( - context=context, - source_key=source_key, - olx=olx, - title="test_problem" - ) - - context.existing_source_to_target_keys[source_key] = [first_result.entity] - - second_result = _migrate_component( - context=context, - source_key=source_key, - olx=olx, - title="test_problem" - ) - - self.assertIsNotNone(first_result) - self.assertIsNotNone(second_result) - self.assertEqual(first_result.entity_id, second_result.entity_id) - def test_migrate_container_creates_new_container(self): """ Test _migrate_container creates a new container when none exists @@ -824,20 +747,9 @@ def test_migrate_container_creates_new_container(self): child_version_1.publishable_entity_version, child_version_2.publishable_entity_version, ] - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) + context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip) - result = 
_migrate_container( + result, reason = _migrate_container( context=context, source_key=source_key, container_type=lib_api.ContainerType.Unit, @@ -845,6 +757,7 @@ def test_migrate_container_creates_new_container(self): children=children, ) + self.assertIsNone(reason) self.assertIsInstance(result, PublishableEntityVersion) container_version = result.containerversion @@ -866,18 +779,7 @@ def test_migrate_container_different_container_types(self): (lib_api.ContainerType.Subsection, "sequential"), (lib_api.ContainerType.Section, "chapter"), ] - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) + context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip) for container_type, block_type in container_types: with self.subTest(container_type=container_type, block_type=block_type): @@ -885,7 +787,7 @@ def test_migrate_container_different_container_types(self): block_type, f"test_{block_type}" ) - result = _migrate_container( + result, reason = _migrate_container( context=context, source_key=source_key, container_type=container_type, @@ -893,180 +795,70 @@ def test_migrate_container_different_container_types(self): children=[], ) + self.assertIsNone(reason) self.assertIsNotNone(result) container_version = result.containerversion self.assertEqual(container_version.title, f"Test {block_type.title()}") - - def test_migrate_container_replace_existing_false(self): - """ - Test _migrate_container returns existing container when replace_existing=False - """ - source_key = self.course.id.make_usage_key("vertical", "existing_vertical") - context = _MigrationContext( - existing_source_to_target_keys={}, 
- target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - - first_result = _migrate_container( - context=context, - source_key=source_key, - container_type=lib_api.ContainerType.Unit, - title="Original Title", - children=[], - ) - - context.existing_source_to_target_keys[source_key] = [first_result.entity] - - second_result = _migrate_container( - context=context, - source_key=source_key, - container_type=lib_api.ContainerType.Unit, - title="Updated Title", - children=[], - ) - - self.assertEqual(first_result.entity_id, second_result.entity_id) - self.assertEqual(first_result.version_num, second_result.version_num) - - container_version = second_result.containerversion - self.assertEqual(container_version.title, "Original Title") + # The container is published + self.assertFalse(authoring_api.contains_unpublished_changes(container_version.container.pk)) def test_migrate_container_same_title(self): """ - Test _migrate_container for two containers with the same title - - Using preserve_url_slugs=False to create a new Unit with - a different URL slug based on the container's Title. 
- """ - source_key_1 = self.course.id.make_usage_key("vertical", "human_readable_vertical_1") - source_key_2 = self.course.id.make_usage_key("vertical", "human_readable_vertical_2") - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=False, - created_at=timezone.now(), - created_by=self.user.id, - ) - - first_result = _migrate_container( - context=context, - source_key=source_key_1, - container_type=lib_api.ContainerType.Unit, - title="Original Human Readable Title", - children=[], - ) + Test a migration with two containers of the same title and preserve_url_slugs=False - context.existing_source_to_target_keys[source_key_1] = [first_result.entity] - - second_result = _migrate_container( - context=context, - source_key=source_key_2, - container_type=lib_api.ContainerType.Unit, - title="Original Human Readable Title", - children=[], - ) - - self.assertNotEqual(first_result.entity_id, second_result.entity_id) - self.assertNotEqual(first_result.entity.key, second_result.entity.key) - # Make sure the current logic from tasts::_find_unique_slug is used - self.assertEqual(second_result.entity.key, first_result.entity.key + "_1") - - container_version = second_result.containerversion - self.assertEqual(container_version.title, "Original Human Readable Title") - - def test_migrate_container_replace_existing_true(self): + We expect that both units will be migrated to target units with container keys + based on the shared title, but disambiguated by a _1 suffix. 
""" - Test _migrate_container creates new version when replace_existing=True - """ - source_key = self.course.id.make_usage_key("vertical", "replaceable_vertical") - - child_component = authoring_api.create_component( - self.learning_package.id, - component_type=authoring_api.get_or_create_component_type( - "xblock.v1", "problem" - ), - local_key="child_problem", - created=timezone.now(), - created_by=self.user.id, - ) - child_version = authoring_api.create_next_component_version( - child_component.pk, - content_to_replace={}, - created=timezone.now(), - created_by=self.user.id, - ) - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Update, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - - first_result = _migrate_container( - context=context, - source_key=source_key, - container_type=lib_api.ContainerType.Unit, - title="Original Title", - children=[], - ) - - context.existing_source_to_target_keys[source_key] = [first_result.entity] - - second_result = _migrate_container( - context=context, - source_key=source_key, - container_type=lib_api.ContainerType.Unit, - title="Updated Title", - children=[child_version.publishable_entity_version], - ) - - self.assertEqual(first_result.entity_id, second_result.entity_id) - self.assertNotEqual(first_result.version_num, second_result.version_num) - - container_version = second_result.containerversion - self.assertEqual(container_version.title, "Updated Title") - self.assertEqual(container_version.entity_list.entitylistrow_set.count(), 1) - - def test_migrate_container_with_library_source_key(self): - """ - Test _migrate_container with library source key - """ - library_key = LibraryLocator(org="TestOrg", library="TestLibrary") 
- source_key = library_key.make_usage_key("vertical", "library_vertical") - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, + source = ModulestoreSource.objects.create(key=self.course.id) + source_key_1 = self.course.id.make_usage_key("vertical", "existing_unit_1") + source_key_2 = self.course.id.make_usage_key("vertical", "existing_unit_2") + BlockFactory.create( + category="vertical", + display_name="Test Unit Same Title", + location=source_key_1, + parent_location=self.course.usage_key, + user_id=self.user.id, + publish_item=False + ) + BlockFactory.create( + category="html", + display_name="Test Unit Same Title", + location=source_key_2, + parent_location=self.course.usage_key, + user_id=self.user.id, + publish_item=False + ) + bulk_migrate_from_modulestore.apply_async( + kwargs={ + "user_id": self.user.id, + "sources_pks": [source.id], + "target_library_key": str(self.lib_key), + "target_collection_pks": [], + "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, + "preserve_url_slugs": False, + "composition_level": CompositionLevel.Unit.value, + "forward_source_to_target": False, + } ) + (migration,) = list(migrator_api.get_migrations(source_key=source.key)) + mappings = migrator_api.get_migration_blocks(migration.pk) + assert (html_migration_1 := mappings.get(source_key_1)) + assert (block_migration_2 := mappings.get(source_key_2)) + assert html_migration_1.target_title == "Test Unit Same Title" + assert block_migration_2.target_title == "Test Unit Same Title" + assert str(html_migration_1.target_key) == "lct:testorg:test-key:unit:test-unit-same-title" + assert str(block_migration_2.target_key) == 
"lct:testorg:test-key:unit:test-unit-same-title_1" + + def test_migrate_container_with_library_source_key(self): + """ + Test _migrate_container with library source key + """ + library_key = LibraryLocator(org="TestOrg", library="TestLibrary") + source_key = library_key.make_usage_key("vertical", "library_vertical") + context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip) - result = _migrate_container( + result, _ = _migrate_container( context=context, source_key=source_key, container_type=lib_api.ContainerType.Unit, @@ -1084,20 +876,8 @@ def test_migrate_container_empty_children_list(self): Test _migrate_container handles empty children list """ source_key = self.course.id.make_usage_key("vertical", "empty_vertical") - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - - result = _migrate_container( + context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip) + result, reason = _migrate_container( context=context, source_key=source_key, container_type=lib_api.ContainerType.Unit, @@ -1105,6 +885,7 @@ def test_migrate_container_empty_children_list(self): children=[], ) + self.assertIsNone(reason) self.assertIsNotNone(result) container_version = result.containerversion @@ -1115,18 +896,7 @@ def test_migrate_container_preserves_child_order(self): Test _migrate_container preserves the order of children """ source_key = self.course.id.make_usage_key("vertical", "ordered_vertical") - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - 
source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) + context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip) children = [] for i in range(3): child_component = authoring_api.create_component( @@ -1146,7 +916,7 @@ def test_migrate_container_preserves_child_order(self): ) children.append(child_version.publishable_entity_version) - result = _migrate_container( + result, _ = _migrate_container( context=context, source_key=source_key, container_type=lib_api.ContainerType.Unit, @@ -1222,20 +992,8 @@ def test_migrate_container_with_mixed_child_types(self): html_version.publishable_entity_version, video_version.publishable_entity_version, ] - context = _MigrationContext( - existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - - result = _migrate_container( + context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip) + result, _ = _migrate_container( context=context, source_key=source_key, container_type=lib_api.ContainerType.Unit, @@ -1256,76 +1014,6 @@ def test_migrate_container_with_mixed_child_types(self): expected_entity_ids = {child.entity_id for child in children} self.assertEqual(child_entity_ids, expected_entity_ids) - def test_migrate_container_generates_correct_target_key(self): - """ - Test _migrate_container generates correct target key from source key - """ - course_source_key = self.course.id.make_usage_key("vertical", "test_vertical") - context = _MigrationContext( - 
existing_source_to_target_keys={}, - target_package_id=self.learning_package.id, - target_library_key=self.library.library_key, - source_context_key=self.course.id, - content_by_filename={}, - composition_level=CompositionLevel.Unit, - repeat_handling_strategy=RepeatHandlingStrategy.Skip, - preserve_url_slugs=True, - created_at=timezone.now(), - created_by=self.user.id, - ) - - course_result = _migrate_container( - context=context, - source_key=course_source_key, - container_type=lib_api.ContainerType.Unit, - title="Course Vertical", - children=[], - ) - context.add_migration(course_source_key, course_result.entity) - - library_key = LibraryLocator(org="TestOrg", library="TestLibrary") - library_source_key = library_key.make_usage_key("vertical", "test_vertical") - - library_result = _migrate_container( - context=context, - source_key=library_source_key, - container_type=lib_api.ContainerType.Unit, - title="Library Vertical", - children=[], - ) - - self.assertIsNotNone(course_result) - self.assertIsNotNone(library_result) - self.assertNotEqual(course_result.entity_id, library_result.entity_id) - - def test_migrate_from_modulestore_success_course(self): - """ - Test successful migration from course to library - """ - source = ModulestoreSource.objects.create(key=self.course.id) - - task = migrate_from_modulestore.apply_async( - kwargs={ - "user_id": self.user.id, - "source_pk": source.id, - "target_library_key": str(self.lib_key), - "target_collection_pk": self.collection.id, - "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, - "preserve_url_slugs": True, - "composition_level": CompositionLevel.Unit.value, - "forward_source_to_target": False, - } - ) - - status = UserTaskStatus.objects.get(task_id=task.id) - self.assertEqual(status.state, UserTaskStatus.SUCCEEDED) - - migration = ModulestoreMigration.objects.get( - source=source, target=self.learning_package - ) - self.assertEqual(migration.composition_level, CompositionLevel.Unit.value) - 
self.assertEqual(migration.repeat_handling_strategy, RepeatHandlingStrategy.Skip.value) - def test_bulk_migrate_success_courses(self): """ Test successful bulk migration from courses to library @@ -1367,12 +1055,12 @@ def test_migrate_from_modulestore_success_legacy_library(self): """ source = ModulestoreSource.objects.create(key=self.legacy_library.location.library_key) - task = migrate_from_modulestore.apply_async( + task = bulk_migrate_from_modulestore.apply_async( kwargs={ "user_id": self.user.id, - "source_pk": source.id, + "sources_pks": [source.id], "target_library_key": str(self.lib_key), - "target_collection_pk": self.collection.id, + "target_collection_pks": [self.collection.id], "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, "preserve_url_slugs": True, "composition_level": CompositionLevel.Unit.value, @@ -1518,7 +1206,6 @@ def test_bulk_migrate_use_previous_collection_on_skip_and_update(self, repeat_ha migrations = ModulestoreMigration.objects.filter( source=source, target=self.learning_package ) - for migration in migrations: self.assertEqual(migration.composition_level, CompositionLevel.Unit.value) self.assertEqual(migration.repeat_handling_strategy, repeat_handling_strategy.value) @@ -1658,50 +1345,19 @@ def test_bulk_migrate_create_a_new_collection_on_fork(self): self.assertEqual(migrations[1].target_collection.title, f"{self.legacy_library.display_name}_1") self.assertNotEqual(migrations[1].target_collection.id, previous_collection.id) - def test_migrate_from_modulestore_library_validation_failure(self): - """ - Test migration from legacy library fails when modulestore content doesn't exist - """ - library_key = LibraryLocator(org="TestOrg", library="TestLibrary") - - source = ModulestoreSource.objects.create(key=library_key) - - task = migrate_from_modulestore.apply_async( - kwargs={ - "user_id": self.user.id, - "source_pk": source.id, - "target_library_key": str(self.lib_key), - "target_collection_pk": None, - 
"repeat_handling_strategy": RepeatHandlingStrategy.Update.value, - "preserve_url_slugs": True, - "composition_level": CompositionLevel.Section.value, - "forward_source_to_target": True, - } - ) - - status = UserTaskStatus.objects.get(task_id=task.id) - - # Should fail at loading step since we don't have real modulestore content - self.assertEqual(status.state, UserTaskStatus.FAILED) - self.assertEqual( - self._get_task_status_fail_message(status), - "Failed to load source item 'lib-block-v1:TestOrg+TestLibrary+type@library+block@library' " - "from ModuleStore: library-v1:TestOrg+TestLibrary+branch@library" - ) - - def test_migrate_from_modulestore_invalid_source_key_type(self): + def test_bulk_migrate_invalid_source_key_type(self): """ - Test migration with invalid source key type + Test bulk migration with invalid source key type """ invalid_key = LibraryLocatorV2.from_string("lib:testorg:invalid") source = ModulestoreSource.objects.create(key=invalid_key) - task = migrate_from_modulestore.apply_async( + task = bulk_migrate_from_modulestore.apply_async( kwargs={ "user_id": self.user.id, - "source_pk": source.id, + "sources_pks": [source.id], "target_library_key": str(self.lib_key), - "target_collection_pk": self.collection.id, + "target_collection_pks": [self.collection.id], "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, "preserve_url_slugs": True, "composition_level": CompositionLevel.Unit.value, @@ -1716,48 +1372,52 @@ def test_migrate_from_modulestore_invalid_source_key_type(self): f"Not a valid source context key: {invalid_key}. Source key must reference a course or a legacy library." 
) - def test_bulk_migrate_invalid_source_key_type(self): + def test_migrate_component_with_fake_block_type(self): """ - Test bulk migration with invalid source key type + Test _migrate_component with with_fake_block_type """ - invalid_key = LibraryLocatorV2.from_string("lib:testorg:invalid") - source = ModulestoreSource.objects.create(key=invalid_key) - - task = bulk_migrate_from_modulestore.apply_async( - kwargs={ - "user_id": self.user.id, - "sources_pks": [source.id], - "target_library_key": str(self.lib_key), - "target_collection_pks": [self.collection.id], - "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, - "preserve_url_slugs": True, - "composition_level": CompositionLevel.Unit.value, - "forward_source_to_target": False, - } + source_key = self.course.id.make_usage_key("fake_block", "test_fake_block") + olx = '' + context = _MigrationContext( + used_component_keys=set(), + used_container_slugs=set(), + previous_block_migrations={}, + target_package_id=self.learning_package.id, + target_library_key=self.library.library_key, + source_context_key=self.course.id, + content_by_filename={}, + composition_level=CompositionLevel.Unit, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, + preserve_url_slugs=True, + created_at=timezone.now(), + created_by=self.user.id, ) - status = UserTaskStatus.objects.get(task_id=task.id) - self.assertEqual(status.state, UserTaskStatus.FAILED) - self.assertEqual( - self._get_task_status_fail_message(status), - f"Not a valid source context key: {invalid_key}. Source key must reference a course or a legacy library." 
+ result, reason = _migrate_component( + context=context, + source_key=source_key, + olx=olx, + title="test" ) - def test_migrate_from_modulestore_nonexistent_modulestore_item(self): + self.assertIsNone(result) + self.assertEqual(reason, "Invalid block type: fake_block") + + def test_bulk_migrate_nonexistent_modulestore_item(self): """ - Test migration when modulestore item doesn't exist + Test bulk migration when modulestore item doesn't exist """ nonexistent_course_key = CourseKey.from_string( "course-v1:NonExistent+Course+Run" ) source = ModulestoreSource.objects.create(key=nonexistent_course_key) - task = migrate_from_modulestore.apply_async( + task = bulk_migrate_from_modulestore.apply_async( kwargs={ "user_id": self.user.id, - "source_pk": source.id, + "sources_pks": [source.id], "target_library_key": str(self.lib_key), - "target_collection_pk": self.collection.id, + "target_collection_pks": [self.collection.id], "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, "preserve_url_slugs": True, "composition_level": CompositionLevel.Unit.value, @@ -1773,46 +1433,47 @@ def test_migrate_from_modulestore_nonexistent_modulestore_item(self): "from ModuleStore: course-v1:NonExistent+Course+Run+branch@draft-branch" ) - def test_bulk_migrate_nonexistent_modulestore_item(self): + def test_bulk_migrate_nonexistent_library(self): """ - Test bulk migration when modulestore item doesn't exist + Test migration from legacy library fails when modulestore content doesn't exist """ - nonexistent_course_key = CourseKey.from_string( - "course-v1:NonExistent+Course+Run" - ) - source = ModulestoreSource.objects.create(key=nonexistent_course_key) + library_key = LibraryLocator(org="TestOrg", library="TestLibrary") + + source = ModulestoreSource.objects.create(key=library_key) task = bulk_migrate_from_modulestore.apply_async( kwargs={ "user_id": self.user.id, "sources_pks": [source.id], "target_library_key": str(self.lib_key), - "target_collection_pks": [self.collection.id], 
- "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, + "target_collection_pks": [None], + "repeat_handling_strategy": RepeatHandlingStrategy.Update.value, "preserve_url_slugs": True, - "composition_level": CompositionLevel.Unit.value, - "forward_source_to_target": False, + "composition_level": CompositionLevel.Section.value, + "forward_source_to_target": True, } ) status = UserTaskStatus.objects.get(task_id=task.id) + + # Should fail at loading step since we don't have real modulestore content self.assertEqual(status.state, UserTaskStatus.FAILED) self.assertEqual( self._get_task_status_fail_message(status), - "Failed to load source item 'block-v1:NonExistent+Course+Run+type@course+block@course' " - "from ModuleStore: course-v1:NonExistent+Course+Run+branch@draft-branch" + "Failed to load source item 'lib-block-v1:TestOrg+TestLibrary+type@library+block@library' " + "from ModuleStore: library-v1:TestOrg+TestLibrary+branch@library" ) - def test_migrate_from_modulestore_task_status_progression(self): + def test_bulk_migrate_from_modulestore_task_status_progression(self): """Test that task status progresses through expected steps""" source = ModulestoreSource.objects.create(key=self.course.id) - task = migrate_from_modulestore.apply_async( + task = bulk_migrate_from_modulestore.apply_async( kwargs={ "user_id": self.user.id, - "source_pk": source.id, + "sources_pks": [source.id], "target_library_key": str(self.lib_key), - "target_collection_pk": self.collection.id, + "target_collection_pks": [self.collection.id], "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, "preserve_url_slugs": True, "composition_level": CompositionLevel.Unit.value, @@ -1830,48 +1491,6 @@ def test_migrate_from_modulestore_task_status_progression(self): ) self.assertEqual(migration.task_status, status) - def test_migrate_from_modulestore_multiple_users_no_interference(self): - """ - Test that migrations by different users don't interfere with each other - """ - source = 
ModulestoreSource.objects.create(key=self.course.id) - other_user = UserFactory() - - task1 = migrate_from_modulestore.apply_async( - kwargs={ - "user_id": self.user.id, - "source_pk": source.id, - "target_library_key": str(self.lib_key), - "target_collection_pk": self.collection.id, - "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, - "preserve_url_slugs": True, - "composition_level": CompositionLevel.Unit.value, - "forward_source_to_target": False, - } - ) - - task2 = migrate_from_modulestore.apply_async( - kwargs={ - "user_id": other_user.id, - "source_pk": source.id, - "target_library_key": str(self.lib_key), - "target_collection_pk": self.collection.id, - "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, - "preserve_url_slugs": True, - "composition_level": CompositionLevel.Unit.value, - "forward_source_to_target": False, - } - ) - - status1 = UserTaskStatus.objects.get(task_id=task1.id) - status2 = UserTaskStatus.objects.get(task_id=task2.id) - - self.assertEqual(status1.user, self.user) - self.assertEqual(status2.user, other_user) - - # The first task should not be cancelled since it's from a different user - self.assertNotEqual(status1.state, UserTaskStatus.CANCELED) - def test_bulk_migrate_multiple_users_no_interference(self): """ Test that migrations by different users don't interfere with each other @@ -1914,35 +1533,6 @@ def test_bulk_migrate_multiple_users_no_interference(self): # The first task should not be cancelled since it's from a different user self.assertNotEqual(status1.state, UserTaskStatus.CANCELED) - @patch("cms.djangoapps.modulestore_migrator.tasks._import_assets") - def test_migrate_fails_on_import(self, mock_import_assets): - """ - Test failed migration from legacy library to V2 library - """ - mock_import_assets.side_effect = Exception("Simulated import error") - source = ModulestoreSource.objects.create(key=self.legacy_library.location.library_key) - - task = migrate_from_modulestore.apply_async( - kwargs={ - 
"user_id": self.user.id, - "source_pk": source.id, - "target_library_key": str(self.lib_key), - "target_collection_pk": self.collection.id, - "repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, - "preserve_url_slugs": True, - "composition_level": CompositionLevel.Unit.value, - "forward_source_to_target": False, - } - ) - - status = UserTaskStatus.objects.get(task_id=task.id) - self.assertEqual(status.state, UserTaskStatus.FAILED) - - migration = ModulestoreMigration.objects.get( - source=source, target=self.learning_package - ) - self.assertTrue(migration.is_failed) - @patch("cms.djangoapps.modulestore_migrator.tasks._import_assets") def test_bulk_migrate_fails_on_import(self, mock_import_assets): """ diff --git a/cms/lib/xblock/upstream_sync.py b/cms/lib/xblock/upstream_sync.py index 8a089aeda75c..b56e0d95684d 100644 --- a/cms/lib/xblock/upstream_sync.py +++ b/cms/lib/xblock/upstream_sync.py @@ -87,6 +87,13 @@ class UpstreamLink: downstream_customized: list[str] | None # List of fields modified in downstream has_top_level_parent: bool # True if this Upstream link has a top-level parent + @property + def is_upstream_deleted(self) -> bool: + return bool( + self.upstream_ref and + self.version_available is None + ) + @property def is_ready_to_sync_individually(self) -> bool: return bool( @@ -94,7 +101,7 @@ def is_ready_to_sync_individually(self) -> bool: self.version_available and self.version_available > (self.version_synced or 0) and self.version_available > (self.version_declined or 0) - ) + ) or self.is_upstream_deleted def _check_children_ready_to_sync(self, xblock_downstream: XBlock, return_fast: bool) -> list[dict[str, str]]: """ diff --git a/common/djangoapps/student/auth.py b/common/djangoapps/student/auth.py index e199142fe377..047f0174a062 100644 --- a/common/djangoapps/student/auth.py +++ b/common/djangoapps/student/auth.py @@ -24,6 +24,7 @@ OrgInstructorRole, OrgLibraryUserRole, OrgStaffRole, + strict_role_checking, ) # Studio permissions: @@ 
-115,8 +116,9 @@ def get_user_permissions(user, course_key, org=None, service_variant=None): return STUDIO_NO_PERMISSIONS # Staff have all permissions except EDIT_ROLES: - if OrgStaffRole(org=org).has_user(user) or (course_key and user_has_role(user, CourseStaffRole(course_key))): - return STUDIO_VIEW_USERS | STUDIO_EDIT_CONTENT | STUDIO_VIEW_CONTENT + with strict_role_checking(): + if OrgStaffRole(org=org).has_user(user) or (course_key and user_has_role(user, CourseStaffRole(course_key))): + return STUDIO_VIEW_USERS | STUDIO_EDIT_CONTENT | STUDIO_VIEW_CONTENT # Otherwise, for libraries, users can view only: if course_key and isinstance(course_key, LibraryLocator): diff --git a/common/djangoapps/student/tests/test_authz.py b/common/djangoapps/student/tests/test_authz.py index c0b88e6318b5..70636e04b68a 100644 --- a/common/djangoapps/student/tests/test_authz.py +++ b/common/djangoapps/student/tests/test_authz.py @@ -11,6 +11,7 @@ from django.test import TestCase, override_settings from opaque_keys.edx.locator import CourseLocator +from common.djangoapps.student.models.user import CourseAccessRole from common.djangoapps.student.auth import ( add_users, has_studio_read_access, @@ -305,6 +306,23 @@ def test_limited_staff_no_studio_access_cms(self): assert not has_studio_read_access(self.limited_staff, self.course_key) assert not has_studio_write_access(self.limited_staff, self.course_key) + @override_settings(SERVICE_VARIANT='cms') + def test_limited_org_staff_no_studio_access_cms(self): + """ + Verifies that course limited staff have no read and no write access when SERVICE_VARIANT is not 'lms'. + """ + # Add a user as course_limited_staff on the org + # This is not possible using the course roles classes but is possible via Django admin so we + # insert a row into the model directly to test that scenario. 
+ CourseAccessRole.objects.create( + user=self.limited_staff, + org=self.course_key.org, + role=CourseLimitedStaffRole.ROLE, + ) + + assert not has_studio_read_access(self.limited_staff, self.course_key) + assert not has_studio_write_access(self.limited_staff, self.course_key) + class CourseOrgGroupTest(TestCase): """ diff --git a/lms/djangoapps/discussion/django_comment_client/base/tests_v2.py b/lms/djangoapps/discussion/django_comment_client/base/tests_v2.py index 7bc84e5038c0..1f5ae7805740 100644 --- a/lms/djangoapps/discussion/django_comment_client/base/tests_v2.py +++ b/lms/djangoapps/discussion/django_comment_client/base/tests_v2.py @@ -180,7 +180,8 @@ def test_flag(self): with mock.patch( "openedx.core.djangoapps.django_comment_common.signals.thread_flagged.send" ) as signal_mock: - response = self.call_view("flag_abuse_for_thread", "update_thread_flag") + with self.captureOnCommitCallbacks(execute=True): + response = self.call_view("flag_abuse_for_thread", "update_thread_flag") self._assert_json_response_contains_group_info(response) self.assertEqual(signal_mock.call_count, 1) response = self.call_view("un_flag_abuse_for_thread", "update_thread_flag") @@ -471,10 +472,15 @@ def setUp(self): def assert_discussion_signals(self, signal, user=None): if user is None: user = self.student + # Use captureOnCommitCallbacks to execute on_commit callbacks during tests, + # since signals are now deferred until after transaction commit. + # Order matters: assert_signal_sent must be outer context so the signal + # fires (via captureOnCommitCallbacks) before the assertion check. 
with self.assert_signal_sent( views, signal, sender=None, user=user, exclude_args=("post",) ): - yield + with self.captureOnCommitCallbacks(execute=True): + yield def test_create_thread(self): with self.assert_discussion_signals("thread_created"): @@ -1218,7 +1224,8 @@ def test_flag(self): with mock.patch( "openedx.core.djangoapps.django_comment_common.signals.comment_flagged.send" ) as signal_mock: - self.call_view("flag_abuse_for_comment", "update_comment_flag") + with self.captureOnCommitCallbacks(execute=True): + self.call_view("flag_abuse_for_comment", "update_comment_flag") self.assertEqual(signal_mock.call_count, 1) diff --git a/lms/djangoapps/discussion/django_comment_client/base/views.py b/lms/djangoapps/discussion/django_comment_client/base/views.py index 95d5a020108f..14ce9c4b575a 100644 --- a/lms/djangoapps/discussion/django_comment_client/base/views.py +++ b/lms/djangoapps/discussion/django_comment_client/base/views.py @@ -50,6 +50,7 @@ prepare_content, sanitize_body ) +from lms.djangoapps.discussion.rest_api.utils import send_signal_after_commit from openedx.core.djangoapps.django_comment_common.signals import ( comment_created, comment_deleted, @@ -587,7 +588,10 @@ def create_thread(request, course_id, commentable_id): thread.save() - thread_created.send(sender=None, user=user, post=thread) + # Use send_signal_after_commit() to ensure the signal is sent only after the transaction commits. 
+ send_signal_after_commit( + lambda: thread_created.send(sender=None, user=user, post=thread) + ) # patch for backward compatibility to comments service if 'pinned' not in thread.attributes: @@ -598,7 +602,9 @@ def create_thread(request, course_id, commentable_id): if follow: cc_user = cc.User.from_django_user(user) cc_user.follow(thread, course_id) - thread_followed.send(sender=None, user=user, post=thread) + send_signal_after_commit( + lambda: thread_followed.send(sender=None, user=user, post=thread) + ) data = thread.to_dict() @@ -645,7 +651,9 @@ def update_thread(request, course_id, thread_id): thread.save() - thread_edited.send(sender=None, user=user, post=thread) + send_signal_after_commit( + lambda: thread_edited.send(sender=None, user=user, post=thread) + ) track_thread_edited_event(request, course, thread, None) if request.headers.get('x-requested-with') == 'XMLHttpRequest': @@ -688,7 +696,9 @@ def _create_comment(request, course_key, thread_id=None, parent_id=None): ) comment.save(params={"course_id": str(course_key)}) - comment_created.send(sender=None, user=user, post=comment) + send_signal_after_commit( + lambda: comment_created.send(sender=None, user=user, post=comment) + ) followed = post.get('auto_subscribe', 'false').lower() == 'true' @@ -729,7 +739,9 @@ def delete_thread(request, course_id, thread_id): course = get_course_with_access(request.user, 'load', course_key) thread = cc.Thread.find(thread_id) thread.delete(course_id=course_id) - thread_deleted.send(sender=None, user=request.user, post=thread) + send_signal_after_commit( + lambda: thread_deleted.send(sender=None, user=request.user, post=thread) + ) track_thread_deleted_event(request, course, thread) return JsonResponse(prepare_content(thread.to_dict(), course_key)) @@ -751,7 +763,9 @@ def update_comment(request, course_id, comment_id): comment.body = sanitize_body(request.POST["body"]) comment.save(params={"course_id": course_id}) - comment_edited.send(sender=None, user=request.user, 
post=comment) + send_signal_after_commit( + lambda: comment_edited.send(sender=None, user=request.user, post=comment) + ) track_comment_edited_event(request, course, comment, None) if request.headers.get('x-requested-with') == 'XMLHttpRequest': @@ -776,7 +790,9 @@ def endorse_comment(request, course_id, comment_id): comment.endorsed = endorsed comment.endorsement_user_id = user.id comment.save(params={"course_id": course_id}) - comment_endorsed.send(sender=None, user=user, post=comment) + send_signal_after_commit( + lambda: comment_endorsed.send(sender=None, user=user, post=comment) + ) track_forum_response_mark_event(request, course, comment, endorsed) return JsonResponse(prepare_content(comment.to_dict(), course_key)) @@ -828,7 +844,9 @@ def delete_comment(request, course_id, comment_id): course = get_course_with_access(request.user, 'load', course_key) comment = cc.Comment.find(comment_id) comment.delete(course_id=course_id) - comment_deleted.send(sender=None, user=request.user, post=comment) + send_signal_after_commit( + lambda: comment_deleted.send(sender=None, user=request.user, post=comment) + ) track_comment_deleted_event(request, course, comment) return JsonResponse(prepare_content(comment.to_dict(), course_key)) @@ -847,7 +865,9 @@ def _vote_or_unvote(request, course_id, obj, value='up', undo_vote=False): # (People could theoretically downvote by handcrafting AJAX requests.) 
else: user.vote(obj, value, course_id) - thread_voted.send(sender=None, user=request.user, post=obj) + send_signal_after_commit( + lambda: thread_voted.send(sender=None, user=request.user, post=obj) + ) track_voted_event(request, course, obj, value, undo_vote) return JsonResponse(prepare_content(obj.to_dict(), course_key)) @@ -861,7 +881,9 @@ def vote_for_comment(request, course_id, comment_id, value): """ comment = cc.Comment.find(comment_id) result = _vote_or_unvote(request, course_id, comment, value) - comment_voted.send(sender=None, user=request.user, post=comment) + send_signal_after_commit( + lambda: comment_voted.send(sender=None, user=request.user, post=comment) + ) return result @@ -914,7 +936,9 @@ def flag_abuse_for_thread(request, course_id, thread_id): thread = cc.Thread.find(thread_id) thread.flagAbuse(user, thread, course_id) track_discussion_reported_event(request, course, thread) - thread_flagged.send(sender='flag_abuse_for_thread', user=request.user, post=thread) + send_signal_after_commit( + lambda: thread_flagged.send(sender='flag_abuse_for_thread', user=request.user, post=thread) + ) return JsonResponse(prepare_content(thread.to_dict(), course_key)) @@ -953,7 +977,9 @@ def flag_abuse_for_comment(request, course_id, comment_id): comment = cc.Comment.find(comment_id) comment.flagAbuse(user, comment, course_id) track_discussion_reported_event(request, course, comment) - comment_flagged.send(sender='flag_abuse_for_comment', user=request.user, post=comment) + send_signal_after_commit( + lambda: comment_flagged.send(sender='flag_abuse_for_comment', user=request.user, post=comment) + ) return JsonResponse(prepare_content(comment.to_dict(), course_key)) @@ -1019,7 +1045,9 @@ def follow_thread(request, course_id, thread_id): # lint-amnesty, pylint: disab course = get_course_by_id(course_key) thread = cc.Thread.find(thread_id) user.follow(thread, course_id=course_id) - thread_followed.send(sender=None, user=request.user, post=thread) + 
send_signal_after_commit( + lambda: thread_followed.send(sender=None, user=request.user, post=thread) + ) track_thread_followed_event(request, course, thread, True) return JsonResponse({}) @@ -1051,7 +1079,9 @@ def unfollow_thread(request, course_id, thread_id): # lint-amnesty, pylint: dis user = cc.User.from_django_user(request.user) thread = cc.Thread.find(thread_id) user.unfollow(thread, course_id=course_id) - thread_unfollowed.send(sender=None, user=request.user, post=thread) + send_signal_after_commit( + lambda: thread_unfollowed.send(sender=None, user=request.user, post=thread) + ) track_thread_followed_event(request, course, thread, False) return JsonResponse({}) diff --git a/lms/djangoapps/discussion/rest_api/api.py b/lms/djangoapps/discussion/rest_api/api.py index b87852c16cfa..443f9527acda 100644 --- a/lms/djangoapps/discussion/rest_api/api.py +++ b/lms/djangoapps/discussion/rest_api/api.py @@ -128,6 +128,7 @@ discussion_open_for_user, get_usernames_for_course, get_usernames_from_search_string, + send_signal_after_commit, set_attribute, is_posting_allowed, can_user_notify_all_learners, is_captcha_enabled, get_captcha_site_key_by_platform @@ -1017,7 +1018,6 @@ def get_thread_list( "group_id": group_id, "page": page, "per_page": page_size, - "text": text_search, "sort_key": cc_map.get(order_by), "author_id": author_id, "flagged": flagged, @@ -1382,7 +1382,9 @@ def _handle_following_field(form_value, user, cc_content, request): else: user.unfollow(cc_content) signal = thread_followed if form_value else thread_unfollowed - signal.send(sender=None, user=user, post=cc_content) + send_signal_after_commit( + lambda: signal.send(sender=None, user=user, post=cc_content) + ) track_thread_followed_event(request, course, cc_content, form_value) @@ -1395,9 +1397,13 @@ def _handle_abuse_flagged_field(form_value, user, cc_content, request): track_discussion_reported_event(request, course, cc_content) if ENABLE_DISCUSSIONS_MFE.is_enabled(course_key): if cc_content.type == 
'thread': - thread_flagged.send(sender='flag_abuse_for_thread', user=user, post=cc_content) + send_signal_after_commit( + lambda: thread_flagged.send(sender='flag_abuse_for_thread', user=user, post=cc_content) + ) else: - comment_flagged.send(sender='flag_abuse_for_comment', user=user, post=cc_content) + send_signal_after_commit( + lambda: comment_flagged.send(sender='flag_abuse_for_comment', user=user, post=cc_content) + ) else: remove_all = bool(is_privileged_user(course_key, User.objects.get(id=user.id))) cc_content.unFlagAbuse(user, cc_content, remove_all) @@ -1407,7 +1413,9 @@ def _handle_abuse_flagged_field(form_value, user, cc_content, request): def _handle_voted_field(form_value, cc_content, api_content, request, context): """vote or undo vote on thread/comment""" signal = thread_voted if cc_content.type == 'thread' else comment_voted - signal.send(sender=None, user=context["request"].user, post=cc_content) + send_signal_after_commit( + lambda: signal.send(sender=None, user=context["request"].user, post=cc_content) + ) if form_value: context["cc_requester"].vote(cc_content, "up") api_content["vote_count"] += 1 @@ -1452,7 +1460,9 @@ def _handle_comment_signals(update_data, comment, user, sender=None): """ for key, value in update_data.items(): if key == "endorsed" and value is True: - comment_endorsed.send(sender=sender, user=user, post=comment) + send_signal_after_commit( + lambda: comment_endorsed.send(sender=sender, user=user, post=comment) + ) def create_thread(request, thread_data): @@ -1502,7 +1512,10 @@ def create_thread(request, thread_data): raise ValidationError(dict(list(serializer.errors.items()) + list(actions_form.errors.items()))) serializer.save() cc_thread = serializer.instance - thread_created.send(sender=None, user=user, post=cc_thread, notify_all_learners=notify_all_learners) + # Use send_signal_after_commit() to ensure the signal is sent only after the transaction commits. 
+ send_signal_after_commit( + lambda: thread_created.send(sender=None, user=user, post=cc_thread, notify_all_learners=notify_all_learners) + ) api_thread = serializer.data _do_extra_actions(api_thread, cc_thread, list(thread_data.keys()), actions_form, context, request) @@ -1550,7 +1563,9 @@ def create_comment(request, comment_data): context["cc_requester"].follow(cc_thread) serializer.save() cc_comment = serializer.instance - comment_created.send(sender=None, user=request.user, post=cc_comment) + send_signal_after_commit( + lambda: comment_created.send(sender=None, user=request.user, post=cc_comment) + ) api_comment = serializer.data _do_extra_actions(api_comment, cc_comment, list(comment_data.keys()), actions_form, context, request) track_comment_created_event(request, course, cc_comment, cc_thread["commentable_id"], followed=False, @@ -1586,7 +1601,9 @@ def update_thread(request, thread_id, update_data): if set(update_data) - set(actions_form.fields): serializer.save() # signal to update Teams when a user edits a thread - thread_edited.send(sender=None, user=request.user, post=cc_thread) + send_signal_after_commit( + lambda: thread_edited.send(sender=None, user=request.user, post=cc_thread) + ) api_thread = serializer.data _do_extra_actions(api_thread, cc_thread, list(update_data.keys()), actions_form, context, request) @@ -1635,7 +1652,9 @@ def update_comment(request, comment_id, update_data): # Only save comment object if some of the edited fields are in the comment data, not extra actions if set(update_data) - set(actions_form.fields): serializer.save() - comment_edited.send(sender=None, user=request.user, post=cc_comment) + send_signal_after_commit( + lambda: comment_edited.send(sender=None, user=request.user, post=cc_comment) + ) api_comment = serializer.data _do_extra_actions(api_comment, cc_comment, list(update_data.keys()), actions_form, context, request) _handle_comment_signals(update_data, cc_comment, request.user) @@ -1823,7 +1842,9 @@ def 
delete_thread(request, thread_id): cc_thread, context = _get_thread_and_context(request, thread_id) if can_delete(cc_thread, context): cc_thread.delete() - thread_deleted.send(sender=None, user=request.user, post=cc_thread) + send_signal_after_commit( + lambda: thread_deleted.send(sender=None, user=request.user, post=cc_thread) + ) track_thread_deleted_event(request, context["course"], cc_thread) else: raise PermissionDenied @@ -1848,7 +1869,9 @@ def delete_comment(request, comment_id): cc_comment, context = _get_comment_and_context(request, comment_id) if can_delete(cc_comment, context): cc_comment.delete() - comment_deleted.send(sender=None, user=request.user, post=cc_comment) + send_signal_after_commit( + lambda: comment_deleted.send(sender=None, user=request.user, post=cc_comment) + ) track_comment_deleted_event(request, context["course"], cc_comment) else: raise PermissionDenied diff --git a/lms/djangoapps/discussion/rest_api/permissions.py b/lms/djangoapps/discussion/rest_api/permissions.py index cfcea5b32834..a1ae60c35ef3 100644 --- a/lms/djangoapps/discussion/rest_api/permissions.py +++ b/lms/djangoapps/discussion/rest_api/permissions.py @@ -6,7 +6,7 @@ from opaque_keys.edx.keys import CourseKey from rest_framework import permissions -from common.djangoapps.student.models import CourseAccessRole, CourseEnrollment +from common.djangoapps.student.models import CourseEnrollment from common.djangoapps.student.roles import ( CourseInstructorRole, CourseStaffRole, @@ -19,7 +19,7 @@ from openedx.core.djangoapps.django_comment_common.comment_client.comment import Comment from openedx.core.djangoapps.django_comment_common.comment_client.thread import Thread from openedx.core.djangoapps.django_comment_common.models import ( - Role, FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_COMMUNITY_TA, FORUM_ROLE_MODERATOR + FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_COMMUNITY_TA, FORUM_ROLE_MODERATOR ) @@ -194,26 +194,7 @@ def can_take_action_on_spam(user, course_id): user: User object 
course_id: CourseKey or string of course_id """ - if GlobalStaff().has_user(user): - return True - - if isinstance(course_id, str): - course_id = CourseKey.from_string(course_id) - org_id = course_id.org - course_ids = CourseEnrollment.objects.filter(user=user).values_list('course_id', flat=True) - course_ids = [c_id for c_id in course_ids if c_id.org == org_id] - user_roles = set( - Role.objects.filter( - users=user, - course_id__in=course_ids, - ).values_list('name', flat=True).distinct() - ) - if bool(user_roles & {FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR}): - return True - - if CourseAccessRole.objects.filter(user=user, course_id__in=course_ids, role__in=["instructor", "staff"]).exists(): - return True - return False + return GlobalStaff().has_user(user) class IsAllowedToBulkDelete(permissions.BasePermission): diff --git a/lms/djangoapps/discussion/rest_api/tests/test_api_v2.py b/lms/djangoapps/discussion/rest_api/tests/test_api_v2.py index 900d52017c5e..df4ac947bf0d 100644 --- a/lms/djangoapps/discussion/rest_api/tests/test_api_v2.py +++ b/lms/djangoapps/discussion/rest_api/tests/test_api_v2.py @@ -273,7 +273,8 @@ def test_basic(self, mock_emit): with self.assert_signal_sent( api, "thread_created", sender=None, user=self.user, exclude_args=("post", "notify_all_learners") ): - actual = create_thread(self.request, self.minimal_data) + with self.captureOnCommitCallbacks(execute=True): + actual = create_thread(self.request, self.minimal_data) expected = self.expected_thread_data( { "id": "test_id", @@ -352,7 +353,8 @@ def test_basic_in_blackout_period_with_user_access(self, mock_emit): with self.assert_signal_sent( api, "thread_created", sender=None, user=self.user, exclude_args=("post", "notify_all_learners") ): - actual = create_thread(self.request, self.minimal_data) + with self.captureOnCommitCallbacks(execute=True): + actual = create_thread(self.request, self.minimal_data) expected = self.expected_thread_data( { "author_label": "Moderator", @@ -428,7 
+430,8 @@ def test_title_truncation(self, mock_emit): with self.assert_signal_sent( api, "thread_created", sender=None, user=self.user, exclude_args=("post", "notify_all_learners") ): - create_thread(self.request, data) + with self.captureOnCommitCallbacks(execute=True): + create_thread(self.request, data) event_name, event_data = mock_emit.call_args[0] assert event_name == "edx.forum.thread.created" assert event_data == { @@ -678,7 +681,8 @@ def test_success(self, parent_id, mock_emit): with self.assert_signal_sent( api, "comment_created", sender=None, user=self.user, exclude_args=("post",) ): - actual = create_comment(self.request, data) + with self.captureOnCommitCallbacks(execute=True): + actual = create_comment(self.request, data) expected = { "id": "test_comment", "thread_id": "test_thread", @@ -785,7 +789,8 @@ def test_success_in_black_out_with_user_access(self, parent_id, mock_emit): with self.assert_signal_sent( api, "comment_created", sender=None, user=self.user, exclude_args=("post",) ): - actual = create_comment(self.request, data) + with self.captureOnCommitCallbacks(execute=True): + actual = create_comment(self.request, data) expected = { "id": "test_comment", "thread_id": "test_thread", @@ -1118,9 +1123,10 @@ def test_basic(self): with self.assert_signal_sent( api, "thread_edited", sender=None, user=self.user, exclude_args=("post",) ): - actual = update_thread( - self.request, "test_thread", {"raw_body": "Edited body"} - ) + with self.captureOnCommitCallbacks(execute=True): + actual = update_thread( + self.request, "test_thread", {"raw_body": "Edited body"} + ) assert actual == self.expected_thread_data( { @@ -1436,13 +1442,13 @@ def test_following(self, old_following, new_following, mock_emit): self.register_thread() data = {"following": new_following} signal_name = "thread_followed" if new_following else "thread_unfollowed" - mock_path = ( - f"openedx.core.djangoapps.django_comment_common.signals.{signal_name}.send" - ) + # Patch at the api module 
level where the signal is imported and used + mock_path = f"lms.djangoapps.discussion.rest_api.api.{signal_name}" with mock.patch(mock_path) as signal_patch: - result = update_thread(self.request, "test_thread", data) + with self.captureOnCommitCallbacks(execute=True): + result = update_thread(self.request, "test_thread", data) if old_following != new_following: - self.assertEqual(signal_patch.call_count, 1) + self.assertEqual(signal_patch.send.call_count, 1) assert result["following"] == new_following if old_following == new_following: @@ -1782,9 +1788,10 @@ def test_basic(self, parent_id): with self.assert_signal_sent( api, "comment_edited", sender=None, user=self.user, exclude_args=("post",) ): - actual = update_comment( - self.request, "test_comment", {"raw_body": "Edited body"} - ) + with self.captureOnCommitCallbacks(execute=True): + actual = update_comment( + self.request, "test_comment", {"raw_body": "Edited body"} + ) expected = { "anonymous": False, "anonymous_to_peers": False, @@ -2207,7 +2214,7 @@ def test_raw_body_access(self, role_name, is_thread_author, is_comment_author): ) @ddt.unpack @mock.patch( - "openedx.core.djangoapps.django_comment_common.signals.comment_endorsed.send" + "lms.djangoapps.discussion.rest_api.api.comment_endorsed.send" ) def test_endorsed_access( self, role_name, is_thread_author, thread_type, is_comment_author, endorsed_mock @@ -2226,7 +2233,8 @@ def test_endorsed_access( thread_type == "discussion" or not is_thread_author ) try: - update_comment(self.request, "test_comment", {"endorsed": True}) + with self.captureOnCommitCallbacks(execute=True): + update_comment(self.request, "test_comment", {"endorsed": True}) self.assertEqual(endorsed_mock.call_count, 1) assert not expected_error except ValidationError as err: @@ -2354,7 +2362,8 @@ def test_basic(self, mock_emit): with self.assert_signal_sent( api, "thread_deleted", sender=None, user=self.user, exclude_args=("post",) ): - assert delete_thread(self.request, self.thread_id) 
is None + with self.captureOnCommitCallbacks(execute=True): + assert delete_thread(self.request, self.thread_id) is None self.check_mock_called("delete_thread") params = { "thread_id": self.thread_id, @@ -2540,7 +2549,8 @@ def test_basic(self, mock_emit): with self.assert_signal_sent( api, "comment_deleted", sender=None, user=self.user, exclude_args=("post",) ): - assert delete_comment(self.request, self.comment_id) is None + with self.captureOnCommitCallbacks(execute=True): + assert delete_comment(self.request, self.comment_id) is None self.check_mock_called("delete_comment") params = { "comment_id": self.comment_id, diff --git a/lms/djangoapps/discussion/rest_api/utils.py b/lms/djangoapps/discussion/rest_api/utils.py index 0f02a0dcdcf2..a2591655adc2 100644 --- a/lms/djangoapps/discussion/rest_api/utils.py +++ b/lms/djangoapps/discussion/rest_api/utils.py @@ -3,13 +3,14 @@ """ import logging from datetime import datetime -from typing import Dict, List +from typing import Callable, Dict, List import requests from crum import get_current_request from django.conf import settings from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user from django.core.paginator import Paginator +from django.db import transaction from django.db.models.functions import Length from pytz import UTC @@ -496,3 +497,24 @@ def get_captcha_site_key_by_platform(platform: str) -> str | None: Get reCAPTCHA site key based on the platform. """ return settings.RECAPTCHA_SITE_KEYS.get(platform, None) + + +def send_signal_after_commit(signal_func: Callable): + """ + Schedule a signal to be sent after the current database transaction commits. + + This helper ensures that signals are only sent after the transaction commits, + preventing race conditions where async tasks (like Celery workers) may try to + access database records before they are visible (especially important for MySQL + backend with transaction isolation). 
+ + Args: + signal_func: A callable that sends the signal. This will be executed + after the transaction commits. + + Example: + send_signal_after_commit( + lambda: thread_created.send(sender=None, user=user, post=thread, notify_all_learners=False) + ) + """ + transaction.on_commit(signal_func) diff --git a/lms/djangoapps/lti_provider/users.py b/lms/djangoapps/lti_provider/users.py index 168d6c4c6dc1..b2df1ac02563 100644 --- a/lms/djangoapps/lti_provider/users.py +++ b/lms/djangoapps/lti_provider/users.py @@ -3,6 +3,7 @@ that an individual has in the campus LMS platform and on edX. """ +import logging import random import string @@ -18,6 +19,8 @@ from lms.djangoapps.lti_provider.models import LtiUser from openedx.core.djangoapps.safe_sessions.middleware import mark_user_change_as_expected +log = logging.getLogger("edx.lti_provider") + def get_lti_user_details(request): """ @@ -54,21 +57,47 @@ def authenticate_lti_user(request, lti_user_id, lti_consumer): if lti_consumer.require_user_account: # Verify that the email from the LTI Launch and the logged-in user are the same # before linking the LtiUser with the edx_user. + log.info( + 'LTI consumer requires existing user account for LTI user ID: %s from request path: %s', + lti_user_id, + request.path + ) if request.user.is_authenticated and request.user.email.lower() == profile["email"]: lti_user = create_lti_user(lti_user_id, lti_consumer, profile) else: + log.error( + 'LTI user account linking failed for LTI user ID: %s for request path: %s: ' + 'either user is not logged in or email mismatched', + lti_user_id, + request.path + ) # Ask the user to login before linking. 
raise PermissionDenied() from exc elif lti_consumer.use_lti_pii: + log.info( + 'Creating LTI user with PII for LTI user ID: %s from request path: %s', + lti_user_id, + request.path + ) profile["username"] = lti_user_id lti_user = create_lti_user(lti_user_id, lti_consumer, profile) else: + log.info( + 'Creating LTI user without PII for LTI user ID: %s from request path: %s', + lti_user_id, + request.path + ) lti_user = create_lti_user(lti_user_id, lti_consumer) if not (request.user.is_authenticated and request.user == lti_user.edx_user): # The user is not authenticated, or is logged in as somebody else. # Switch them to the LTI user + log.info( + 'Switching logged-in user to LTI user ID: %s for request path: %s', + lti_user_id, + request.path + ) switch_user(request, lti_user, lti_consumer) @@ -102,6 +131,10 @@ def create_lti_user(lti_user_id, lti_consumer, profile=None): edx_user_profile.save() created = True except IntegrityError: + log.error( + 'LTI user creation failed for LTI user ID %s. Retrying with a new username', + lti_user_id, + ) edx_user_id = generate_random_edx_username() # The random edx_user_id wasn't unique. Since 'created' is still # False, we will retry with a different random ID. @@ -128,6 +161,7 @@ def switch_user(request, lti_user, lti_consumer): if not edx_user: # This shouldn't happen, since we've created edX accounts for any LTI # users by this point, but just in case we can return a 403. 
+ log.error('Switching user failed for LTI user ID: %s from request path: %s', lti_user.lti_user_id, request.path) raise PermissionDenied() login(request, edx_user) mark_user_change_as_expected(edx_user.id) diff --git a/lms/djangoapps/lti_provider/views.py b/lms/djangoapps/lti_provider/views.py index 2da12c0960c1..4f6f2f9a93d7 100644 --- a/lms/djangoapps/lti_provider/views.py +++ b/lms/djangoapps/lti_provider/views.py @@ -56,12 +56,14 @@ def lti_launch(request, course_id, usage_id): pair """ if not settings.FEATURES['ENABLE_LTI_PROVIDER']: + log.info('LTI provider feature is disabled.') return HttpResponseForbidden() # Check the LTI parameters, and return 400 if any required parameters are # missing params = get_required_parameters(request.POST) if not params: + log.info('Missing required LTI parameters in LTI request path: %s', request.path) return HttpResponseBadRequest() params.update(get_optional_parameters(request.POST)) params.update(get_custom_parameters(request.POST)) @@ -74,10 +76,21 @@ def lti_launch(request, course_id, usage_id): params['oauth_consumer_key'] ) except LtiConsumer.DoesNotExist: + log.error( + 'LTI consumer lookup failed because no matching consumer was found against ' + 'consumer key: %s and instance GUID: %s for request path: %s', + params['oauth_consumer_key'], + params.get('tool_consumer_instance_guid', None), + request.path + ) return HttpResponseForbidden() # Check the OAuth signature on the message if not SignatureValidator(lti_consumer).verify(request): + log.error( + 'Invalid OAuth signature for LTI launch from request path: %s', + request.path + ) return HttpResponseForbidden() # Add the course and usage keys to the parameters array @@ -85,20 +98,26 @@ def lti_launch(request, course_id, usage_id): course_key, usage_key = parse_course_and_usage_keys(course_id, usage_id) except InvalidKeyError: log.error( - 'Invalid course key %s or usage key %s from request %s', + 'Invalid course key %s or usage key %s from request path %s', 
course_id, usage_id, - request + request.path ) raise Http404() # lint-amnesty, pylint: disable=raise-missing-from params['course_key'] = course_key params['usage_key'] = usage_key - # Create an edX account if the user identifed by the LTI launch doesn't have + # Create an edX account if the user identified by the LTI launch doesn't have # one already, and log the edX account into the platform. try: - authenticate_lti_user(request, params['user_id'], lti_consumer) + user_id = params["user_id"] + authenticate_lti_user(request, user_id, lti_consumer) except PermissionDenied: + log.info( + 'LTI user authentication failed for user Id: %s from request path: %s', + user_id, + request.path + ) request.session.flush() context = { "login_link": request.build_absolute_uri(settings.LOGIN_URL), diff --git a/lms/envs/common.py b/lms/envs/common.py index b28c9099e96b..06ba0c050aea 100644 --- a/lms/envs/common.py +++ b/lms/envs/common.py @@ -2839,7 +2839,6 @@ "secondary_email_enabled", "year_of_birth", "phone_number", - "activation_key", "pending_name_change", ] ) diff --git a/lms/static/js/discovery/discovery_factory.js b/lms/static/js/discovery/discovery_factory.js index d26841f86fca..0e44d8154a84 100644 --- a/lms/static/js/discovery/discovery_factory.js +++ b/lms/static/js/discovery/discovery_factory.js @@ -30,8 +30,11 @@ } listing = new CoursesListing({model: courseListingModel}); - dispatcher.listenTo(form, 'search', function(query) { + dispatcher.listenTo(form, "search", function (query) { form.showLoadingIndicator(); + if (!query || query.trim() === "") { + filters.remove("search_query"); + } search.performSearch(query, filters.getTerms()); }); diff --git a/openedx/core/djangoapps/content/block_structure/factory.py b/openedx/core/djangoapps/content/block_structure/factory.py index 3697f9e92f51..9dd2ce02d71e 100644 --- a/openedx/core/djangoapps/content/block_structure/factory.py +++ b/openedx/core/djangoapps/content/block_structure/factory.py @@ -31,7 +31,8 @@ def 
create_from_modulestore(cls, root_block_usage_key, modulestore): xmodule.modulestore.exceptions.ItemNotFoundError if a block for root_block_usage_key is not found in the modulestore. """ - block_structure = BlockStructureModulestoreData(root_block_usage_key) + root_xblock = modulestore.get_item(root_block_usage_key, depth=None, lazy=False) + block_structure = BlockStructureModulestoreData(root_block_usage_key.for_branch(None)) blocks_visited = set() def build_block_structure(xblock): @@ -41,19 +42,26 @@ def build_block_structure(xblock): """ # Check if the xblock was already visited (can happen in # DAGs). - if xblock.location in blocks_visited: + # Normalize location to remove branch/version information + # When create_from_modulestore is wrapped in published_only branch decorator, + # "xblock being changed" location contains branch and version info which causes + # mismatch when removing inaccessible blocks in + # CourseNavigationBlocksView.filter_inaccessible_blocks + # while fetching course navigation. + location = xblock.location.for_branch(None) + if location in blocks_visited: return # Add the xBlock. - blocks_visited.add(xblock.location) - block_structure._add_xblock(xblock.location, xblock) # pylint: disable=protected-access + blocks_visited.add(location) + block_structure._add_xblock(location, xblock) # pylint: disable=protected-access # Add relations with its children and recurse. 
for child in xblock.get_children(): - block_structure._add_relation(xblock.location, child.location) # pylint: disable=protected-access + child_location = child.location.for_branch(None) + block_structure._add_relation(location, child_location) # pylint: disable=protected-access build_block_structure(child) - root_xblock = modulestore.get_item(root_block_usage_key, depth=None, lazy=False) build_block_structure(root_xblock) return block_structure diff --git a/openedx/core/djangoapps/content/block_structure/tests/test_factory.py b/openedx/core/djangoapps/content/block_structure/tests/test_factory.py index 00efa393d8fa..92050eef24b8 100644 --- a/openedx/core/djangoapps/content/block_structure/tests/test_factory.py +++ b/openedx/core/djangoapps/content/block_structure/tests/test_factory.py @@ -6,12 +6,13 @@ from django.test import TestCase from opaque_keys.edx.keys import CourseKey +from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator from xmodule.modulestore.exceptions import ItemNotFoundError from ..exceptions import BlockStructureNotFound from ..factory import BlockStructureFactory from ..store import BlockStructureStore -from .helpers import ChildrenMapTestMixin, MockCache, MockModulestoreFactory +from .helpers import ChildrenMapTestMixin, MockCache, MockModulestoreFactory, MockXBlock, MockModulestore class TestBlockStructureFactory(TestCase, ChildrenMapTestMixin): @@ -77,3 +78,78 @@ def test_new(self): block_structure._block_data_map, # pylint: disable=protected-access ) self.assert_block_structure(new_structure, self.children_map) + + def test_from_modulestore_normalizes_locations_with_branch_info(self): + """ + Test that locations with branch/version information are normalized + when building block structures. + + This test verifies the fix for PR #37866, which ensures that when + creating block structures within the published_only branch context, + locations are normalized by removing branch/version information. 
+ This prevents comparison mismatches when filtering inaccessible blocks. + + Without the fix, locations with branch info would be stored as-is, + causing issues when comparing with normalized locations later. + """ + # Create a course key with branch information to simulate + # the published_only branch context + course_key_with_branch = CourseLocator('org', 'course', 'run', branch='published') + root_usage_key = BlockUsageLocator( + course_key=course_key_with_branch, + block_type='html', + block_id='0' + ) + + # Create a modulestore with xblocks that have locations containing branch info + modulestore = MockModulestore() + blocks = {} + children_map = self.SIMPLE_CHILDREN_MAP + + # Create blocks with branch information in their locations + for block_id, children in enumerate(children_map): + # Create location with branch info + block_location = BlockUsageLocator( + course_key=course_key_with_branch, + block_type='html', + block_id=str(block_id) + ) + # Create child locations with branch info + child_locations = [ + BlockUsageLocator( + course_key=course_key_with_branch, + block_type='html', + block_id=str(child_id) + ) + for child_id in children + ] + blocks[block_location] = MockXBlock( + location=block_location, + children=child_locations, + modulestore=modulestore + ) + modulestore.set_blocks(blocks) + + # Build block structure from modulestore + block_structure = BlockStructureFactory.create_from_modulestore( + root_block_usage_key=root_usage_key, + modulestore=modulestore + ) + + # Verify that all stored block keys are normalized (without branch info) + # This is the key assertion: with the fix, all keys should be normalized + for block_key in block_structure: + # The block_key should equal its normalized version + normalized_key = block_key.for_branch(None) + self.assertEqual( + block_key, + normalized_key, + f"Block key {block_key} should be normalized (without branch info). 
" + f"Normalized version: {normalized_key}" + ) + # Verify it doesn't have branch information in the course_key + if hasattr(block_key.course_key, 'branch'): + self.assertIsNone( + block_key.course_key.branch, + f"Block key {block_key} should not have branch information" + ) diff --git a/openedx/core/djangoapps/content_libraries/api/blocks.py b/openedx/core/djangoapps/content_libraries/api/blocks.py index af44d1874508..1328db6e6ca1 100644 --- a/openedx/core/djangoapps/content_libraries/api/blocks.py +++ b/openedx/core/djangoapps/content_libraries/api/blocks.py @@ -309,7 +309,7 @@ def validate_can_add_block_to_library( block_class = XBlock.load_class(block_type) # Will raise an exception if invalid if block_class.has_children: raise IncompatibleTypesError( - 'The "{block_type}" XBlock (ID: "{block_id}") has children, so it not supported in content libraries', + f'The "{block_type}" XBlock (ID: "{block_id}") has children, so it not supported in content libraries.', ) # Make sure the new ID is not taken already: usage_key = LibraryUsageLocatorV2( # type: ignore[abstract] @@ -956,7 +956,7 @@ def delete_library_block_static_asset_file(usage_key, file_path, user=None): ) -def publish_component_changes(usage_key: LibraryUsageLocatorV2, user: UserType): +def publish_component_changes(usage_key: LibraryUsageLocatorV2, user_id: int): """ Publish all pending changes in a single component. """ @@ -969,7 +969,7 @@ def publish_component_changes(usage_key: LibraryUsageLocatorV2, user: UserType): drafts_to_publish = authoring_api.get_all_drafts(learning_package.id).filter(entity__key=component.key) # Publish the component and update anything that needs to be updated (e.g. 
search index): publish_log = authoring_api.publish_from_drafts( - learning_package.id, draft_qset=drafts_to_publish, published_by=user.id, + learning_package.id, draft_qset=drafts_to_publish, published_by=user_id, ) # Since this is a single component, it should be safe to process synchronously and in-process: tasks.send_events_after_publish(publish_log.pk, str(library_key)) diff --git a/openedx/core/djangoapps/content_libraries/api/containers.py b/openedx/core/djangoapps/content_libraries/api/containers.py index 28f05a05723a..5cca0b3a994b 100644 --- a/openedx/core/djangoapps/content_libraries/api/containers.py +++ b/openedx/core/djangoapps/content_libraries/api/containers.py @@ -575,7 +575,11 @@ def get_containers_contains_item( ] -def publish_container_changes(container_key: LibraryContainerLocator, user_id: int | None) -> None: +def publish_container_changes( + container_key: LibraryContainerLocator, + user_id: int | None, + call_post_publish_events_sync=False, +) -> None: """ [ 🛑 UNSTABLE ] Publish all unpublished changes in a container and all its child containers/blocks. @@ -595,7 +599,10 @@ def publish_container_changes(container_key: LibraryContainerLocator, user_id: i ) # Update the search index (and anything else) for the affected container + blocks # This is mostly synchronous but may complete some work asynchronously if there are a lot of changes. 
- tasks.wait_for_post_publish_events(publish_log, library_key) + if call_post_publish_events_sync: + tasks.send_events_after_publish(publish_log.pk, str(library_key)) + else: + tasks.wait_for_post_publish_events(publish_log, library_key) def copy_container(container_key: LibraryContainerLocator, user_id: int) -> UserClipboardData: diff --git a/openedx/core/djangoapps/content_libraries/api/libraries.py b/openedx/core/djangoapps/content_libraries/api/libraries.py index 8d32e4dbc015..d77061d583be 100644 --- a/openedx/core/djangoapps/content_libraries/api/libraries.py +++ b/openedx/core/djangoapps/content_libraries/api/libraries.py @@ -53,13 +53,11 @@ from django.db import IntegrityError, transaction from django.db.models import Q, QuerySet from django.utils.translation import gettext as _ -from opaque_keys.edx.locator import ( - LibraryLocatorV2, - LibraryUsageLocatorV2, -) -from openedx_events.content_authoring.data import ( - ContentLibraryData, -) +from opaque_keys.edx.locator import LibraryLocatorV2, LibraryUsageLocatorV2 +from openedx_authz import api as authz_api +from openedx_authz.api import assign_role_to_user_in_scope +from openedx_authz.constants import permissions as authz_permissions +from openedx_events.content_authoring.data import ContentLibraryData from openedx_events.content_authoring.signals import ( CONTENT_LIBRARY_CREATED, CONTENT_LIBRARY_DELETED, @@ -70,7 +68,6 @@ from organizations.models import Organization from user_tasks.models import UserTaskArtifact, UserTaskStatus from xblock.core import XBlock -from openedx_authz.api import assign_role_to_user_in_scope from openedx.core.types import User as UserType @@ -78,6 +75,7 @@ from ..constants import ALL_RIGHTS_RESERVED from ..models import ContentLibrary, ContentLibraryPermission from .exceptions import LibraryAlreadyExists, LibraryPermissionIntegrityError +from .permissions import LEGACY_LIB_PERMISSIONS log = logging.getLogger(__name__) @@ -109,6 +107,9 @@ "revert_changes", 
"get_backup_task_status", "assign_library_role_to_user", + "user_has_permission_across_lib_authz_systems", + "is_library_backup_task", + "is_library_restore_task", ] @@ -245,7 +246,18 @@ def user_can_create_library(user: AbstractUser) -> bool: """ Check if the user has permission to create a content library. """ - return user.has_perm(permissions.CAN_CREATE_CONTENT_LIBRARY) + library_permission = permissions.CAN_CREATE_CONTENT_LIBRARY + lib_permission_in_authz = _transform_legacy_lib_permission_to_authz_permission(library_permission) + # The authz_api.is_user_allowed check only validates permissions within a specific library context. Since + # creating a library is not tied to an existing one, we use user.has_perm (via Bridgekeeper) to check if the user + # can create libraries, meaning they have the course creator role. In the future, this should rely on a global (*) + # role defined in the Authorization Framework for instance-level resource creation. + has_perms = user.has_perm(library_permission) or authz_api.is_user_allowed( + user, + lib_permission_in_authz, + authz_api.data.GLOBAL_SCOPE_WILDCARD, + ) + return has_perms def get_libraries_for_user(user, org=None, text_search=None, order=None) -> QuerySet[ContentLibrary]: @@ -267,7 +279,11 @@ def get_libraries_for_user(user, org=None, text_search=None, order=None) -> Quer Q(learning_package__description__icontains=text_search) ) - filtered = permissions.perms[permissions.CAN_VIEW_THIS_CONTENT_LIBRARY].filter(user, qs) + # Using distinct() temporarily to avoid duplicate results caused by overlapping permission checks + # between Bridgekeeper and the new authorization framework. This ensures correct results for now, + # but it should be removed once Bridgekeeper support is fully dropped and all permission logic + # is handled through openedx-authz. 
+ filtered = permissions.perms[permissions.CAN_VIEW_THIS_CONTENT_LIBRARY].filter(user, qs).distinct() if order: order_query = 'learning_package__' @@ -332,7 +348,7 @@ def require_permission_for_library_key(library_key: LibraryLocatorV2, user: User library_obj = ContentLibrary.objects.get_by_key(library_key) # obj should be able to read any valid model object but mypy thinks it can only be # "User | AnonymousUser | None" - if not user.has_perm(permission, obj=library_obj): # type:ignore[arg-type] + if not user_has_permission_across_lib_authz_systems(user, permission, library_obj): raise PermissionDenied return library_obj @@ -750,3 +766,102 @@ def get_backup_task_status( result['file'] = artifact.file return result + + +def _transform_legacy_lib_permission_to_authz_permission(permission: str) -> str: + """ + Transform a legacy content library permission to an openedx-authz permission. + """ + # There is no dedicated permission or role for can_create_content_library in openedx-authz yet, + # so we reuse the same permission to rely on user.has_perm via Bridgekeeper. + return { + permissions.CAN_CREATE_CONTENT_LIBRARY: permissions.CAN_CREATE_CONTENT_LIBRARY, + permissions.CAN_DELETE_THIS_CONTENT_LIBRARY: authz_permissions.DELETE_LIBRARY.identifier, + permissions.CAN_EDIT_THIS_CONTENT_LIBRARY: authz_permissions.EDIT_LIBRARY_CONTENT.identifier, + permissions.CAN_EDIT_THIS_CONTENT_LIBRARY_TEAM: authz_permissions.MANAGE_LIBRARY_TEAM.identifier, + permissions.CAN_VIEW_THIS_CONTENT_LIBRARY: authz_permissions.VIEW_LIBRARY.identifier, + permissions.CAN_VIEW_THIS_CONTENT_LIBRARY_TEAM: authz_permissions.VIEW_LIBRARY_TEAM.identifier, + }.get(permission, permission) + + +def _transform_authz_permission_to_legacy_lib_permission(permission: str) -> str: + """ + Transform an openedx-authz permission to a legacy content library permission. 
+ """ + return { + authz_permissions.PUBLISH_LIBRARY_CONTENT.identifier: permissions.CAN_EDIT_THIS_CONTENT_LIBRARY, + authz_permissions.CREATE_LIBRARY_COLLECTION.identifier: permissions.CAN_EDIT_THIS_CONTENT_LIBRARY, + authz_permissions.EDIT_LIBRARY_COLLECTION.identifier: permissions.CAN_EDIT_THIS_CONTENT_LIBRARY, + authz_permissions.DELETE_LIBRARY_COLLECTION.identifier: permissions.CAN_EDIT_THIS_CONTENT_LIBRARY, + }.get(permission, permission) + + +def user_has_permission_across_lib_authz_systems( + user: UserType, + permission: str | authz_api.data.PermissionData, + library_obj: ContentLibrary, +) -> bool: + """ + Check whether a user has a given permission on a content library across both the + legacy edx-platform permission system and the newer openedx-authz system. + + The provided permission name is normalized to both systems (legacy and authz), and + authorization is granted if either: + - the user holds the legacy object-level permission on the ContentLibrary instance, or + - the openedx-authz API allows the user for the corresponding permission on the library. + + **Note:** + Temporary: this function uses Bridgekeeper-based logic for cases not yet modeled in openedx-authz. + + Current gaps covered here: + - CAN_CREATE_CONTENT_LIBRARY: we call user.has_perm via Bridgekeeper to verify the user is a course creator. + - CAN_VIEW_THIS_CONTENT_LIBRARY: we respect the allow_public_read flag via Bridgekeeper. + + Replace these with authz_api.is_user_allowed once openedx-authz supports + these conditions natively (including global (*) roles). + + Args: + user: The Django user (or user-like object) to check. + permission: The permission identifier (either a legacy codename or an openedx-authz name). + library_obj: The ContentLibrary instance to check against. + + Returns: + bool: True if the user is authorized by either system; otherwise False. 
+ """ + if isinstance(permission, authz_api.data.PermissionData): + permission = permission.identifier + if _is_legacy_permission(permission): + legacy_permission = permission + authz_permission = _transform_legacy_lib_permission_to_authz_permission(permission) + else: + authz_permission = permission + legacy_permission = _transform_authz_permission_to_legacy_lib_permission(permission) + return ( + # Check both the legacy and the new openedx-authz permissions + user.has_perm(perm=legacy_permission, obj=library_obj) + or authz_api.is_user_allowed( + user, + authz_permission, + str(library_obj.library_key), + ) + ) + + +def _is_legacy_permission(permission: str) -> bool: + """ + Determine if the specified library permission is part of the legacy + or the new openedx-authz system. + """ + return permission in LEGACY_LIB_PERMISSIONS + + +def is_library_backup_task(task_name: str) -> bool: + """Case-insensitive match to see if a task is a library backup.""" + from ..tasks import LibraryBackupTask # avoid circular import error + return task_name.startswith(LibraryBackupTask.NAME_PREFIX.lower()) + + +def is_library_restore_task(task_name: str) -> bool: + """Case-insensitive match to see if a task is a library restore.""" + from ..tasks import LibraryRestoreTask # avoid circular import error + return task_name.startswith(LibraryRestoreTask.NAME_PREFIX.lower()) diff --git a/openedx/core/djangoapps/content_libraries/api/permissions.py b/openedx/core/djangoapps/content_libraries/api/permissions.py index 6064b80d6f9e..5b8bd4ba7e1a 100644 --- a/openedx/core/djangoapps/content_libraries/api/permissions.py +++ b/openedx/core/djangoapps/content_libraries/api/permissions.py @@ -12,3 +12,13 @@ CAN_VIEW_THIS_CONTENT_LIBRARY, CAN_VIEW_THIS_CONTENT_LIBRARY_TEAM ) + +LEGACY_LIB_PERMISSIONS = frozenset({ + CAN_CREATE_CONTENT_LIBRARY, + CAN_DELETE_THIS_CONTENT_LIBRARY, + CAN_EDIT_THIS_CONTENT_LIBRARY, + CAN_EDIT_THIS_CONTENT_LIBRARY_TEAM, + CAN_LEARN_FROM_THIS_CONTENT_LIBRARY, + 
CAN_VIEW_THIS_CONTENT_LIBRARY, + CAN_VIEW_THIS_CONTENT_LIBRARY_TEAM, +}) diff --git a/openedx/core/djangoapps/content_libraries/permissions.py b/openedx/core/djangoapps/content_libraries/permissions.py index 4e72381986ed..c3a8b68c947c 100644 --- a/openedx/core/djangoapps/content_libraries/permissions.py +++ b/openedx/core/djangoapps/content_libraries/permissions.py @@ -2,8 +2,12 @@ Permissions for Content Libraries (v2, Learning-Core-based) """ from bridgekeeper import perms, rules -from bridgekeeper.rules import Attribute, ManyRelation, Relation, blanket_rule, in_current_groups +from bridgekeeper.rules import Attribute, ManyRelation, Relation, blanket_rule, in_current_groups, Rule from django.conf import settings +from django.db.models import Q + +from openedx_authz import api as authz_api +from openedx_authz.constants.permissions import VIEW_LIBRARY from openedx.core.djangoapps.content_libraries.models import ContentLibraryPermission @@ -54,6 +58,154 @@ def is_course_creator(user): return get_course_creator_status(user) == 'granted' + +class HasPermissionInContentLibraryScope(Rule): + """Bridgekeeper rule that checks content library permissions via the openedx-authz system. + + This rule integrates the openedx-authz authorization system (backed by Casbin) with + Bridgekeeper's declarative permission system. It checks if a user has been granted a + specific permission (action) through their role assignments in the authorization system. + + The rule works by: + 1. Querying the authorization system to find library scopes where the user has this permission + 2. Parsing the library keys (org/slug) from the scopes + 3. Building database filters to match ContentLibrary models with those org/slug combinations + + Attributes: + permission (PermissionData): The permission object representing the action to check + (e.g., 'view', 'edit'). This is used to look up scopes in the authorization system. 
+ + filter_keys (list[str]): The Django model fields to use when building QuerySet filters. + Defaults to ['org', 'slug'] for ContentLibrary models. + + These fields are used to construct the Q object filters that match libraries + based on the parsed components from library keys in authorization scopes. + + For ContentLibrary, library keys have the format 'lib:ORG:SLUG', which maps to: + - 'org' -> filters on org__short_name (related Organization model) + - 'slug' -> filters on slug field + + If filtering by different fields is needed, pass a custom list. For example: + - ['org', 'slug'] - default for ContentLibrary (filters by org and slug) + - ['id'] - filter by primary key (for other models) + + Examples: + Basic usage with default filter_keys: + >>> from bridgekeeper import perms + >>> from openedx.core.djangoapps.content_libraries.permissions import HasPermissionInContentLibraryScope + >>> + >>> # Uses default filter_keys=['org', 'slug'] for ContentLibrary + >>> can_view = HasPermissionInContentLibraryScope('view_library') + >>> perms['libraries.view_library'] = can_view + + Compound permissions with boolean operators: + >>> from bridgekeeper.rules import Attribute + >>> + >>> is_active = Attribute('is_active', True) + >>> is_staff = Attribute('is_staff', True) + >>> can_view = HasPermissionInContentLibraryScope('view_library') + >>> + >>> # User must be active AND (staff OR have explicit permission) + >>> perms['libraries.view_library'] = is_active & (is_staff | can_view) + + QuerySet filtering (efficient, database-level): + >>> from openedx.core.djangoapps.content_libraries.models import ContentLibrary + >>> + >>> # Gets all libraries user can view in a single SQL query + >>> visible_libraries = perms['libraries.view_library'].filter( + ... request.user, + ... ContentLibrary.objects.all() + ... 
) + + Individual object checks: + >>> library = ContentLibrary.objects.get(org__short_name='DemoX', slug='CSPROB') + >>> if perms['libraries.view_library'].check(request.user, library): + ... # User can view this specific library + + Note: + The library keys in authorization scopes must have the format 'lib:ORG:SLUG' + to match the ContentLibrary model's org.short_name and slug fields. + For example, scope 'lib:DemoX:CSPROB' matches a library with + org.short_name='DemoX' and slug='CSPROB'. + """ + + def __init__(self, permission: authz_api.PermissionData, filter_keys: list[str] | None = None): + """Initialize the rule with the action and filter keys to filter on. + + Args: + permission (PermissionData): The permission to check (e.g., 'view', 'edit'). + filter_keys (list[str]): The model fields to filter on when building QuerySet filters. + Defaults to ['org', 'slug'] for ContentLibrary. + """ + self.permission = permission + self.filter_keys = filter_keys if filter_keys is not None else ["org", "slug"] + + def query(self, user): + """Convert this rule to a Django Q object for QuerySet filtering. + + Args: + user: The Django user object (must have a 'username' attribute). + + Returns: + Q: A Django Q object that can be used to filter a QuerySet. 
+ The Q object combines multiple conditions using OR (|) operators, + where each condition matches a library's org and slug fields: + Q(org__short_name='OrgA' & slug='lib-a') | Q(org__short_name='OrgB' & slug='lib-b') + + Example: + >>> # User has 'view' permission in scopes: ['lib:OrgA:lib-a', 'lib:OrgB:lib-b'] + >>> rule = HasPermissionInContentLibraryScope('view', filter_keys=['org', 'slug']) + >>> q = rule.query(user) + >>> # Results in: Q(org__short_name='OrgA', slug='lib-a') | Q(org__short_name='OrgB', slug='lib-b') + >>> + >>> # Apply to queryset + >>> libraries = ContentLibrary.objects.filter(q) + >>> # SQL: SELECT * FROM content_library + >>> # WHERE (org.short_name='OrgA' AND slug='lib-a') + >>> # OR (org.short_name='OrgB' AND slug='lib-b') + """ + scopes = authz_api.get_scopes_for_user_and_permission( + user.username, + self.permission.identifier + ) + + library_keys = [scope.library_key for scope in scopes] + + if not library_keys: + return Q(pk__in=[]) # No access, return Q that matches nothing + + # Build Q object: OR together (org AND slug) conditions for each library + query = Q() + for library_key in library_keys: + query |= Q(org__short_name=library_key.org, slug=library_key.slug) + + return query + + def check(self, user, instance, *args, **kwargs): # pylint: disable=arguments-differ + """Check if user has permission for a specific object instance. + + This method is used for checking permission on individual objects rather + than filtering a QuerySet. It extracts the scope from the object and + checks if the user has the required permission in that scope via Casbin. + + Args: + user: The Django user object (must have a 'username' attribute). + instance: The Django model instance to check permission for. + *args: Additional positional arguments (for compatibility with parent signature). + **kwargs: Additional keyword arguments (for compatibility with parent signature). 
+ + Returns: + bool: True if the user has the permission in the object's scope, + False otherwise. + + Example: + >>> rule = HasPermissionInContentLibraryScope('view') + >>> can_view = rule.check(user, library) + >>> # Checks if user has 'view' permission in scope 'lib:DemoX:CSPROB' + """ + return authz_api.is_user_allowed(user.username, self.permission.identifier, str(instance.library_key)) + + ########################### Permissions ########################### # Is the user allowed to view XBlocks from the specified content library @@ -87,7 +239,9 @@ def is_course_creator(user): is_global_staff | # Libraries with "public read" permissions can be accessed only by course creators (Attribute('allow_public_read', True) & is_course_creator) | - # Otherwise the user must be part of the library's team + # Users can access libraries within their authorized scope (via Casbin/role-based permissions) + HasPermissionInContentLibraryScope(VIEW_LIBRARY) | + # Fallback to: the user must be part of the library's team (legacy permission system) has_explicit_read_permission_for_library ) diff --git a/openedx/core/djangoapps/content_libraries/rest_api/blocks.py b/openedx/core/djangoapps/content_libraries/rest_api/blocks.py index b93b48e5ad86..e72980f6ba0d 100644 --- a/openedx/core/djangoapps/content_libraries/rest_api/blocks.py +++ b/openedx/core/djangoapps/content_libraries/rest_api/blocks.py @@ -9,6 +9,7 @@ from django.utils.decorators import method_decorator from drf_yasg.utils import swagger_auto_schema from opaque_keys.edx.locator import LibraryLocatorV2, LibraryUsageLocatorV2 +from openedx_authz.constants import permissions as authz_permissions from openedx_learning.api import authoring as authoring_api from rest_framework import status from rest_framework.exceptions import NotFound, ValidationError @@ -238,9 +239,9 @@ def post(self, request, usage_key_str): api.require_permission_for_library_key( key.lib_key, request.user, - permissions.CAN_EDIT_THIS_CONTENT_LIBRARY + 
authz_permissions.PUBLISH_LIBRARY_CONTENT ) - api.publish_component_changes(key, request.user) + api.publish_component_changes(key, request.user.id) return Response({}) diff --git a/openedx/core/djangoapps/content_libraries/rest_api/collections.py b/openedx/core/djangoapps/content_libraries/rest_api/collections.py index d893d766d80f..f4d579aa04a2 100644 --- a/openedx/core/djangoapps/content_libraries/rest_api/collections.py +++ b/openedx/core/djangoapps/content_libraries/rest_api/collections.py @@ -13,6 +13,7 @@ from rest_framework.status import HTTP_204_NO_CONTENT from opaque_keys.edx.locator import LibraryLocatorV2 +from openedx_authz.constants import permissions as authz_permissions from openedx_learning.api import authoring as authoring_api from openedx_learning.api.authoring_models import Collection @@ -56,7 +57,6 @@ def get_content_library(self) -> ContentLibrary: if self.request.method in ['OPTIONS', 'GET'] else permissions.CAN_EDIT_THIS_CONTENT_LIBRARY ) - self._content_library = api.require_permission_for_library_key( library_key, self.request.user, @@ -110,6 +110,11 @@ def create(self, request: RestRequest, *args, **kwargs) -> Response: Create a Collection that belongs to a Content Library """ content_library = self.get_content_library() + api.require_permission_for_library_key( + content_library.library_key, + request.user, + authz_permissions.CREATE_LIBRARY_COLLECTION + ) create_serializer = ContentLibraryCollectionUpdateSerializer(data=request.data) create_serializer.is_valid(raise_exception=True) @@ -144,6 +149,11 @@ def partial_update(self, request: RestRequest, *args, **kwargs) -> Response: Update a Collection that belongs to a Content Library """ content_library = self.get_content_library() + api.require_permission_for_library_key( + content_library.library_key, + request.user, + authz_permissions.EDIT_LIBRARY_COLLECTION + ) collection_key = kwargs["key"] update_serializer = ContentLibraryCollectionUpdateSerializer( @@ -165,6 +175,12 @@ def 
destroy(self, request: RestRequest, *args, **kwargs) -> Response: """ Soft-deletes a Collection that belongs to a Content Library """ + content_library = self.get_content_library() + api.require_permission_for_library_key( + content_library.library_key, + request.user, + authz_permissions.DELETE_LIBRARY_COLLECTION + ) collection = super().get_object() assert collection.learning_package_id authoring_api.delete_collection( @@ -181,6 +197,11 @@ def restore(self, request: RestRequest, *args, **kwargs) -> Response: Restores a soft-deleted Collection that belongs to a Content Library """ content_library = self.get_content_library() + api.require_permission_for_library_key( + content_library.library_key, + request.user, + authz_permissions.EDIT_LIBRARY_COLLECTION + ) assert content_library.learning_package_id collection_key = kwargs["key"] authoring_api.restore_collection( @@ -198,6 +219,11 @@ def update_items(self, request: RestRequest, *args, **kwargs) -> Response: Collection and items must all be part of the given library/learning package. 
""" content_library = self.get_content_library() + api.require_permission_for_library_key( + content_library.library_key, + request.user, + authz_permissions.EDIT_LIBRARY_COLLECTION + ) collection_key = kwargs["key"] serializer = ContentLibraryItemKeysSerializer(data=request.data) diff --git a/openedx/core/djangoapps/content_libraries/rest_api/containers.py b/openedx/core/djangoapps/content_libraries/rest_api/containers.py index 67070a0a82f9..c60c40b9802d 100644 --- a/openedx/core/djangoapps/content_libraries/rest_api/containers.py +++ b/openedx/core/djangoapps/content_libraries/rest_api/containers.py @@ -12,6 +12,7 @@ from drf_yasg import openapi from opaque_keys.edx.locator import LibraryLocatorV2, LibraryContainerLocator +from openedx_authz.constants import permissions as authz_permissions from openedx_learning.api import authoring as authoring_api from rest_framework.generics import GenericAPIView from rest_framework.response import Response @@ -379,7 +380,7 @@ def post(self, request: RestRequest, container_key: LibraryContainerLocator) -> api.require_permission_for_library_key( container_key.lib_key, request.user, - permissions.CAN_EDIT_THIS_CONTENT_LIBRARY, + authz_permissions.PUBLISH_LIBRARY_CONTENT ) api.publish_container_changes(container_key, request.user.id) # If we need to in the future, we could return a list of all the child containers/components that were diff --git a/openedx/core/djangoapps/content_libraries/rest_api/libraries.py b/openedx/core/djangoapps/content_libraries/rest_api/libraries.py index 9f6cca19947a..2d50fa6c8644 100644 --- a/openedx/core/djangoapps/content_libraries/rest_api/libraries.py +++ b/openedx/core/djangoapps/content_libraries/rest_api/libraries.py @@ -82,6 +82,7 @@ from user_tasks.models import UserTaskStatus from opaque_keys.edx.locator import LibraryLocatorV2, LibraryUsageLocatorV2 +from openedx_authz.constants import permissions as authz_permissions from organizations.api import ensure_organization from 
organizations.exceptions import InvalidOrganizationException from organizations.models import Organization @@ -219,7 +220,7 @@ def post(self, request): """ Create a new content library. """ - if not request.user.has_perm(permissions.CAN_CREATE_CONTENT_LIBRARY): + if not api.user_can_create_library(request.user): raise PermissionDenied serializer = ContentLibraryMetadataSerializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -479,7 +480,11 @@ def post(self, request, lib_key_str): descendants. """ key = LibraryLocatorV2.from_string(lib_key_str) - api.require_permission_for_library_key(key, request.user, permissions.CAN_EDIT_THIS_CONTENT_LIBRARY) + api.require_permission_for_library_key( + key, + request.user, + authz_permissions.PUBLISH_LIBRARY_CONTENT + ) api.publish_changes(key, request.user.id) return Response({}) @@ -838,7 +843,7 @@ def post(self, request): """ Restore a library from a backup file. """ - if not request.user.has_perm(permissions.CAN_CREATE_CONTENT_LIBRARY): + if not api.user_can_create_library(request.user): raise PermissionDenied serializer = LibraryRestoreFileSerializer(data=request.data) diff --git a/openedx/core/djangoapps/content_libraries/rest_api/serializers.py b/openedx/core/djangoapps/content_libraries/rest_api/serializers.py index a1e24c6a64a4..c0bf07d087fc 100644 --- a/openedx/core/djangoapps/content_libraries/rest_api/serializers.py +++ b/openedx/core/djangoapps/content_libraries/rest_api/serializers.py @@ -14,6 +14,7 @@ from user_tasks.models import UserTaskStatus from openedx.core.djangoapps.content_libraries.tasks import LibraryRestoreTask +from openedx.core.djangoapps.content_libraries import api from openedx.core.djangoapps.content_libraries.api.containers import ContainerType from openedx.core.djangoapps.content_libraries.constants import ALL_RIGHTS_RESERVED, LICENSE_OPTIONS from openedx.core.djangoapps.content_libraries.models import ( @@ -75,7 +76,8 @@ def get_can_edit_library(self, obj): return False 
library_obj = ContentLibrary.objects.get_by_key(obj.key) - return user.has_perm(permissions.CAN_EDIT_THIS_CONTENT_LIBRARY, obj=library_obj) + return api.user_has_permission_across_lib_authz_systems( + user, permissions.CAN_EDIT_THIS_CONTENT_LIBRARY, library_obj) class ContentLibraryUpdateSerializer(serializers.Serializer): diff --git a/openedx/core/djangoapps/content_libraries/tasks.py b/openedx/core/djangoapps/content_libraries/tasks.py index 93d9fef725ec..bbbf847bfbc7 100644 --- a/openedx/core/djangoapps/content_libraries/tasks.py +++ b/openedx/core/djangoapps/content_libraries/tasks.py @@ -27,6 +27,7 @@ from django.core.files.base import ContentFile from django.contrib.auth import get_user_model from django.core.serializers.json import DjangoJSONEncoder +from django.conf import settings from celery import shared_task from celery.utils.log import get_task_logger from celery_utils.logged_task import LoggedTask @@ -127,6 +128,15 @@ def send_events_after_publish(publish_log_pk: int, library_key_str: str) -> None elif hasattr(record.entity, "container"): container_key = api.library_container_locator(library_key, record.entity.container) affected_containers.add(container_key) + + try: + # We do need to notify listeners that the parent container(s) have changed, + # e.g. so the search index can update the "has_unpublished_changes" + for parent_container in api.get_containers_contains_item(container_key): + affected_containers.add(parent_container.container_key) + except api.ContentLibraryContainerNotFound: + # The deleted children remain in the entity, so, in this case, the container may not be found. + pass else: log.warning( f"PublishableEntity {record.entity.pk} / {record.entity.key} was modified during publish operation " @@ -502,6 +512,7 @@ class LibraryBackupTask(UserTask): # pylint: disable=abstract-method """ Base class for tasks related with Library backup functionality. 
""" + NAME_PREFIX = "Library Learning Package Backup" @classmethod def generate_name(cls, arguments_dict) -> str: @@ -519,7 +530,7 @@ def generate_name(cls, arguments_dict) -> str: str: The generated name """ key = arguments_dict['library_key_str'] - return f'Backup of {key}' + return f'{cls.NAME_PREFIX} of {key}' @shared_task(base=LibraryBackupTask, bind=True) @@ -548,7 +559,9 @@ def backup_library(self, user_id: int, library_key_str: str) -> None: timestamp = datetime.now().strftime("%Y-%m-%d-%H%M%S") filename = f'{sanitized_lib_key}-{timestamp}.zip' file_path = os.path.join(root_dir, filename) - create_lib_zip_file(lp_key=str(library_key), path=file_path) + user = User.objects.get(id=user_id) + origin_server = getattr(settings, 'CMS_BASE', None) + create_lib_zip_file(lp_key=str(library_key), path=file_path, user=user, origin_server=origin_server) set_custom_attribute("exporting_completed", str(library_key)) with open(file_path, 'rb') as zipfile: @@ -579,10 +592,12 @@ class LibraryRestoreTask(UserTask): ERROR_LOG_ARTIFACT_NAME = 'Error log' + NAME_PREFIX = "Library Learning Package Restore" + @classmethod def generate_name(cls, arguments_dict): storage_path = arguments_dict['storage_path'] - return f'learning package restore of {storage_path}' + return f'{cls.NAME_PREFIX} of {storage_path}' def fail_with_error_log(self, logfile) -> None: """ diff --git a/openedx/core/djangoapps/content_libraries/tests/test_containers.py b/openedx/core/djangoapps/content_libraries/tests/test_containers.py index 8b7b0c527381..7e6eac3beda8 100644 --- a/openedx/core/djangoapps/content_libraries/tests/test_containers.py +++ b/openedx/core/djangoapps/content_libraries/tests/test_containers.py @@ -630,7 +630,7 @@ def test_section_hierarchy(self): ] def test_subsection_hierarchy(self): - with self.assertNumQueries(93): + with self.assertNumQueries(95): hierarchy = self._get_container_hierarchy(self.subsection_with_units["id"]) assert hierarchy["object_key"] == 
self.subsection_with_units["id"] assert hierarchy["components"] == [ @@ -653,7 +653,7 @@ def test_subsection_hierarchy(self): ] def test_units_hierarchy(self): - with self.assertNumQueries(56): + with self.assertNumQueries(60): hierarchy = self._get_container_hierarchy(self.unit_with_components["id"]) assert hierarchy["object_key"] == self.unit_with_components["id"] assert hierarchy["components"] == [ @@ -679,7 +679,7 @@ def test_container_hierarchy_not_found(self): ) def test_block_hierarchy(self): - with self.assertNumQueries(21): + with self.assertNumQueries(27): hierarchy = self._get_block_hierarchy(self.problem_block["id"]) assert hierarchy["object_key"] == self.problem_block["id"] assert hierarchy["components"] == [ diff --git a/openedx/core/djangoapps/content_libraries/tests/test_content_libraries.py b/openedx/core/djangoapps/content_libraries/tests/test_content_libraries.py index c4f61f47e254..91a9c29a3754 100644 --- a/openedx/core/djangoapps/content_libraries/tests/test_content_libraries.py +++ b/openedx/core/djangoapps/content_libraries/tests/test_content_libraries.py @@ -12,14 +12,17 @@ import ddt import tomlkit +from bridgekeeper import perms from django.core.files.uploadedfile import SimpleUploadedFile from django.contrib.auth.models import Group +from django.db.models import Q from django.test import override_settings from django.test.client import Client from freezegun import freeze_time -from opaque_keys.edx.locator import LibraryLocatorV2, LibraryUsageLocatorV2 +from opaque_keys.edx.locator import LibraryLocatorV2, LibraryUsageLocatorV2, LibraryCollectionLocator from organizations.models import Organization from rest_framework.test import APITestCase +from rest_framework import status from openedx_learning.api.authoring_models import LearningPackage from user_tasks.models import UserTaskStatus, UserTaskArtifact @@ -33,10 +36,15 @@ URL_BLOCK_XBLOCK_HANDLER, ContentLibrariesRestApiTest, ) +from openedx_authz import api as authz_api +from 
openedx_authz.constants import roles +from openedx_authz.engine.enforcer import AuthzEnforcer from openedx.core.djangoapps.xblock import api as xblock_api from openedx.core.djangolib.testing.utils import skip_unless_cms +from openedx_authz.constants.permissions import VIEW_LIBRARY -from ..models import ContentLibrary +from ..models import ContentLibrary, ContentLibraryPermission +from ..permissions import CAN_VIEW_THIS_CONTENT_LIBRARY, HasPermissionInContentLibraryScope @skip_unless_cms @@ -1217,6 +1225,462 @@ def test_uncaught_error_creates_error_log(self): self.assertEqual(task_data, expected) +@skip_unless_cms +class ContentLibrariesAuthZTestCase(ContentLibrariesRestApiTest): + """ + Tests for Content Libraries AuthZ integration via openedx-authz. + + These tests verify the HasPermissionInContentLibraryScope Bridgekeeper rule + integrates correctly with the openedx-authz authorization system (Casbin). + See: https://github.com/openedx/openedx-authz/ + + IMPORTANT: These tests explicitly remove legacy ContentLibraryPermission grants + to ensure ONLY the AuthZ system is being tested, not the legacy fallback. + """ + + def setUp(self): + super().setUp() + # The parent class provides self.user (a staff user) and self.organization + # Set up admin_user as an alias to self.user for test readability + self.admin_user = self.user + # Set up org_short_name for convenience + self.org_short_name = self.organization.short_name + + def test_authz_scope_filters_by_authorized_libraries(self): + """ + Test that HasPermissionInContentLibraryScope rule filters libraries + based on authorized org/slug combinations. 
+ + Given: + - 3 libraries: lib1 (org1), lib2 (org2), lib3 (org1) + - User authorized for lib1 and lib2 only via AuthZ (NO legacy permissions) + + Expected: + - Filter returns exactly 2 libraries (lib1 and lib2) + - lib3 is excluded (same org as lib1, but different slug) + - Correct org/slug combinations are matched + """ + user = UserFactory.create(username="scope_user", is_staff=False) + + Organization.objects.get_or_create(short_name="org1", defaults={"name": "Org 1"}) + Organization.objects.get_or_create(short_name="org2", defaults={"name": "Org 2"}) + + with self.as_user(self.admin_user): + lib1 = self._create_library(slug="lib1", org="org1", title="Library 1") + lib2 = self._create_library(slug="lib2", org="org2", title="Library 2") + self._create_library(slug="lib3", org="org1", title="Library 3") + + # CRITICAL: Ensure user has NO legacy permissions (test ONLY AuthZ filtering) + ContentLibraryPermission.objects.filter(user=user).delete() + + with patch( + 'openedx_authz.api.get_scopes_for_user_and_permission' + ) as mock_get_scopes: + # Mock: User authorized for lib1 (org1:lib1) and lib2 (org2:lib2) only, NOT lib3 + mock_scope1 = type('Scope', (), {'library_key': LibraryLocatorV2.from_string(lib1['id'])})() + mock_scope2 = type('Scope', (), {'library_key': LibraryLocatorV2.from_string(lib2['id'])})() + mock_get_scopes.return_value = [mock_scope1, mock_scope2] + + all_libs = ContentLibrary.objects.filter(slug__in=['lib1', 'lib2', 'lib3']) + filtered = perms[CAN_VIEW_THIS_CONTENT_LIBRARY].filter(user, all_libs).distinct() + + # TEST: Verify exactly 2 libraries returned (lib1 and lib2, not lib3) + self.assertEqual(filtered.count(), 2, "Should return exactly 2 authorized libraries") + + # TEST: Verify correct libraries are included/excluded + slugs = set(filtered.values_list('slug', flat=True)) + self.assertIn('lib1', slugs, "lib1 (org1:lib1) should be included") + self.assertIn('lib2', slugs, "lib2 (org2:lib2) should be included") + self.assertNotIn('lib3', 
slugs, "lib3 (org1:lib3) should be excluded") + + # TEST: Verify the org/slug combinations match + lib1_result = filtered.get(slug='lib1') + lib2_result = filtered.get(slug='lib2') + self.assertEqual(lib1_result.org.short_name, 'org1') + self.assertEqual(lib2_result.org.short_name, 'org2') + + def test_authz_scope_individual_check_with_permission(self): + """ + Test that HasPermissionInContentLibraryScope.check() returns True + when authorization is granted. + + Given: + - Non-staff user + - Library exists + - Authorization system grants permission (mocked) + - NO legacy permissions + + Expected: + - check() returns True + """ + user = UserFactory.create(username="check_user", is_staff=False) + + with self.as_user(self.admin_user): + lib = self._create_library(slug="check-lib", org=self.org_short_name, title="Check Library") + + library_obj = ContentLibrary.objects.get_by_key(LibraryLocatorV2.from_string(lib["id"])) + + # CRITICAL: Ensure user has NO legacy permissions (test ONLY AuthZ) + ContentLibraryPermission.objects.filter(user=user).delete() + + with patch("openedx_authz.api.is_user_allowed", return_value=True): + result = perms[CAN_VIEW_THIS_CONTENT_LIBRARY].check(user, library_obj) + + self.assertTrue(result, "Should return True when user is authorized") + + def test_authz_scope_individual_check_without_permission(self): + """ + Test that HasPermissionInContentLibraryScope.check() returns False + when authorization is denied. 
+ + Given: + - Non-staff user + - Non-public library + - Authorization system denies permission (mocked) + - NO legacy permissions + + Expected: + - check() returns False + """ + user = UserFactory.create(username="no_perm_user", is_staff=False) + + with self.as_user(self.admin_user): + lib = self._create_library(slug="no-perm-lib", org=self.org_short_name, title="No Permission Library") + + library_obj = ContentLibrary.objects.get_by_key(LibraryLocatorV2.from_string(lib['id'])) + + # CRITICAL: Ensure user has NO legacy permissions (test ONLY AuthZ) + ContentLibraryPermission.objects.filter(user=user).delete() + + with patch('openedx_authz.api.is_user_allowed', return_value=False): + result = perms[CAN_VIEW_THIS_CONTENT_LIBRARY].check(user, library_obj) + + self.assertFalse(result, "Should return False when user is not authorized") + + self.assertFalse(library_obj.allow_public_read) + self.assertFalse(user.is_staff) + + def test_authz_scope_handles_empty_scopes(self): + """ + Test that HasPermissionInContentLibraryScope.query() returns empty + result when user has no authorized scopes. 
+ + Given: + - Non-staff user + - Library exists in database + - Authorization system returns empty scope list (mocked) + - NO legacy permissions + + Expected: + - Filter returns 0 libraries + - Library exists in database but is not accessible + """ + user = UserFactory.create(username="empty_user", is_staff=False) + + with self.as_user(self.admin_user): + self._create_library(slug="empty-lib", title="Empty Scopes Test") + + # CRITICAL: Ensure user has NO legacy permissions (test ONLY AuthZ) + ContentLibraryPermission.objects.filter(user=user).delete() + + with patch( + 'openedx_authz.api.get_scopes_for_user_and_permission', + return_value=[] + ): + filtered = perms[CAN_VIEW_THIS_CONTENT_LIBRARY].filter( + user, + ContentLibrary.objects.filter(slug="empty-lib") + ).distinct() + + self.assertEqual( + filtered.count(), + 0, + "Should return 0 libraries when user has no authorized scopes", + ) + + self.assertTrue( + ContentLibrary.objects.filter(slug="empty-lib").exists(), + "Library should exist in database", + ) + + def test_authz_scope_q_object_has_correct_structure(self): + """ + Test that HasPermissionInContentLibraryScope.query() generates Q object + with structure: Q(org__short_name='X') & Q(slug='Y') for each scope. + + Multiple scopes should be OR'd: + (Q(org__short_name='org1') & Q(slug='lib1')) | (Q(org__short_name='org2') & Q(slug='lib2')) + + Note: This test focuses on Q object structure, not filtering behavior, + so legacy permissions don't affect the outcome. 
+ """ + user = UserFactory.create(username="q_user") + rule = HasPermissionInContentLibraryScope(VIEW_LIBRARY, filter_keys=['org', 'slug']) + + with patch( + "openedx_authz.api.get_scopes_for_user_and_permission" + ) as mock_get_scopes: + # Create scopes with specific org/slug values we can verify + mock_scope1 = type("Scope", (), { + "library_key": type("Key", (), {"org": "specific-org1", "slug": "specific-slug1"})() + })() + mock_scope2 = type("Scope", (), { + "library_key": type("Key", (), {"org": "specific-org2", "slug": "specific-slug2"})() + })() + mock_get_scopes.return_value = [mock_scope1, mock_scope2] + + q_obj = rule.query(user) + + # Test 1: Verify it returns a Q object + self.assertIsInstance(q_obj, Q) + + # Test 2: Verify Q object uses OR connector (for multiple scopes) + self.assertEqual( + q_obj.connector, + 'OR', + "Should use OR to combine different library scopes", + ) + + # Test 3: Verify the Q object string contains the exact fields and values + q_str = str(q_obj) + + # Should filter by org__short_name field + self.assertIn( + "org__short_name", + q_str, + "Q object must filter by org__short_name field", + ) + + # Should filter by slug field + self.assertIn( + "slug", + q_str, + "Q object must filter by slug field", + ) + + # Should contain exact org values + self.assertIn( + "specific-org1", + q_str, + "Q object must include 'specific-org1'", + ) + self.assertIn( + "specific-org2", + q_str, + "Q object must include 'specific-org2'", + ) + + # Should contain exact slug values + self.assertIn( + "specific-slug1", + q_str, + "Q object must include 'specific-slug1'", + ) + self.assertIn( + 'specific-slug2', + q_str, + "Q object must include 'specific-slug2'", + ) + + def test_authz_scope_q_object_matches_exact_org_slug_pairs(self): + """ + Test that the Q object filters by EXACT (org, slug) pairs, not just org OR slug. 
+ + Critical test: Verifies the rule generates: + Q(org__short_name='org1' AND slug='lib1') OR Q(org__short_name='org2' AND slug='lib2') + + NOT just: + Q(org__short_name IN ['org1', 'org2']) OR Q(slug IN ['lib1', 'lib2']) + + Creates scenario: + - lib1: org1 + lib1 (authorized) + - lib2: org2 + lib2 (authorized) + - lib3: org1 + lib3 (NOT authorized - same org, different slug) + - lib4: org3 + lib1 (NOT authorized - same slug, different org) + """ + user = UserFactory.create(username="exact_pair_user") + rule = HasPermissionInContentLibraryScope(VIEW_LIBRARY, filter_keys=['org', 'slug']) + + Organization.objects.get_or_create(short_name="pair-org1", defaults={"name": "Pair Org 1"}) + Organization.objects.get_or_create(short_name="pair-org2", defaults={"name": "Pair Org 2"}) + Organization.objects.get_or_create(short_name="pair-org3", defaults={"name": "Pair Org 3"}) + + with self.as_user(self.admin_user): + lib1 = self._create_library(slug="pair-lib1", org="pair-org1", title="Pair Lib 1") + lib2 = self._create_library(slug="pair-lib2", org="pair-org2", title="Pair Lib 2") + self._create_library(slug="pair-lib3", org="pair-org1", title="Pair Lib 3") # Same org as lib1 + self._create_library(slug="pair-lib1", org="pair-org3", title="Pair Lib 4") # Same slug as lib1 + + # CRITICAL: Ensure user has NO legacy permissions (test ONLY AuthZ filtering) + ContentLibraryPermission.objects.filter(user=user).delete() + + with patch( + 'openedx_authz.api.get_scopes_for_user_and_permission' + ) as mock_get_scopes: + # Authorize ONLY (pair-org1, pair-lib1) and (pair-org2, pair-lib2) + lib1_key = LibraryLocatorV2.from_string(lib1['id']) + lib2_key = LibraryLocatorV2.from_string(lib2['id']) + + mock_get_scopes.return_value = [ + type('Scope', (), {'library_key': lib1_key})(), + type('Scope', (), {'library_key': lib2_key})(), + ] + + q_obj = rule.query(user) + filtered = ContentLibrary.objects.filter(q_obj) + + # TEST: Verify EXACTLY 2 libraries match (lib1 and lib2 only) + 
self.assertEqual( + filtered.count(), + 2, + "Must match EXACTLY 2 libraries - only those with authorized (org, slug) pairs", + ) + + # TEST: Verify lib1 matches (pair-org1, pair-lib1) + lib1_result = filtered.filter(slug='pair-lib1', org__short_name='pair-org1') + self.assertEqual( + lib1_result.count(), + 1, + "Must match lib1: (pair-org1, pair-lib1) - this exact pair is authorized", + ) + + # TEST: Verify lib2 matches (pair-org2, pair-lib2) + lib2_result = filtered.filter(slug='pair-lib2', org__short_name='pair-org2') + self.assertEqual( + lib2_result.count(), + 1, + "Must match lib2: (pair-org2, pair-lib2) - this exact pair is authorized", + ) + + # TEST: Verify lib3 does NOT match (pair-org1, pair-lib3) + lib3_result = filtered.filter(slug='pair-lib3', org__short_name='pair-org1') + self.assertEqual( + lib3_result.count(), + 0, + "Must NOT match lib3: (pair-org1, pair-lib3) - only pair-lib1 is authorized for pair-org1", + ) + + # TEST: Verify lib4 does NOT match (pair-org3, pair-lib1) + lib4_result = filtered.filter(slug='pair-lib1', org__short_name='pair-org3') + self.assertEqual( + lib4_result.count(), + 0, + "Must NOT match lib4: (pair-org3, pair-lib1) - only pair-org1 is authorized for pair-lib1", + ) + + # TEST: Verify the result set contains exactly the right libraries + result_pairs = set(filtered.values_list('org__short_name', 'slug')) + expected_pairs = {('pair-org1', 'pair-lib1'), ('pair-org2', 'pair-lib2')} + self.assertEqual( + result_pairs, + expected_pairs, + f"Result must contain exactly {expected_pairs}, got {result_pairs}", + ) + + def test_authz_scope_with_combined_authz_and_legacy_permissions(self): + """ + Test that the filter returns libraries when user has BOTH AuthZ AND legacy permissions. 
+ + The CAN_VIEW_THIS_CONTENT_LIBRARY permission uses OR logic: + is_user_active & ( + is_global_staff | + (allow_public_read & is_course_creator) | + HasPermissionInContentLibraryScope(VIEW_LIBRARY) | # AuthZ + has_explicit_read_permission_for_library # Legacy + ) + + This means a user with BOTH types of permissions should get access through EITHER system. + + Test scenario: + - lib1: User has AuthZ permission only + - lib2: User has legacy permission only + - lib3: User has BOTH AuthZ AND legacy permissions + - lib4: User has NO permissions + + Expected behavior: + - Filter returns lib1, lib2, and lib3 (NOT lib4) + - Having both permission types doesn't break filtering + - Each permission system contributes its authorized libraries + """ + user = UserFactory.create(username="combined_perm_user", is_staff=False) + + Organization.objects.get_or_create(short_name="comb-org", defaults={"name": "Combined Org"}) + + with self.as_user(self.admin_user): + lib1 = self._create_library(slug="comb-lib1", org="comb-org", title="AuthZ Only Library") + lib2 = self._create_library(slug="comb-lib2", org="comb-org", title="Legacy Only Library") + lib3 = self._create_library(slug="comb-lib3", org="comb-org", title="Both AuthZ and Legacy Library") + lib4 = self._create_library(slug="comb-lib4", org="comb-org", title="No Permissions Library") + + # Retrieve library objects for permission assignment + lib1_obj = ContentLibrary.objects.get_by_key(LibraryLocatorV2.from_string(lib1['id'])) + lib2_obj = ContentLibrary.objects.get_by_key(LibraryLocatorV2.from_string(lib2['id'])) + lib3_obj = ContentLibrary.objects.get_by_key(LibraryLocatorV2.from_string(lib3['id'])) + + # Set up legacy permissions: lib2 (legacy only), lib3 (both) + ContentLibraryPermission.objects.create( + library=lib2_obj, + user=user, + access_level=ContentLibraryPermission.READ_LEVEL, + ) + ContentLibraryPermission.objects.create( + library=lib3_obj, + user=user, + access_level=ContentLibraryPermission.READ_LEVEL, + ) 
+ + with patch( + 'openedx_authz.api.get_scopes_for_user_and_permission' + ) as mock_get_scopes: + # Set up AuthZ permissions: lib1 (AuthZ only), lib3 (both) + lib1_key = LibraryLocatorV2.from_string(lib1['id']) + lib3_key = LibraryLocatorV2.from_string(lib3['id']) + + mock_get_scopes.return_value = [ + type('Scope', (), {'library_key': lib1_key})(), + type('Scope', (), {'library_key': lib3_key})(), + ] + + all_libs = ContentLibrary.objects.filter(slug__in=['comb-lib1', 'comb-lib2', 'comb-lib3', 'comb-lib4']) + filtered = perms[CAN_VIEW_THIS_CONTENT_LIBRARY].filter(user, all_libs).distinct() + + # TEST: Verify exactly 3 libraries returned (lib1, lib2, lib3 - NOT lib4) + self.assertEqual( + filtered.count(), + 3, + "Should return exactly 3 libraries: AuthZ-only, legacy-only, and both", + ) + + # TEST: Verify correct libraries are included + slugs = set(filtered.values_list('slug', flat=True)) + self.assertIn('comb-lib1', slugs, "lib1 should be accessible via AuthZ permission") + self.assertIn('comb-lib2', slugs, "lib2 should be accessible via legacy permission") + self.assertIn('comb-lib3', slugs, "lib3 should be accessible via BOTH AuthZ and legacy permissions") + self.assertNotIn('comb-lib4', slugs, "lib4 should NOT be accessible (no permissions)") + + # TEST: Verify lib3 doesn't get duplicated despite having both permission types + lib3_results = filtered.filter(slug='comb-lib3') + self.assertEqual( + lib3_results.count(), + 1, + "lib3 should appear exactly once despite having both AuthZ and legacy permissions", + ) + + # TEST: Verify the permission sources work independently + # This demonstrates the OR logic: user gets access if EITHER permission type grants it + result_pairs = set(filtered.values_list('org__short_name', 'slug')) + expected_pairs = { + ('comb-org', 'comb-lib1'), # AuthZ only + ('comb-org', 'comb-lib2'), # Legacy only + ('comb-org', 'comb-lib3'), # Both + } + self.assertEqual( + result_pairs, + expected_pairs, + f"Should get exactly the 3 
authorized libraries via OR logic, got {result_pairs}", + ) + + @ddt.ddt class ContentLibraryXBlockValidationTest(APITestCase): """Tests only focused on service validation, no Learning Core interactions here.""" @@ -1244,3 +1708,282 @@ def test_xblock_handler_invalid_key(self): secure_token='random', ))) self.assertEqual(response.status_code, 404) + + +@skip_unless_cms +class ContentLibrariesRestAPIAuthzIntegrationTestCase(ContentLibrariesRestApiTest): + """ + Test that Content Libraries REST API endpoints respect AuthZ roles and permissions. + + Roles tested: + 1. Library Admin: Full access to all library operations. + 2. Library Author: Can view and edit library content, but cannot delete the library. + 3. Library Contributor: Can view and edit library content, but cannot delete or publish the library. + 4. Library User: Can only view library content. + """ + + def setUp(self): + super().setUp() + self._seed_database_with_policies() + + self.library_admin = UserFactory.create( + username="library_admin", + email="libadmin@example.com") + self.library_author = UserFactory.create( + username="library_author", + email="libauthor@example.com") + self.library_contributor = UserFactory.create( + username="library_contributor", + email="libcontributor@example.com") + self.library_user = UserFactory.create( + username="library_user", + email="libuser@example.com") + self.random_user = UserFactory.create( + username="random_user", + email="random@example.com") + + # Define user groups by permission level + self.list_of_all_users = [ + self.library_admin, + self.library_author, + self.library_contributor, + self.library_user, + self.random_user, + ] + self.library_viewers = [self.library_admin, self.library_author, self.library_contributor, self.library_user] + self.library_editors = [self.library_admin, self.library_author, self.library_contributor] + self.library_publishers = [self.library_admin, self.library_author] + self.library_collection_editors = 
[self.library_admin, self.library_author, self.library_contributor] + self.library_deleters = [self.library_admin] + + # Create library and assign roles + library = self._create_library( + slug="authzlib", + title="AuthZ Test Library", + description="Testing AuthZ", + ) + self.lib_id = library["id"] + + authz_api.assign_role_to_user_in_scope( + self.library_admin.username, + roles.LIBRARY_ADMIN.external_key, self.lib_id) + authz_api.assign_role_to_user_in_scope( + self.library_author.username, + roles.LIBRARY_AUTHOR.external_key, self.lib_id) + authz_api.assign_role_to_user_in_scope( + self.library_contributor.username, + roles.LIBRARY_CONTRIBUTOR.external_key, self.lib_id) + authz_api.assign_role_to_user_in_scope( + self.library_user.username, + roles.LIBRARY_USER.external_key, self.lib_id) + AuthzEnforcer.get_enforcer().load_policy() # Load policies to simulate fresh start + + def tearDown(self): + """Clean up after each test to ensure isolation.""" + super().tearDown() + AuthzEnforcer.get_enforcer().clear_policy() # Clear policies after each test to ensure isolation + + @classmethod + def _seed_database_with_policies(cls): + """Seed the database with policies from the policy file. + + This simulates the one-time database seeding that would happen + during application deployment, separate from the runtime policy loading. 
+ """ + import pkg_resources + from openedx_authz.engine.utils import migrate_policy_between_enforcers + import casbin + + global_enforcer = AuthzEnforcer.get_enforcer() + global_enforcer.load_policy() + model_path = pkg_resources.resource_filename("openedx_authz.engine", "config/model.conf") + policy_path = pkg_resources.resource_filename("openedx_authz.engine", "config/authz.policy") + + migrate_policy_between_enforcers( + source_enforcer=casbin.Enforcer(model_path, policy_path), + target_enforcer=global_enforcer, + ) + global_enforcer.clear_policy() # Clear to simulate fresh start for each test + + def _all_users_excluding(self, excluded_users): + return set(self.list_of_all_users) - set(excluded_users) + + def test_view_permissions(self): + """ + Verify that only users with view permissions can view. + """ + # Test library view access + for user in self.library_viewers: + with self.as_user(user): + self._get_library(self.lib_id, expect_response=status.HTTP_200_OK) + for user in self._all_users_excluding(self.library_viewers): + with self.as_user(user): + self._get_library(self.lib_id, expect_response=status.HTTP_403_FORBIDDEN) + + def test_edit_permissions(self): + """ + Verify that only users with edit permissions can edit. + """ + # Test library edit access + for user in self.library_editors: + with self.as_user(user): + self._update_library( + self.lib_id, + description=f"Description by {user.username}", + expect_response=status.HTTP_200_OK, + ) + # Verify the permitted changes were made + data = self._get_library(self.lib_id) + assert data['description'] == f"Description by {user.username}" + + for user in self._all_users_excluding(self.library_editors): + with self.as_user(user): + self._update_library( + self.lib_id, + description="I can't edit this.", expect_response=status.HTTP_403_FORBIDDEN) + + # Verify the non-permitted changes weren't made: + data = self._get_library(self.lib_id) + assert data['description'] != "I can't edit this." 
+ + # Library XBlock editing + for user in self.library_editors: + with self.as_user(user): + # They can create blocks + block_data = self._add_block_to_library(self.lib_id, "problem", f"problem_{user.username}") + # They can modify blocks + self._set_library_block_olx( + block_data["id"], + "", + expect_response=status.HTTP_200_OK) + self._set_library_block_fields( + block_data["id"], + {"data": "", "metadata": {}}, + expect_response=status.HTTP_200_OK) + self._set_library_block_asset( + block_data["id"], + "static/test.txt", + b"data", + expect_response=status.HTTP_200_OK) + # They can remove blocks + self._delete_library_block(block_data["id"], expect_response=status.HTTP_200_OK) + # Verify deletion + self._get_library_block(block_data["id"], expect_response=404) + + # Recreate blocks for further tests + block_data = self._add_block_to_library(self.lib_id, "problem", "new_problem") + + for user in self._all_users_excluding(self.library_editors): + with self.as_user(user): + self._add_block_to_library( + self.lib_id, + "problem", + "problem1", + expect_response=status.HTTP_403_FORBIDDEN) + # They can't modify blocks + self._set_library_block_olx( + block_data["id"], + "", + expect_response=status.HTTP_403_FORBIDDEN) + self._set_library_block_fields( + block_data["id"], + {"data": "", "metadata": {}}, + expect_response=status.HTTP_403_FORBIDDEN) + self._set_library_block_asset( + block_data["id"], + "static/test.txt", + b"data", + expect_response=status.HTTP_403_FORBIDDEN) + # They can't remove blocks + self._delete_library_block(block_data["id"], expect_response=status.HTTP_403_FORBIDDEN) + + def test_publish_permissions(self): + """ + Verify that only users with publish permissions can publish. 
+ """ + # Test publish access + for user in self.library_publishers: + with self.as_user(user): + block_data = self._add_block_to_library(self.lib_id, "problem", f"problem_{user.username}_1") + self._publish_library_block(block_data["id"], expect_response=status.HTTP_200_OK) + block_data = self._add_block_to_library(self.lib_id, "problem", f"problem_{user.username}_2") + assert self._get_library(self.lib_id)['has_unpublished_changes'] is True + self._commit_library_changes(self.lib_id, expect_response=status.HTTP_200_OK) + assert self._get_library(self.lib_id)['has_unpublished_changes'] is False + + block_data = self._add_block_to_library(self.lib_id, "problem", "draft_problem") + assert self._get_library(self.lib_id)['has_unpublished_changes'] is True + + for user in self._all_users_excluding(self.library_publishers): + with self.as_user(user): + self._publish_library_block(block_data["id"], expect_response=status.HTTP_403_FORBIDDEN) + self._commit_library_changes(self.lib_id, expect_response=status.HTTP_403_FORBIDDEN) + # Verify that no changes were published + assert self._get_library(self.lib_id)['has_unpublished_changes'] is True + + def test_collection_permissions(self): + """ + Verify that only users with collection permissions can perform collection actions. 
+ """ + library_key = LibraryLocatorV2.from_string(self.lib_id) + block_data = self._add_block_to_library(self.lib_id, "problem", "collection_problem") + # Test library collection access + for user in self.library_collection_editors: + with self.as_user(user): + # Create collection + collection_data = self._create_collection( + self.lib_id, + title=f"Temp Collection {user.username}", + expect_response=status.HTTP_200_OK) + collection_id = collection_data["key"] + collection_key = LibraryCollectionLocator(lib_key=library_key, collection_id=collection_id) + # Update collection + self._update_collection(collection_key, title="Updated Collection", expect_response=status.HTTP_200_OK) + self._add_items_to_collection( + collection_key, + item_keys=[block_data["id"]], + expect_response=status.HTTP_200_OK) + # Delete collection + self._soft_delete_collection(collection_key, expect_response=status.HTTP_204_NO_CONTENT) + + collection_data = self._create_collection( + self.lib_id, + title="New Temp Collection", + expect_response=status.HTTP_200_OK) + collection_id = collection_data["key"] + collection_key = LibraryCollectionLocator(lib_key=library_key, collection_id=collection_id) + + for user in self._all_users_excluding(self.library_collection_editors): + with self.as_user(user): + # Attempt to create collection + self._create_collection( + self.lib_id, + title="Unauthorized Collection", + expect_response=status.HTTP_403_FORBIDDEN) + # Attempt to update collection + self._update_collection( + collection_key, + title="Unauthorized Change", + expect_response=status.HTTP_403_FORBIDDEN) + self._add_items_to_collection( + collection_key, + item_keys=[block_data["id"]], + expect_response=status.HTTP_403_FORBIDDEN) + # Attempt to delete collection + self._soft_delete_collection(collection_key, expect_response=status.HTTP_403_FORBIDDEN) + + def test_delete_library_permissions(self): + """ + Verify that only users with delete permissions can delete a library. 
+ """ + # Test library delete access + for user in self._all_users_excluding(self.library_deleters): + with self.as_user(user): + result = self._delete_library(self.lib_id, expect_response=status.HTTP_403_FORBIDDEN) + assert 'detail' in result # Error message + assert 'permission' in result['detail'].lower() + + for user in self.library_deleters: + with self.as_user(user): + result = self._delete_library(self.lib_id, expect_response=status.HTTP_200_OK) + assert result == {} diff --git a/openedx/core/djangoapps/content_libraries/tests/test_events.py b/openedx/core/djangoapps/content_libraries/tests/test_events.py index 88d426d3ef06..975cfbafb4d9 100644 --- a/openedx/core/djangoapps/content_libraries/tests/test_events.py +++ b/openedx/core/djangoapps/content_libraries/tests/test_events.py @@ -449,6 +449,53 @@ def test_publish_container(self) -> None: c2_after = self._get_container(container2["id"]) assert c2_after["has_unpublished_changes"] + def test_publish_child_container(self): + """ + Test the events that get emitted when we publish the changes to a container that is child of another container + """ + # Create some containers + unit = self._create_container(self.lib1_key, "unit", display_name="Alpha Unit", slug=None) + subsection = self._create_container(self.lib1_key, "subsection", display_name="Bravo Subsection", slug=None) + + # Add one container as child + self._add_container_children(subsection["id"], children_ids=[unit["id"]]) + + # At first everything is unpublished: + c1_before = self._get_container(unit["id"]) + assert c1_before["has_unpublished_changes"] + c2_before = self._get_container(subsection["id"]) + assert c2_before["has_unpublished_changes"] + + # clear event log after the initial mock data setup is complete: + self.clear_events() + + # Now publish only the unit + self._publish_container(unit["id"]) + + # Now it is published: + c1_after = self._get_container(unit["id"]) + assert c1_after["has_unpublished_changes"] is False + + # And publish 
events were emitted: + self.expect_new_events( + { # An event for the unit being published: + "signal": LIBRARY_CONTAINER_PUBLISHED, + "library_container": LibraryContainerData( + container_key=LibraryContainerLocator.from_string(unit["id"]), + ), + }, + { # An event for parent (subsection): + "signal": LIBRARY_CONTAINER_PUBLISHED, + "library_container": LibraryContainerData( + container_key=LibraryContainerLocator.from_string(subsection["id"]), + ), + }, + ) + + # note that subsection is still unpublished + c2_after = self._get_container(subsection["id"]) + assert c2_after["has_unpublished_changes"] + def test_restore_unit(self) -> None: """ Test restoring a deleted unit via the "restore" API. diff --git a/openedx/core/djangoapps/content_libraries/tests/test_tasks.py b/openedx/core/djangoapps/content_libraries/tests/test_tasks.py index 4098b2a8fff9..a200471c00bd 100644 --- a/openedx/core/djangoapps/content_libraries/tests/test_tasks.py +++ b/openedx/core/djangoapps/content_libraries/tests/test_tasks.py @@ -1,7 +1,9 @@ """ Unit tests for content libraries Celery tasks """ +from unittest import mock +from django.test import override_settings from ..models import ContentLibrary from .base import ContentLibrariesRestApiTest @@ -13,6 +15,7 @@ class ContentLibraryBackupTaskTest(ContentLibrariesRestApiTest): """ Tests for Content Library export task. 
""" + SEND_TASK_COMPLETE_FN = 'cms.djangoapps.cms_user_tasks.tasks.send_task_complete_email.delay' def setUp(self) -> None: super().setUp() @@ -28,16 +31,26 @@ def test_backup_task_returns_task_id(self): result = backup_library.delay(self.user.id, str(self.lib1.library_key)) assert result.task_id is not None + @override_settings(CMS_BASE="test.com") def test_backup_task_success(self): - result = backup_library.delay(self.user.id, str(self.lib1.library_key)) + with mock.patch(self.SEND_TASK_COMPLETE_FN) as send_task_complete_email: + result = backup_library.delay(self.user.id, str(self.lib1.library_key)) + send_task_complete_email.assert_not_called() assert result.state == 'SUCCESS' # Ensure an artifact was created with the output file artifact = UserTaskArtifact.objects.filter(status__task_id=result.task_id, name='Output').first() assert artifact is not None assert artifact.file.name.endswith('.zip') + # test artifact content + with artifact.file.open('rb') as f: + content = f.read() + assert b'created_by_email = "bob@example.com"' in content + assert b'origin_server = "test.com"' in content def test_backup_task_failure(self): - result = backup_library.delay(self.user.id, self.wrong_task_id) + with mock.patch(self.SEND_TASK_COMPLETE_FN) as send_task_complete_email: + result = backup_library.delay(self.user.id, self.wrong_task_id) + send_task_complete_email.assert_not_called() assert result.state == 'FAILURE' # Ensure an error artifact was created artifact = UserTaskArtifact.objects.filter(status__task_id=result.task_id, name='Error').first() diff --git a/openedx/core/djangoapps/content_tagging/rest_api/v1/tests/test_views.py b/openedx/core/djangoapps/content_tagging/rest_api/v1/tests/test_views.py index 6ba1049082d1..3b36df3f4075 100644 --- a/openedx/core/djangoapps/content_tagging/rest_api/v1/tests/test_views.py +++ b/openedx/core/djangoapps/content_tagging/rest_api/v1/tests/test_views.py @@ -514,12 +514,12 @@ def test_create_taxonomy(self, user_attr: str, 
expected_status: int) -> None: @ddt.data( ('staff', 11), - ("content_creatorA", 17), - ("library_staffA", 17), - ("library_userA", 17), - ("instructorA", 17), - ("course_instructorA", 17), - ("course_staffA", 17), + ("content_creatorA", 22), + ("library_staffA", 22), + ("library_userA", 22), + ("instructorA", 22), + ("course_instructorA", 22), + ("course_staffA", 22), ) @ddt.unpack def test_list_taxonomy_query_count(self, user_attr: str, expected_queries: int): @@ -1927,16 +1927,16 @@ def test_get_copied_tags(self): ('staff', 'courseA', 8), ('staff', 'libraryA', 8), ('staff', 'collection_key', 8), - ("content_creatorA", 'courseA', 12, False), - ("content_creatorA", 'libraryA', 12, False), - ("content_creatorA", 'collection_key', 12, False), - ("library_staffA", 'libraryA', 12, False), # Library users can only view objecttags, not change them? - ("library_staffA", 'collection_key', 12, False), - ("library_userA", 'libraryA', 12, False), - ("library_userA", 'collection_key', 12, False), - ("instructorA", 'courseA', 12), - ("course_instructorA", 'courseA', 12), - ("course_staffA", 'courseA', 12), + ("content_creatorA", 'courseA', 17, False), + ("content_creatorA", 'libraryA', 17, False), + ("content_creatorA", 'collection_key', 17, False), + ("library_staffA", 'libraryA', 17, False), # Library users can only view objecttags, not change them? 
+ ("library_staffA", 'collection_key', 17, False), + ("library_userA", 'libraryA', 17, False), + ("library_userA", 'collection_key', 17, False), + ("instructorA", 'courseA', 17), + ("course_instructorA", 'courseA', 17), + ("course_staffA", 'courseA', 17), ) @ddt.unpack def test_object_tags_query_count( diff --git a/openedx/core/djangoapps/courseware_api/tests/test_views.py b/openedx/core/djangoapps/courseware_api/tests/test_views.py index 1606d245c01f..62fd6d557b34 100644 --- a/openedx/core/djangoapps/courseware_api/tests/test_views.py +++ b/openedx/core/djangoapps/courseware_api/tests/test_views.py @@ -662,15 +662,20 @@ def test_invitation_only_property(self, invitation_only): ) def test_about_sidebar_html_property(self, waffle_enabled, mock_get_course_about_section): """ - Test about_sidebar_html property with different waffle settings + Test about_sidebar_html property with different waffle settings. + + Ensure that when a value is returned, ' with override_waffle_switch(ENABLE_COURSE_ABOUT_SIDEBAR_HTML, active=waffle_enabled): meta = self.create_courseware_meta() if waffle_enabled: assert meta.about_sidebar_html == '
About Course
' else: assert meta.about_sidebar_html is None + assert meta.overview == '
About Course
' @ddt.ddt diff --git a/openedx/core/djangoapps/courseware_api/views.py b/openedx/core/djangoapps/courseware_api/views.py index 1dcfc740c84c..a5940d8a132e 100644 --- a/openedx/core/djangoapps/courseware_api/views.py +++ b/openedx/core/djangoapps/courseware_api/views.py @@ -63,6 +63,7 @@ from openedx.core.djangoapps.courseware_api.utils import get_celebrations_dict from openedx.core.djangoapps.enrollments.permissions import ENROLL_IN_COURSE from openedx.core.djangoapps.programs.utils import ProgramProgressMeter +from openedx.core.djangolib.markup import clean_dangerous_html from openedx.core.lib.api.authentication import BearerAuthenticationAllowInactiveUser from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin from openedx.core.lib.courses import get_course_by_id @@ -516,7 +517,9 @@ def about_sidebar_html(self): Returns the HTML content for the course about section. """ if ENABLE_COURSE_ABOUT_SIDEBAR_HTML.is_enabled(): - return get_course_about_section(self.request, self.course, "about_sidebar_html") + return clean_dangerous_html( + get_course_about_section(self.request, self.course, "about_sidebar_html") + ) return None @property @@ -524,7 +527,9 @@ def overview(self): """ Returns the overview HTML content for the course. 
""" - return get_course_about_section(self.request, self.course, "overview") + return clean_dangerous_html( + get_course_about_section(self.request, self.course, "overview") + ) @method_decorator(transaction.non_atomic_requests, name='dispatch') diff --git a/openedx/core/djangoapps/notifications/base_notification.py b/openedx/core/djangoapps/notifications/base_notification.py index 5264053ace5a..4b9393830eb9 100644 --- a/openedx/core/djangoapps/notifications/base_notification.py +++ b/openedx/core/djangoapps/notifications/base_notification.py @@ -178,7 +178,7 @@ 'is_core': False, 'info': '', 'web': True, - 'email': False, + 'email': True, 'push': False, 'email_cadence': EmailCadence.DAILY, 'non_editable': ['push'], @@ -236,7 +236,7 @@ 'is_core': False, 'info': '', 'web': True, - 'email': False, + 'email': True, 'email_cadence': EmailCadence.DAILY, 'push': False, 'non_editable': ['push'], diff --git a/openedx/core/djangoapps/notifications/tests/test_views.py b/openedx/core/djangoapps/notifications/tests/test_views.py index 7148295dcf50..5e09a86c5914 100644 --- a/openedx/core/djangoapps/notifications/tests/test_views.py +++ b/openedx/core/djangoapps/notifications/tests/test_views.py @@ -604,7 +604,7 @@ def setUp(self): }, "new_instructor_all_learners_post": { "web": True, - "email": False, + "email": True, "push": False, "email_cadence": "Daily" }, @@ -628,7 +628,7 @@ def setUp(self): "notification_types": { "course_updates": { "web": True, - "email": False, + "email": True, "push": False, "email_cadence": "Daily" }, diff --git a/openedx/core/djangoapps/user_api/accounts/serializers.py b/openedx/core/djangoapps/user_api/accounts/serializers.py index f7ffe15d2a4b..c4acb8df9bac 100644 --- a/openedx/core/djangoapps/user_api/accounts/serializers.py +++ b/openedx/core/djangoapps/user_api/accounts/serializers.py @@ -142,11 +142,6 @@ def to_representation(self, user): # lint-amnesty, pylint: disable=arguments-di except ObjectDoesNotExist: account_recovery = None - try: - 
activation_key = user.registration.activation_key - except ObjectDoesNotExist: - activation_key = None - data = { "username": user.username, "url": self.context.get('request').build_absolute_uri( @@ -161,7 +156,6 @@ def to_representation(self, user): # lint-amnesty, pylint: disable=arguments-di "date_joined": user.date_joined.replace(microsecond=0), "last_login": user.last_login, "is_active": user.is_active, - "activation_key": activation_key, "bio": None, "country": None, "state": None, diff --git a/openedx/core/djangoapps/user_api/accounts/tests/test_api.py b/openedx/core/djangoapps/user_api/accounts/tests/test_api.py index 8cdb7a634118..023c04ab0217 100644 --- a/openedx/core/djangoapps/user_api/accounts/tests/test_api.py +++ b/openedx/core/djangoapps/user_api/accounts/tests/test_api.py @@ -635,7 +635,6 @@ def test_create_account(self): 'id': user.id, 'name': self.USERNAME, 'verified_name': None, - 'activation_key': user.registration.activation_key, 'gender': None, 'goals': '', 'is_active': False, 'level_of_education': None, diff --git a/openedx/core/djangoapps/user_api/accounts/tests/test_views.py b/openedx/core/djangoapps/user_api/accounts/tests/test_views.py index 45b1773e4f02..e73d8c7decd2 100644 --- a/openedx/core/djangoapps/user_api/accounts/tests/test_views.py +++ b/openedx/core/djangoapps/user_api/accounts/tests/test_views.py @@ -361,8 +361,8 @@ class TestAccountsAPI(FilteredQueryCountMixin, CacheIsolationTestCase, UserAPITe """ ENABLED_CACHES = ['default'] - TOTAL_QUERY_COUNT = 26 - FULL_RESPONSE_FIELD_COUNT = 29 + TOTAL_QUERY_COUNT = 25 + FULL_RESPONSE_FIELD_COUNT = 28 def setUp(self): super().setUp() @@ -492,19 +492,19 @@ def test_get_account_unknown_user(self, api_client, user): ("client", "user"), ) @ddt.unpack - def test_regsitration_activation_key(self, api_client, user): + def test_regsitration_activation_key_not_exposed(self, api_client, user): """ - Test that registration activation key has a value. 
+ Test that activation_key is NOT returned in the account API response. - UserFactory does not auto-generate registration object for the test users. - It is created only for users that signup via email/API. Therefore, activation key has to be tested manually. + The activation_key is a secret used for email verification and must not be + exposed via the API, as doing so allows bypassing email verification. """ self.create_user_registration(self.user) client = self.login_client(api_client, user) response = self.send_get(client) - assert response.data["activation_key"] is not None + assert "activation_key" not in response.data def test_successful_get_account_by_email(self): """ @@ -815,12 +815,12 @@ def verify_get_own_information(queries): assert data['time_zone'] is None self.client.login(username=self.user.username, password=TEST_PASSWORD) - verify_get_own_information(self._get_num_queries(24)) + verify_get_own_information(self._get_num_queries(23)) # Now make sure that the user can get the same information, even if not active self.user.is_active = False self.user.save() - verify_get_own_information(self._get_num_queries(16)) + verify_get_own_information(self._get_num_queries(15)) def test_get_account_empty_string(self): """ @@ -835,7 +835,7 @@ def test_get_account_empty_string(self): legacy_profile.save() self.client.login(username=self.user.username, password=TEST_PASSWORD) - with self.assertNumQueries(self._get_num_queries(24), table_ignorelist=WAFFLE_TABLES): + with self.assertNumQueries(self._get_num_queries(23), table_ignorelist=WAFFLE_TABLES): response = self.send_get(self.client) for empty_field in ("level_of_education", "gender", "country", "state", "bio",): assert response.data[empty_field] is None diff --git a/openedx/core/djangoapps/user_api/accounts/views.py b/openedx/core/djangoapps/user_api/accounts/views.py index c3ff6ce7a2f2..2aa778142a1a 100644 --- a/openedx/core/djangoapps/user_api/accounts/views.py +++ 
b/openedx/core/djangoapps/user_api/accounts/views.py @@ -297,7 +297,6 @@ def retrieve(self, request, username): If the user makes the request for her own account, or makes a request for another account and has "is_staff" access, an HTTP 200 "OK" response is returned. The response contains the following values. * `id`: numerical lms user id in db - * `activation_key`: auto-genrated activation key when signed up via email * `bio`: null or textual representation of user biographical information ("about me"). * `country`: An ISO 3166 country code or null. * `date_joined`: The date the account was created, in the string format provided by datetime. For example, "2014-08-26T17:52:11Z". diff --git a/openedx/core/release.py b/openedx/core/release.py index ce30df8cc543..dda3add782a0 100644 --- a/openedx/core/release.py +++ b/openedx/core/release.py @@ -8,7 +8,7 @@ # The release line: an Open edX release name ("ficus"), or "master". # This should always be "master" on the master branch, and will be changed # manually when we start release-line branches, like open-release/ficus.master. -RELEASE_LINE = "master" +RELEASE_LINE = "ulmo" def doc_version(): diff --git a/requirements/common_constraints.txt b/requirements/common_constraints.txt index 28ebe29f5cc9..1f3e81f50334 100644 --- a/requirements/common_constraints.txt +++ b/requirements/common_constraints.txt @@ -16,14 +16,14 @@ # this file from Github directly. It does not require packaging in edx-lint. # using LTS django version - +Django<6.0 # elasticsearch>=7.14.0 includes breaking changes in it which caused issues in discovery upgrade process. 
# elastic search changelog: https://www.elastic.co/guide/en/enterprise-search/master/release-notes-7.14.0.html # See https://github.com/openedx/edx-platform/issues/35126 for more info elasticsearch<7.14.0 -# pip 25.3 is incompatible with pip-tools hence causing failures during the build process +# pip 25.3 is incompatible with pip-tools hence causing failures during the build process # Make upgrade command and all requirements upgrade jobs are broken due to this. # See issue https://github.com/openedx/public-engineering/issues/440 for details regarding the ongoing fix. # The constraint can be removed once a release (pip-tools > 7.5.1) is available with support for pip 25.3 diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 96ee4cbcbf80..dd80bc9c72c1 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -61,7 +61,7 @@ numpy<2.0.0 # Date: 2023-09-18 # pinning this version to avoid updates while the library is being developed # Issue for unpinning: https://github.com/openedx/edx-platform/issues/35269 -openedx-learning==0.29.1 +openedx-learning==0.30.2 # Date: 2023-11-29 # Open AI version 1.0.0 dropped support for openai.ChatCompletion which is currently in use in enterprise. diff --git a/requirements/edx-sandbox/README.rst b/requirements/edx-sandbox/README.rst index 4d628f3e2add..d4b1ab8199a1 100644 --- a/requirements/edx-sandbox/README.rst +++ b/requirements/edx-sandbox/README.rst @@ -74,3 +74,21 @@ releases/sumac.txt .. _Python changelog: https://docs.python.org/3.11/whatsnew/changelog.html .. _SciPy changelog: https://docs.scipy.org/doc/scipy/release.html .. _NumPy changelog: https://numpy.org/doc/stable/release.html + +releases/teak.txt +------------------ + +* Frozen at the time of the Teak release +* Supports Python 3.11 and Python 3.12 +* SciPy is upgraded from 1.14.1 to 1.15.2 + +.. 
_SciPy changelog: https://docs.scipy.org/doc/scipy/release.html + +releases/ulmo.txt +------------------ + +* Frozen at the time of the Ulmo release +* Supports Python 3.11 and Python 3.12 +* SciPy is upgraded from 1.15.2 to 1.16.3 + +.. _SciPy changelog: https://docs.scipy.org/doc/scipy/release.html diff --git a/requirements/edx-sandbox/releases/ulmo.txt b/requirements/edx-sandbox/releases/ulmo.txt new file mode 100644 index 000000000000..887f5cc1beaf --- /dev/null +++ b/requirements/edx-sandbox/releases/ulmo.txt @@ -0,0 +1,90 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# make upgrade +# +cffi==2.0.0 + # via cryptography +chem==2.0.0 + # via -r requirements/edx-sandbox/base.in +click==8.3.0 + # via nltk +codejail-includes==2.0.0 + # via -r requirements/edx-sandbox/base.in +contourpy==1.3.3 + # via matplotlib +cryptography==45.0.7 + # via + # -c requirements/constraints.txt + # -r requirements/edx-sandbox/base.in +cycler==0.12.1 + # via matplotlib +fonttools==4.60.1 + # via matplotlib +joblib==1.5.2 + # via nltk +kiwisolver==1.4.9 + # via matplotlib +lxml[html-clean]==5.3.2 + # via + # -c requirements/constraints.txt + # -r requirements/edx-sandbox/base.in + # lxml-html-clean + # openedx-calc +lxml-html-clean==0.4.3 + # via lxml +markupsafe==3.0.3 + # via + # chem + # openedx-calc +matplotlib==3.10.7 + # via -r requirements/edx-sandbox/base.in +mpmath==1.3.0 + # via sympy +networkx==3.5 + # via -r requirements/edx-sandbox/base.in +nltk==3.9.2 + # via + # -r requirements/edx-sandbox/base.in + # chem +numpy==1.26.4 + # via + # -c requirements/constraints.txt + # chem + # contourpy + # matplotlib + # openedx-calc + # scipy +openedx-calc==4.0.2 + # via -r requirements/edx-sandbox/base.in +packaging==25.0 + # via matplotlib +pillow==12.0.0 + # via matplotlib +pycparser==2.23 + # via cffi +pyparsing==3.2.5 + # via + # -r requirements/edx-sandbox/base.in + # chem + # matplotlib + # openedx-calc 
+python-dateutil==2.9.0.post0 + # via matplotlib +random2==1.0.2 + # via -r requirements/edx-sandbox/base.in +regex==2025.10.23 + # via nltk +scipy==1.16.3 + # via + # -r requirements/edx-sandbox/base.in + # chem +six==1.17.0 + # via python-dateutil +sympy==1.14.0 + # via + # -r requirements/edx-sandbox/base.in + # openedx-calc +tqdm==4.67.1 + # via nltk diff --git a/requirements/edx/base.txt b/requirements/edx/base.txt index 1f6eb49a2ad3..6b1596a06543 100644 --- a/requirements/edx/base.txt +++ b/requirements/edx/base.txt @@ -170,8 +170,9 @@ defusedxml==0.7.1 # ora2 # python3-openid # social-auth-core -django==5.2.7 +django==5.2.11 # via + # -c requirements/common_constraints.txt # -c requirements/constraints.txt # -r requirements/edx/kernel.in # casbin-django-orm-adapter @@ -464,6 +465,7 @@ edx-django-utils==8.0.1 # edx-when # enterprise-integrated-channels # event-tracking + # openedx-authz # openedx-events # ora2 # super-csv @@ -531,7 +533,7 @@ edx-rest-api-client==6.2.0 # edx-enterprise # edx-proctoring # enterprise-integrated-channels -edx-search==4.3.0 +edx-search==4.4.0 # via # -r requirements/edx/kernel.in # openedx-forum @@ -724,8 +726,6 @@ lazy==1.6 # lti-consumer-xblock # ora2 # xblock -loremipsum==1.0.5 - # via ora2 lti-consumer-xblock==9.14.3 # via -r requirements/edx/kernel.in lxml[html-clean]==5.3.2 @@ -825,7 +825,7 @@ openedx-atlas==0.7.0 # enterprise-integrated-channels # openedx-authz # openedx-forum -openedx-authz==0.11.2 +openedx-authz==0.20.0 # via -r requirements/edx/kernel.in openedx-calc==4.0.2 # via -r requirements/edx/kernel.in @@ -854,13 +854,13 @@ openedx-filters==2.1.0 # ora2 openedx-forum==0.3.8 # via -r requirements/edx/kernel.in -openedx-learning==0.29.1 +openedx-learning==0.30.2 # via # -c requirements/constraints.txt # -r requirements/edx/kernel.in optimizely-sdk==5.2.0 # via -r requirements/edx/bundled.in -ora2==6.17.1 +ora2==6.17.2 # via -r requirements/edx/bundled.in packaging==25.0 # via diff --git 
a/requirements/edx/development.txt b/requirements/edx/development.txt index b3fc16072930..c16e2117053f 100644 --- a/requirements/edx/development.txt +++ b/requirements/edx/development.txt @@ -343,8 +343,9 @@ distlib==0.4.0 # via # -r requirements/edx/testing.txt # virtualenv -django==5.2.7 +django==5.2.11 # via + # -c requirements/common_constraints.txt # -c requirements/constraints.txt # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt @@ -746,6 +747,7 @@ edx-django-utils==8.0.1 # edx-when # enterprise-integrated-channels # event-tracking + # openedx-authz # openedx-events # ora2 # super-csv @@ -834,7 +836,7 @@ edx-rest-api-client==6.2.0 # edx-enterprise # edx-proctoring # enterprise-integrated-channels -edx-search==4.3.0 +edx-search==4.4.0 # via # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt @@ -1206,11 +1208,6 @@ libsass==0.10.0 # via # -c requirements/constraints.txt # -r requirements/edx/assets.txt -loremipsum==1.0.5 - # via - # -r requirements/edx/doc.txt - # -r requirements/edx/testing.txt - # ora2 lti-consumer-xblock==9.14.3 # via # -r requirements/edx/doc.txt @@ -1375,7 +1372,7 @@ openedx-atlas==0.7.0 # enterprise-integrated-channels # openedx-authz # openedx-forum -openedx-authz==0.11.2 +openedx-authz==0.20.0 # via # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt @@ -1418,7 +1415,7 @@ openedx-forum==0.3.8 # via # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt -openedx-learning==0.29.1 +openedx-learning==0.30.2 # via # -c requirements/constraints.txt # -r requirements/edx/doc.txt @@ -1427,7 +1424,7 @@ optimizely-sdk==5.2.0 # via # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt -ora2==6.17.1 +ora2==6.17.2 # via # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt diff --git a/requirements/edx/doc.txt b/requirements/edx/doc.txt index ac81bd89e8bd..0cb0bf334d13 100644 --- a/requirements/edx/doc.txt +++ b/requirements/edx/doc.txt @@ -232,8 +232,9 @@ defusedxml==0.7.1 # ora2 # 
python3-openid # social-auth-core -django==5.2.7 +django==5.2.11 # via + # -c requirements/common_constraints.txt # -c requirements/constraints.txt # -r requirements/edx/base.txt # casbin-django-orm-adapter @@ -552,6 +553,7 @@ edx-django-utils==8.0.1 # edx-when # enterprise-integrated-channels # event-tracking + # openedx-authz # openedx-events # ora2 # super-csv @@ -620,7 +622,7 @@ edx-rest-api-client==6.2.0 # edx-enterprise # edx-proctoring # enterprise-integrated-channels -edx-search==4.3.0 +edx-search==4.4.0 # via # -r requirements/edx/base.txt # openedx-forum @@ -879,10 +881,6 @@ lazy==1.6 # lti-consumer-xblock # ora2 # xblock -loremipsum==1.0.5 - # via - # -r requirements/edx/base.txt - # ora2 lti-consumer-xblock==9.14.3 # via -r requirements/edx/base.txt lxml[html-clean]==5.3.2 @@ -1002,7 +1000,7 @@ openedx-atlas==0.7.0 # enterprise-integrated-channels # openedx-authz # openedx-forum -openedx-authz==0.11.2 +openedx-authz==0.20.0 # via -r requirements/edx/base.txt openedx-calc==4.0.2 # via -r requirements/edx/base.txt @@ -1032,13 +1030,13 @@ openedx-filters==2.1.0 # ora2 openedx-forum==0.3.8 # via -r requirements/edx/base.txt -openedx-learning==0.29.1 +openedx-learning==0.30.2 # via # -c requirements/constraints.txt # -r requirements/edx/base.txt optimizely-sdk==5.2.0 # via -r requirements/edx/base.txt -ora2==6.17.1 +ora2==6.17.2 # via -r requirements/edx/base.txt packaging==25.0 # via diff --git a/requirements/edx/testing.txt b/requirements/edx/testing.txt index 5cfe412a7fbd..0d87636f6689 100644 --- a/requirements/edx/testing.txt +++ b/requirements/edx/testing.txt @@ -259,8 +259,9 @@ dill==0.4.0 # via pylint distlib==0.4.0 # via virtualenv -django==5.2.7 +django==5.2.11 # via + # -c requirements/common_constraints.txt # -c requirements/constraints.txt # -r requirements/edx/base.txt # casbin-django-orm-adapter @@ -574,6 +575,7 @@ edx-django-utils==8.0.1 # edx-when # enterprise-integrated-channels # event-tracking + # openedx-authz # openedx-events # ora2 # 
super-csv @@ -644,7 +646,7 @@ edx-rest-api-client==6.2.0 # edx-enterprise # edx-proctoring # enterprise-integrated-channels -edx-search==4.3.0 +edx-search==4.4.0 # via # -r requirements/edx/base.txt # openedx-forum @@ -921,10 +923,6 @@ lazy==1.6 # lti-consumer-xblock # ora2 # xblock -loremipsum==1.0.5 - # via - # -r requirements/edx/base.txt - # ora2 lti-consumer-xblock==9.14.3 # via -r requirements/edx/base.txt lxml[html-clean]==5.3.2 @@ -1048,7 +1046,7 @@ openedx-atlas==0.7.0 # enterprise-integrated-channels # openedx-authz # openedx-forum -openedx-authz==0.11.2 +openedx-authz==0.20.0 # via -r requirements/edx/base.txt openedx-calc==4.0.2 # via -r requirements/edx/base.txt @@ -1078,13 +1076,13 @@ openedx-filters==2.1.0 # ora2 openedx-forum==0.3.8 # via -r requirements/edx/base.txt -openedx-learning==0.29.1 +openedx-learning==0.30.2 # via # -c requirements/constraints.txt # -r requirements/edx/base.txt optimizely-sdk==5.2.0 # via -r requirements/edx/base.txt -ora2==6.17.1 +ora2==6.17.2 # via -r requirements/edx/base.txt packaging==25.0 # via diff --git a/scripts/user_retirement/requirements/base.txt b/scripts/user_retirement/requirements/base.txt index a14836ff5e95..e37f2fa942ed 100644 --- a/scripts/user_retirement/requirements/base.txt +++ b/scripts/user_retirement/requirements/base.txt @@ -34,8 +34,9 @@ cryptography==45.0.7 # via # -c requirements/constraints.txt # pyjwt -django==5.2.7 +django==5.2.11 # via + # -c requirements/common_constraints.txt # -c requirements/constraints.txt # django-crum # django-waffle diff --git a/scripts/user_retirement/requirements/testing.txt b/scripts/user_retirement/requirements/testing.txt index 3299e4f8fd78..47dd78bca5ab 100644 --- a/scripts/user_retirement/requirements/testing.txt +++ b/scripts/user_retirement/requirements/testing.txt @@ -52,7 +52,7 @@ cryptography==45.0.7 # pyjwt ddt==1.7.2 # via -r scripts/user_retirement/requirements/testing.in -django==5.2.7 +django==5.2.11 # via # -r 
scripts/user_retirement/requirements/base.txt # django-crum diff --git a/setup.cfg b/setup.cfg index e4419bd149a9..06aea046d413 100644 --- a/setup.cfg +++ b/setup.cfg @@ -166,6 +166,7 @@ ignore_imports = name = Do not depend on non-public API of isolated apps. type = isolated_apps isolated_apps = + cms.djangoapps.modulestore_migrator openedx.core.djangoapps.agreements openedx.core.djangoapps.bookmarks openedx.core.djangoapps.content_libraries diff --git a/xmodule/library_content_block.py b/xmodule/library_content_block.py index f3d563a7fd30..fd65053f7a42 100644 --- a/xmodule/library_content_block.py +++ b/xmodule/library_content_block.py @@ -12,11 +12,12 @@ import json import logging +import typing as t from gettext import gettext, ngettext import nh3 from django.core.exceptions import ObjectDoesNotExist, PermissionDenied -from opaque_keys.edx.locator import LibraryLocator +from opaque_keys.edx.locator import LibraryLocator, LibraryUsageLocatorV2 from web_fragments.fragment import Fragment from webob import Response from xblock.core import XBlock @@ -29,6 +30,10 @@ from xmodule.validation import StudioValidation, StudioValidationMessage from xmodule.x_module import XModuleToXBlockMixin +if t.TYPE_CHECKING: + from xmodule.library_tools import LegacyLibraryToolsService + + _ = lambda text: text logger = logging.getLogger(__name__) @@ -118,20 +123,20 @@ def source_library_key(self): return LibraryLocator.from_string(self.source_library_id) @property - def is_source_lib_migrated_to_v2(self): + def is_source_lib_migrated_to_v2(self) -> bool: """ Determines whether the source library has been migrated to v2. 
""" - from cms.djangoapps.modulestore_migrator.api import is_successfully_migrated + from cms.djangoapps.modulestore_migrator.api import is_forwarded return ( self.source_library_id and self.source_library_version - and is_successfully_migrated(self.source_library_key, source_version=self.source_library_version) + and is_forwarded(self.source_library_key) ) @property - def is_ready_to_migrated_to_v2(self): + def is_ready_to_migrated_to_v2(self) -> bool: """ Returns whether the block can be migrated to v2. """ @@ -198,7 +203,7 @@ def non_editable_metadata_fields(self): ]) return non_editable_fields - def get_tools(self, to_read_library_content: bool = False) -> 'LegacyLibraryToolsService': + def get_tools(self, to_read_library_content: bool = False) -> LegacyLibraryToolsService: """ Grab the library tools service and confirm that it'll work for us. Else, raise LibraryToolsUnavailable. """ @@ -315,16 +320,24 @@ def _v2_update_children_upstream_version(self): Update the upstream and upstream version fields of all children to point to library v2 version of the legacy library blocks. This essentially converts this legacy block to new ItemBankBlock. 
""" - from cms.djangoapps.modulestore_migrator.api import get_target_block_usage_keys - blocks = get_target_block_usage_keys(self.source_library_key) + from cms.djangoapps.modulestore_migrator import api as migrator_api store = modulestore() with store.bulk_operations(self.course_id): - for child in self.get_children(): - source_key, _ = self.runtime.modulestore.get_block_original_usage(child.usage_key) - child.upstream = str(blocks.get(source_key, "")) - # Since after migration, the component in library is in draft state, we want to make sure that sync icon - # appears when it is published - child.upstream_version = 0 + children = self.get_children() + # These are the v1 library item upstream UsageKeys + child_old_upstream_keys = [ + self.runtime.modulestore.get_block_original_usage(child.usage_key)[0] + for child in children + ] + child_migrations = migrator_api.get_forwarding_for_blocks(child_old_upstream_keys) + for child, old_upstream_key in zip(children, child_old_upstream_keys): + upstream_migration = child_migrations.get(old_upstream_key) + if upstream_migration and isinstance(upstream_migration.target_key, LibraryUsageLocatorV2): + child.upstream = str(upstream_migration.target_key) + if upstream_migration.target_version_num: + child.upstream_version = upstream_migration.target_version_num + else: + child.upstream = "" # Use `modulestore()` instead of `self.runtime.modulestore` to make sure that the XBLOCK_UPDATED signal # is triggered store.update_item(child, None) diff --git a/xmodule/modulestore/split_mongo/split.py b/xmodule/modulestore/split_mongo/split.py index 07c985c21cc5..74539fabed90 100644 --- a/xmodule/modulestore/split_mongo/split.py +++ b/xmodule/modulestore/split_mongo/split.py @@ -3283,7 +3283,11 @@ def create_runtime(self, course_entry, lazy): """ Create the proper runtime for this course """ - services = self.services + # A single SplitMongoModuleStore may create many SplitModuleStoreRuntimes, + # each of which will later modify its 
internal dict of services on a per-item and often per-user basis. + # Therefore, it's critical that we make a new copy of our baseline services dict here, + # so that each runtime is free to add and replace its services without impacting other runtimes. + services = self.services.copy() # Only the CourseBlock can have user partitions. Therefore, creating the PartitionService with the library key # instead of the course key does not work. The XBlock validation in Studio fails with the following message: # "This component's access settings refer to deleted or invalid group configurations.". diff --git a/xmodule/partitions/partitions_service.py b/xmodule/partitions/partitions_service.py index 6cffd2c20c7b..ddd37d5212f5 100644 --- a/xmodule/partitions/partitions_service.py +++ b/xmodule/partitions/partitions_service.py @@ -99,8 +99,47 @@ class PartitionService: with a given course. """ - def __init__(self, course_id, cache=None, course=None): - self._course_id = course_id + def __init__(self, course_id: CourseKey, cache=None, course=None): + """Create a new PartitionService. This is user-specific.""" + + # There is a surprising amount of complexity in how to save the + # course_id we were passed in this constructor. + if course_id.org and course_id.course and course_id.run: + # This is the normal case, where we're instantiated with a CourseKey + # that has org, course, and run information. It will also often have + # a version_guid attached in this case, and we will want to strip + # that off in most cases. + # + # The reason for this is that the PartitionService is going to get + # recreated for every runtime (i.e. every block that's created for a + # user). Say you do the following: + # + # 1. You query the modulestore's get_item() for block A. + # 2. You update_item() for a different block B + # 3. You publish block B.
+ # + # When get_item() was called, a SplitModuleStoreRuntime was created + # for block A and it was given a CourseKey that had the version_guid + # encoded in it. If we persist that CourseKey with the version guid + # intact, then it will be incorrect after B is published, and any + # future access checks on A will break because it will try to query + # for a version of the course that is no longer published. + # + # Note that we still need to keep the branch information, or else + # this wouldn't work right in preview mode. + self._course_id = course_id.replace(version_guid=None) + else: + # If we're here, it means that the CourseKey we were sent doesn't + # have an org, course, and run. A much less common (but still legal) + # way to query by CourseKey involves a version_guid-only query, i.e. + # everything is None but the version_guid. In this scenario, it + # doesn't make sense to remove the one identifying piece of + # information we have, so we just assign the CourseKey without + # modification. We *could* potentially query the modulestore + # here and get the more normal form of the CourseKey, but that would + # be much more expensive and require database access. 
+ self._course_id = course_id + self._cache = cache self.course = course diff --git a/xmodule/tests/test_library_content.py b/xmodule/tests/test_library_content.py index 01d27fad98d0..d1b0d4422b50 100644 --- a/xmodule/tests/test_library_content.py +++ b/xmodule/tests/test_library_content.py @@ -736,9 +736,9 @@ def test_removed_invalid(self): ) @patch('xmodule.html_block.HtmlBlock.author_view', dummy_render, create=True) @patch('xmodule.x_module.ModuleStoreRuntime.applicable_aside_types', lambda self, block: []) -class TestMigratedLibraryContentRender(LegacyLibraryContentTest): +class TestLegacyLibraryContentBlockMigration(LegacyLibraryContentTest): """ - Rendering unit tests for LegacyLibraryContentBlock + Unit tests for LegacyLibraryContentBlock """ def setUp(self): @@ -747,29 +747,64 @@ def setUp(self): super().setUp() user = UserFactory() self._sync_lc_block_from_library() - self.organization = OrganizationFactory() - self.lib_key_v2 = LibraryLocatorV2.from_string( - f"lib:{self.organization.short_name}:test-key" - ) + self.organization = OrganizationFactory(short_name="myorg") + self.lib_key_v2 = LibraryLocatorV2.from_string("lib:myorg:mylib") lib_api.create_library( org=self.organization, - slug=self.lib_key_v2.slug, - title="Test Library", + slug="mylib", + title="My Test V2 Library", ) - self.library_v2 = lib_api.ContentLibrary.objects.get(slug=self.lib_key_v2.slug) + self.library_v2 = lib_api.ContentLibrary.objects.get(slug="mylib") api.start_migration_to_library( user=user, source_key=self.library.location.library_key, target_library_key=self.library_v2.library_key, target_collection_slug=None, - composition_level=CompositionLevel.Component.value, - repeat_handling_strategy=RepeatHandlingStrategy.Skip.value, + composition_level=CompositionLevel.Component, + repeat_handling_strategy=RepeatHandlingStrategy.Skip, preserve_url_slugs=True, forward_source_to_target=True, ) # Migrate block self.lc_block.upgrade_to_v2_library(None, None) + def 
test_migration_of_fields(self): + """ + Test that the LC block migration correctly updates the metadata of the LC block and its children. + + This tests only the simplest state: The source lib has been migrated with forwarding, exactly once, + and the LC block has also been migrated. + + TODO(https://github.com/openedx/edx-platform/issues/37837): + It would be good to also test more cases, including: + * When migration occurs which is non-forwarding, it does *not* affect the children of this block. + * When the library migration HAS happened but the LC block migration HASN'T YET, then the fields of + the block and its children will be unchanged, but the user will be prompted to upgrade. + * When some or all of the blocks already exist in the target library before the migration, then + the migration target versions will NOT all be 1, and the upstream_versions should reflect that. + * When the target library blocks have been edited and published AFTER the legacy library migration + but BEFORE the LC block migration, then executing the LC block migration will set upstream_version + based on the migration target versions, NOT the latest versions. + """ + assert self.lc_block.is_migrated_to_v2 is True + children = self.lc_block.get_children() + assert len(children) == len(self.lib_blocks) + # The children's legacy library blocks have been migrated to a V2 library. + # We expect that each child's `upstream` has been updated to point at + # the target of each library block's migration.
+ assert children[0].upstream == "lb:myorg:mylib:html:html_1" + assert children[1].upstream == "lb:myorg:mylib:html:html_2" + assert children[2].upstream == "lb:myorg:mylib:html:html_3" + assert children[3].upstream == "lb:myorg:mylib:html:html_4" + # We also expect that each child's `upstream_version` has been set to the + # version of the migrated library block at the time of its migration, which + # we are assuming is `1` (i.e., the first version, as the blocks did not + # previously exist in the target library). + assert children[0].upstream_version == 1 + assert children[1].upstream_version == 1 + assert children[2].upstream_version == 1 + assert children[3].upstream_version == 1 + def test_preview_view(self): """ Test preview view rendering """ assert len(self.lc_block.children) == len(self.lib_blocks) @@ -791,48 +826,3 @@ def test_author_view(self): assert '
  • html 2
  • ' in rendered.content assert '
  • html 3
  • ' in rendered.content assert '
  • html 4
  • ' in rendered.content - - def test_xml_export_import_cycle(self): - """ - Test the export-import cycle. - """ - # Render block to migrate it first - self.lc_block.render(AUTHOR_VIEW, {}) - # Set the virtual FS to export the olx to. - export_fs = MemoryFS() - self.lc_block.runtime.export_fs = export_fs # pylint: disable=protected-access - - # Export the olx. - node = etree.Element("unknown_root") - self.lc_block.add_xml_to_node(node) - - # Read back the olx. - file_path = f'{self.lc_block.scope_ids.usage_id.block_type}/{self.lc_block.scope_ids.usage_id.block_id}.xml' - with export_fs.open(file_path) as f: - exported_olx = f.read() - - expected_olx_export = ( - f'\n' - f' \n' - f' \n' - f' \n' - f' \n' - '\n' - ) - # And compare. - assert exported_olx == expected_olx_export - - # Now import it. - runtime = DummyModuleStoreRuntime(load_error_blocks=True, course_id=self.lc_block.location.course_key) - runtime.resources_fs = export_fs - olx_element = etree.fromstring(exported_olx) - imported_lc_block = LegacyLibraryContentBlock.parse_xml(olx_element, runtime, None) - - self._verify_xblock_properties(imported_lc_block) - # Verify migration info in the child - assert imported_lc_block.is_migrated_to_v2 - for child in imported_lc_block.get_children(): - assert child.xml_attributes.get('upstream') is not None - assert str(child.xml_attributes.get('upstream_version')) == '0'