@@ -29,10 +29,10 @@
readonly />
{% endif %}
diff --git a/gateway/sds_gateway/templates/users/partials/web_download_modal.html b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
index 72d4fb77..c3e89cf4 100644
--- a/gateway/sds_gateway/templates/users/partials/web_download_modal.html
+++ b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
@@ -1,13 +1,13 @@
-
diff --git a/gateway/sds_gateway/users/tests/test_dataset_versioning.py b/gateway/sds_gateway/users/tests/test_dataset_versioning.py
new file mode 100644
index 00000000..83d24cbc
--- /dev/null
+++ b/gateway/sds_gateway/users/tests/test_dataset_versioning.py
@@ -0,0 +1,224 @@
+"""Tests for dataset versioning (create new version from existing dataset)."""
+
+import uuid
+
+import pytest
+from django.test import Client
+from django.urls import reverse
+
+from sds_gateway.api_methods.models import Capture
+from sds_gateway.api_methods.models import CaptureType
+from sds_gateway.api_methods.models import Dataset
+from sds_gateway.api_methods.models import DatasetStatus
+from sds_gateway.api_methods.models import ItemType
+from sds_gateway.api_methods.models import PermissionLevel
+from sds_gateway.api_methods.models import UserSharePermission
+from sds_gateway.api_methods.tests.factories import FileFactory
+from sds_gateway.users.models import User
+from sds_gateway.users.tests.factories import UserFactory
+
+pytestmark = pytest.mark.django_db
+
+
def _create_dataset_with_files_and_captures(
    owner: User, version: int = 1, **kwargs
) -> Dataset:
    """Create a draft dataset owned by *owner* with one linked file and capture.

    Args:
        owner: User who owns the dataset and its related objects.
        version: Version number for the new dataset (defaults to 1).
        **kwargs: Optional Dataset field overrides (name, description,
            abstract, doi, license, website, provenance, citation, ...).

    Returns:
        The newly created Dataset with one file and one capture attached.
    """
    # Fields we already pass explicitly below. They must be excluded from the
    # kwargs passthrough, otherwise a caller-supplied override (e.g. status=)
    # would raise TypeError for a duplicate keyword argument.
    explicit_fields = {"name", "description", "abstract", "owner", "version", "status"}
    dataset = Dataset.objects.create(
        name=kwargs.get("name", "Test Dataset"),
        owner=owner,
        version=version,
        description=kwargs.get("description", "Description"),
        abstract=kwargs.get("abstract", "Abstract"),
        status=DatasetStatus.DRAFT.value,
        **{k: v for k, v in kwargs.items() if k not in explicit_fields},
    )
    file = FileFactory(owner=owner)
    capture = Capture.objects.create(
        owner=owner,
        capture_type=CaptureType.DigitalRF.value,
        name="Test capture",
    )
    dataset.files.add(file)
    dataset.captures.add(capture)
    return dataset
+
+
def _post_versioning(
    client: Client, dataset_uuid: uuid.UUID, copy_shared_users: bool = False
):
    """POST to the dataset-versioning endpoint and return the raw response."""
    payload = {"dataset_uuid": str(dataset_uuid)}
    if copy_shared_users:
        payload["copy_shared_users"] = "true"
    return client.post(reverse("users:dataset_versioning"), payload)
+
+
class TestDatasetVersioningNewVersionGreater:
    """Verify that versioning a dataset yields a strictly higher version."""

    def test_new_version_is_incremented(self, client: Client) -> None:
        author = UserFactory(is_approved=True)
        original = _create_dataset_with_files_and_captures(author, version=1)
        client.force_login(author)

        response = _post_versioning(client, original.uuid)

        assert response.status_code == 200
        payload = response.json()
        assert payload["success"] is True
        assert payload["version"] == 2
        derived = Dataset.objects.get(previous_version=original)
        assert derived.version == 2
        assert derived.version > original.version
+
+
class TestDatasetVersioningMetadataCopied:
    """Verify descriptive metadata is carried over to the new version."""

    def test_metadata_copied_to_new_version(self, client: Client) -> None:
        author = UserFactory(is_approved=True)
        original = _create_dataset_with_files_and_captures(
            author,
            name="Original Name",
            description="Original description",
            abstract="Original abstract",
            doi="10.1234/test",
            license="MIT",
            website="https://example.com",
            provenance={"source": "test"},
            citation={"title": "Test"},
        )
        client.force_login(author)

        response = _post_versioning(client, original.uuid)

        assert response.status_code == 200
        derived = Dataset.objects.get(previous_version=original)
        # Every descriptive field must match the source dataset exactly.
        copied_fields = (
            "name",
            "description",
            "abstract",
            "doi",
            "license",
            "website",
            "provenance",
            "citation",
        )
        for field_name in copied_fields:
            assert getattr(derived, field_name) == getattr(original, field_name), (
                field_name
            )
+
+
class TestDatasetVersioningFilesAndCaptures:
    """Verify the new version references the same files and captures."""

    def test_files_and_captures_same_as_original(self, client: Client) -> None:
        author = UserFactory(is_approved=True)
        original = _create_dataset_with_files_and_captures(author)
        file_ids = list(original.files.values_list("pk", flat=True))
        capture_ids = list(original.captures.values_list("pk", flat=True))
        client.force_login(author)

        response = _post_versioning(client, original.uuid)

        assert response.status_code == 200
        derived = Dataset.objects.get(previous_version=original)
        assert list(derived.files.values_list("pk", flat=True)) == file_ids
        assert list(derived.captures.values_list("pk", flat=True)) == capture_ids
+
+
class TestDatasetVersioningNonOwnerForbidden:
    """Verify users other than the owner cannot create derived versions."""

    @staticmethod
    def _share(owner: User, user: User, dataset: Dataset, level) -> None:
        """Grant *user* the given permission level on *dataset*."""
        UserSharePermission.objects.create(
            owner=owner,
            shared_with=user,
            item_type=ItemType.DATASET,
            item_uuid=dataset.uuid,
            permission_level=level,
        )

    def test_viewer_cannot_advance_version(self, client: Client) -> None:
        owner = UserFactory(is_approved=True)
        outsider = UserFactory(is_approved=True)
        dataset = _create_dataset_with_files_and_captures(owner)
        self._share(owner, outsider, dataset, PermissionLevel.VIEWER)
        client.force_login(outsider)

        response = _post_versioning(client, dataset.uuid)

        assert response.status_code == 403
        message = response.json()["error"].lower()
        assert "permission" in message or "advance" in message
        assert not Dataset.objects.filter(previous_version=dataset).exists()

    def test_contributor_cannot_advance_version(self, client: Client) -> None:
        owner = UserFactory(is_approved=True)
        outsider = UserFactory(is_approved=True)
        dataset = _create_dataset_with_files_and_captures(owner)
        self._share(owner, outsider, dataset, PermissionLevel.CONTRIBUTOR)
        client.force_login(outsider)

        response = _post_versioning(client, dataset.uuid)

        assert response.status_code == 403
        assert not Dataset.objects.filter(previous_version=dataset).exists()

    def test_unshared_user_cannot_advance_version(self, client: Client) -> None:
        owner = UserFactory(is_approved=True)
        outsider = UserFactory(is_approved=True)
        dataset = _create_dataset_with_files_and_captures(owner)
        client.force_login(outsider)

        response = _post_versioning(client, dataset.uuid)

        assert response.status_code == 403
        assert not Dataset.objects.filter(previous_version=dataset).exists()
+
+
class TestDatasetVersioningPreviousAndNext:
    """Verify previous_version / next_version linkage between versions."""

    def test_previous_version_and_next_version_linked(
        self, client: Client
    ) -> None:
        author = UserFactory(is_approved=True)
        original = _create_dataset_with_files_and_captures(author, version=1)
        client.force_login(author)

        response = _post_versioning(client, original.uuid)

        assert response.status_code == 200
        derived = Dataset.objects.get(previous_version=original)
        # Forward and backward links must both resolve to each other.
        assert derived.previous_version_id == original.pk
        assert original.next_version.filter(pk=derived.pk).exists()
        assert original.next_version.get().version == 2
+
+
class TestDatasetVersioningNotCarriedOver:
    """Verify timestamps, is_public, and shares are reset on a new version."""

    def test_timestamps_do_not_carry_over(self, client: Client) -> None:
        author = UserFactory(is_approved=True)
        original = _create_dataset_with_files_and_captures(author)
        created_before = original.created_at
        updated_before = original.updated_at
        client.force_login(author)

        response = _post_versioning(client, original.uuid)

        assert response.status_code == 200
        derived = Dataset.objects.get(previous_version=original)
        # The copy gets fresh timestamps and a fresh primary key.
        assert derived.created_at >= created_before
        assert derived.updated_at >= updated_before
        assert derived.uuid != original.uuid

    def test_is_public_reset_to_false(self, client: Client) -> None:
        author = UserFactory(is_approved=True)
        original = _create_dataset_with_files_and_captures(author)
        original.is_public = True
        original.save(update_fields=["is_public"])
        client.force_login(author)

        response = _post_versioning(client, original.uuid)

        assert response.status_code == 200
        derived = Dataset.objects.get(previous_version=original)
        assert derived.is_public is False

    def test_shared_users_do_not_carry_over_when_not_requested(
        self, client: Client
    ) -> None:
        author = UserFactory(is_approved=True)
        viewer = UserFactory(is_approved=True)
        original = _create_dataset_with_files_and_captures(author)
        UserSharePermission.objects.create(
            owner=author,
            shared_with=viewer,
            item_type=ItemType.DATASET,
            item_uuid=original.uuid,
            permission_level=PermissionLevel.VIEWER,
        )
        client.force_login(author)

        response = _post_versioning(client, original.uuid, copy_shared_users=False)

        assert response.status_code == 200
        derived = Dataset.objects.get(previous_version=original)
        assert not UserSharePermission.objects.filter(
            item_type=ItemType.DATASET,
            item_uuid=derived.uuid,
        ).exists()
diff --git a/gateway/sds_gateway/users/urls.py b/gateway/sds_gateway/users/urls.py
index a9e638ba..3480ca91 100644
--- a/gateway/sds_gateway/users/urls.py
+++ b/gateway/sds_gateway/users/urls.py
@@ -18,6 +18,7 @@
from .views import user_captures_api_view
from .views import user_dataset_details_view
from .views import user_dataset_list_view
+from .views import user_dataset_versioning_view
from .views import user_detail_view
from .views import user_download_item_view
from .views import user_file_detail_view
@@ -34,6 +35,11 @@
urlpatterns = [
path("~redirect/", view=user_redirect_view, name="redirect"),
path("~update/", view=user_update_view, name="update"),
+ path(
+ "dataset-versioning/",
+ view=user_dataset_versioning_view,
+ name="dataset_versioning",
+ ),
    path("<str:username>/", view=user_detail_view, name="detail"),
path("view-api-key/", user_api_key_view, name="view_api_key"),
path("new-api-key/", new_api_key_view, name="new_api_key"),
@@ -67,6 +73,8 @@
path("dataset-list/", user_dataset_list_view, name="dataset_list"),
path("search-datasets/", user_search_datasets_view, name="search_datasets"),
path("dataset-details/", user_dataset_details_view, name="dataset_details"),
+ path("render-html/", render_html_fragment_view, name="render_html"),
+ path("group-captures/", user_group_captures_view, name="group_captures"),
path(
"api/keyword-autocomplete/",
keyword_autocomplete_api_view,
@@ -77,8 +85,6 @@
user_publish_dataset_view,
name="publish_dataset",
),
- path("render-html/", render_html_fragment_view, name="render_html"),
- path("group-captures/", user_group_captures_view, name="group_captures"),
path(
        "temporary-zip/<uuid:uuid>/download/",
user_temporary_zip_download_view,
diff --git a/gateway/sds_gateway/users/views.py b/gateway/sds_gateway/users/views.py
index 7686bae5..da18a3f0 100644
--- a/gateway/sds_gateway/users/views.py
+++ b/gateway/sds_gateway/users/views.py
@@ -18,6 +18,7 @@
from django.core.paginator import Paginator
from django.core.serializers.json import DjangoJSONEncoder
from django.db import DatabaseError
+from django.db import transaction
from django.db.models import Q
from django.db.models import Sum
from django.db.models.query import QuerySet
@@ -31,6 +32,7 @@
from django.shortcuts import redirect
from django.shortcuts import render
from django.template.defaultfilters import slugify
+from django.template.loader import render_to_string
from django.urls import reverse
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
@@ -62,6 +64,7 @@
from sds_gateway.api_methods.models import ShareGroup
from sds_gateway.api_methods.models import TemporaryZipFile
from sds_gateway.api_methods.models import UserSharePermission
+from sds_gateway.api_methods.models import get_shared_users_for_item
from sds_gateway.api_methods.models import get_user_permission_level
from sds_gateway.api_methods.models import user_has_access_to_item
from sds_gateway.api_methods.serializers.capture_serializers import (
@@ -2791,9 +2794,30 @@ def get(self, request, *args, **kwargs) -> HttpResponse:
datasets_with_shared_users.extend(
serialize_datasets_for_user(shared_datasets, request.user)
)
-
page_obj = self._paginate_datasets(datasets_with_shared_users, request)
+ # Check if this is an AJAX request
+ if request.headers.get("X-Requested-With") == "XMLHttpRequest":
+ # Return table and modals so the client can update both after list refresh
+ table_html = render_to_string(
+ "users/components/dataset_list_table.html",
+ {
+ "page_obj": page_obj,
+ "sort_by": sort_by,
+ "sort_order": sort_order,
+ "ajax_fragment": True,
+ },
+ request=request,
+ )
+ modals_html = render_to_string(
+ "users/components/dataset_list_modals.html",
+ {"page_obj": page_obj},
+ request=request,
+ )
+ # Separator used by ListRefreshManager to split table vs modals
+            list_refresh_sep = "<!--LIST_REFRESH_SEPARATOR-->"  # TODO(review): marker was stripped in transit; confirm the exact token ListRefreshManager splits on
+ return HttpResponse(table_html + list_refresh_sep + modals_html)
+
return render(
request,
template_name=self.template_name,
@@ -3489,6 +3513,146 @@ def get(self, request, *args, **kwargs) -> JsonResponse:
user_dataset_details_view = DatasetDetailsView.as_view()
class DatasetVersioningView(Auth0LoginRequiredMixin, View):
    """Create a new draft version of an existing dataset.

    POST parameters:
        dataset_uuid: UUID of the dataset to version (required).
        copy_shared_users: "true"/"1"/"on" to also copy the dataset's share
            permissions onto the new version (optional, defaults to false).
    """

    def post(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
        """Validate the request and create the next version of the dataset.

        Returns:
            200 JSON ``{"success": true, "version": <int>}`` on success,
            400 for a missing or malformed UUID, 403 when the user may not
            advance the version, 404 when the dataset does not exist.
        """
        import uuid as uuid_lib  # function-scope stdlib import for validation

        dataset_uuid = request.POST.get("dataset_uuid")
        copy_shared_users = request.POST.get("copy_shared_users", "false").lower() in (
            "true",
            "1",
            "on",
        )
        if not dataset_uuid:
            return JsonResponse({"error": "Dataset UUID is required"}, status=400)

        # Reject malformed UUIDs up front: filtering a UUIDField with a bad
        # string raises ValidationError inside the ORM, which would surface
        # to the client as an opaque 500 instead of a 400.
        try:
            uuid_lib.UUID(dataset_uuid)
        except (ValueError, TypeError, AttributeError):
            return JsonResponse({"error": "Invalid dataset UUID"}, status=400)

        dataset = get_object_or_404(Dataset, uuid=dataset_uuid, is_deleted=False)

        # Only users allowed to advance the version may create a new one.
        if not UserSharePermission.user_can_advance_version(
            request.user, dataset_uuid, ItemType.DATASET
        ):
            return JsonResponse(
                {
                    "error": (
                        "You do not have permission to advance "
                        "the version of this dataset"
                    )
                },
                status=403,
            )

        # Copy the dataset together with its relations inside a transaction.
        new_dataset = self._copy_dataset_with_relations(
            dataset, request.user, copy_shared_users
        )

        return JsonResponse({"success": True, "version": new_dataset.version})

    def _copy_dataset_with_relations(
        self, original_dataset: Dataset, request_user: User, copy_shared_users: bool
    ) -> Dataset:
        """
        Copy a dataset along with all its related files and captures.

        Args:
            original_dataset: The dataset to copy
            request_user: The user creating the new version
            copy_shared_users: Whether to copy share permissions onto the
                new version as well

        Returns:
            The new dataset with copied related objects (or the already
            existing next version if one was created concurrently)
        """
        new_version = original_dataset.version + 1

        # Use database transaction with locking to prevent race conditions
        # when multiple requests try to create the same version simultaneously
        with transaction.atomic():
            # Lock the original dataset to prevent concurrent version creation
            locked_dataset = Dataset.objects.select_for_update().get(
                uuid=original_dataset.uuid
            )

            # Check again for existing version within the locked transaction
            existing_version = Dataset.objects.filter(
                previous_version=locked_dataset,
                version=new_version,
                owner=request_user,
                is_deleted=False,
            ).first()

            if existing_version:
                # Return existing version if it was already created
                return existing_version

            # Fields that should not be copied from the original dataset
            # These fields will be reset for the new version
            no_copy_fields = [
                "uuid",
                "created_at",
                "updated_at",
                "status",
                "is_public",
                "shared_with",
                "previous_version",
                "version",
                "owner",
            ]

            # Copy only concrete, non-relational-to-many fields; m2m and
            # reverse relations are handled explicitly below.
            dataset_data = {
                field.name: getattr(locked_dataset, field.name)
                for field in locked_dataset._meta.get_fields()  # noqa: SLF001
                if hasattr(field, "name")
                and field.name not in no_copy_fields
                and not field.many_to_many
                and not field.one_to_many
                and not field.one_to_one
            }
            dataset_data["owner"] = request_user
            dataset_data["version"] = new_version
            dataset_data["previous_version"] = locked_dataset

            # Ensure status is draft for new version
            dataset_data["status"] = DatasetStatus.DRAFT.value
            dataset_data["is_public"] = False

            new_dataset = Dataset.objects.create(**dataset_data)

            # Set the relationships on the new dataset
            new_dataset.captures.set(locked_dataset.captures.all())
            new_dataset.files.set(locked_dataset.files.all())
            new_dataset.keywords.set(locked_dataset.keywords.all())
            if copy_shared_users:
                self._copy_shared_users(locked_dataset, new_dataset)

        return new_dataset

    def _copy_shared_users(
        self, original_dataset: Dataset, new_dataset: Dataset
    ) -> None:
        """
        Copy the shared users from the original dataset to the new dataset.

        Args:
            original_dataset: The original dataset
            new_dataset: The new dataset
        """
        shared_users = get_shared_users_for_item(
            original_dataset.uuid, ItemType.DATASET
        )
        for shared_user in shared_users:
            UserSharePermission.objects.create(
                owner=new_dataset.owner,
                shared_with=shared_user.shared_with,
                item_type=ItemType.DATASET,
                item_uuid=new_dataset.uuid,
                is_enabled=True,
                is_deleted=False,
                permission_level=shared_user.permission_level,
            )


user_dataset_versioning_view = DatasetVersioningView.as_view()
+
+
# Auth0LoginRequiredMixin is not used because this view might be called from the home
# page where users may not be authenticated, but we still want to allow rendering of
# public components.